mirror of https://github.com/LukeHagar/arbiter.git
synced 2025-12-06 04:19:14 +00:00

added gitignore, and cleaned repo, swapped to pnpm and updated actions
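
The commit message mentions an added `.gitignore`, but the file itself is not shown in this diff view. Given that the rest of the commit deletes the vendored `node_modules/` tree, a hypothetical minimal version would need at least the following (an editor's sketch, not the committed file):

```gitignore
# Hypothetical sketch — the committed .gitignore is not shown in this diff.
# Dependencies are restored with `pnpm install` instead of being vendored.
node_modules/
```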

26  .github/workflows/ci.yml (vendored)

@@ -17,16 +17,21 @@ jobs:
         uses: actions/setup-node@v4
         with:
           node-version: '20.x'
-          cache: 'npm'
+          cache: 'pnpm'
+
+      - name: Setup pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 9

       - name: Install dependencies
-        run: npm ci
+        run: pnpm install --frozen-lockfile

       - name: Check formatting
-        run: npm run format:check
+        run: pnpm run format:check

       - name: Run linter
-        run: npm run lint -- --max-warnings 0
+        run: pnpm run lint -- --max-warnings 0
         continue-on-error: true

   test:

@@ -43,13 +48,18 @@ jobs:
         uses: actions/setup-node@v4
         with:
           node-version: ${{ matrix.node-version }}
-          cache: 'npm'
+          cache: 'pnpm'
+
+      - name: Setup pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 9

       - name: Install dependencies
-        run: npm ci
+        run: pnpm install --frozen-lockfile

       - name: Run unit tests
-        run: npm run test:unit
+        run: pnpm run test:unit

       - name: Run integration tests
-        run: npm run test:integration
+        run: pnpm run test:integration
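
A practical note on step ordering (an editor's observation, not part of this diff): `actions/setup-node` resolves the pnpm store location by invoking pnpm when `cache: 'pnpm'` is set, so `pnpm/action-setup` generally has to run before it. A minimal sketch of the conventional arrangement:

```yaml
steps:
  - uses: actions/checkout@v4

  # Install pnpm first; setup-node's pnpm cache integration runs `pnpm store path`.
  - name: Setup pnpm
    uses: pnpm/action-setup@v4
    with:
      version: 9

  - uses: actions/setup-node@v4
    with:
      node-version: '20.x'
      cache: 'pnpm'   # resolvable now that pnpm is on PATH

  - name: Install dependencies
    run: pnpm install --frozen-lockfile
```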

12  .github/workflows/publish.yml (vendored)

@@ -26,15 +26,21 @@ jobs:
         with:
           node-version: 20
           registry-url: 'https://registry.npmjs.org'
+          cache: 'pnpm'
+
+      - name: Setup pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 9

       - name: Install dependencies
-        run: npm ci
+        run: pnpm install --frozen-lockfile

       - name: Run tests
-        run: npm test
+        run: pnpm test

       - name: Build
-        run: npm run build
+        run: pnpm run build

       - name: Determine publish necessity
         id: check
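
`pnpm install --frozen-lockfile` is the CI analogue of `npm ci`: it installs exactly what `pnpm-lock.yaml` records and fails instead of mutating the lockfile, so this workflow assumes a pnpm lockfile is committed alongside the cleanup below. The publish step itself falls outside the hunk; a hypothetical sketch of how it would typically pair with the `registry-url` configured above (the `NPM_TOKEN` secret name is an assumption):

```yaml
# setup-node's registry-url writes an .npmrc that reads the auth token from
# NODE_AUTH_TOKEN, so the token only needs to be exposed at publish time.
- name: Publish
  run: pnpm publish --no-git-checks
  env:
    NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```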

1  node_modules/.bin/acorn (generated, vendored)
@@ -1 +0,0 @@
../acorn/bin/acorn

1  node_modules/.bin/esbuild (generated, vendored)
@@ -1 +0,0 @@
../esbuild/bin/esbuild

1  node_modules/.bin/eslint (generated, vendored)
@@ -1 +0,0 @@
../eslint/bin/eslint.js

1  node_modules/.bin/eslint-config-prettier (generated, vendored)
@@ -1 +0,0 @@
../eslint-config-prettier/bin/cli.js

1  node_modules/.bin/js-yaml (generated, vendored)
@@ -1 +0,0 @@
../js-yaml/bin/js-yaml.js

1  node_modules/.bin/mime (generated, vendored)
@@ -1 +0,0 @@
../mime/cli.js

1  node_modules/.bin/mkdirp (generated, vendored)
@@ -1 +0,0 @@
../mkdirp/bin/cmd.js

1  node_modules/.bin/nanoid (generated, vendored)
@@ -1 +0,0 @@
../nanoid/bin/nanoid.cjs

1  node_modules/.bin/node-which (generated, vendored)
@@ -1 +0,0 @@
../which/bin/node-which

1  node_modules/.bin/parser (generated, vendored)
@@ -1 +0,0 @@
../@babel/parser/bin/babel-parser.js

1  node_modules/.bin/prettier (generated, vendored)
@@ -1 +0,0 @@
../prettier/bin/prettier.cjs

1  node_modules/.bin/resolve (generated, vendored)
@@ -1 +0,0 @@
../resolve/bin/resolve

1  node_modules/.bin/rimraf (generated, vendored)
@@ -1 +0,0 @@
../rimraf/bin.js

1  node_modules/.bin/rollup (generated, vendored)
@@ -1 +0,0 @@
../rollup/dist/bin/rollup

1  node_modules/.bin/semver (generated, vendored)
@@ -1 +0,0 @@
../semver/bin/semver.js

1  node_modules/.bin/tree-kill (generated, vendored)
@@ -1 +0,0 @@
../tree-kill/cli.js

1  node_modules/.bin/ts-node (generated, vendored)
@@ -1 +0,0 @@
../ts-node/dist/bin.js

1  node_modules/.bin/ts-node-cwd (generated, vendored)
@@ -1 +0,0 @@
../ts-node/dist/bin-cwd.js

1  node_modules/.bin/ts-node-dev (generated, vendored)
@@ -1 +0,0 @@
../ts-node-dev/lib/bin.js

1  node_modules/.bin/ts-node-esm (generated, vendored)
@@ -1 +0,0 @@
../ts-node/dist/bin-esm.js

1  node_modules/.bin/ts-node-script (generated, vendored)
@@ -1 +0,0 @@
../ts-node/dist/bin-script.js

1  node_modules/.bin/ts-node-transpile-only (generated, vendored)
@@ -1 +0,0 @@
../ts-node/dist/bin-transpile.js

1  node_modules/.bin/ts-script (generated, vendored)
@@ -1 +0,0 @@
../ts-node/dist/bin-script-deprecated.js

1  node_modules/.bin/tsc (generated, vendored)
@@ -1 +0,0 @@
../typescript/bin/tsc

1  node_modules/.bin/tsnd (generated, vendored)
@@ -1 +0,0 @@
../ts-node-dev/lib/bin.js

1  node_modules/.bin/tsserver (generated, vendored)
@@ -1 +0,0 @@
../typescript/bin/tsserver

1  node_modules/.bin/uuid (generated, vendored)
@@ -1 +0,0 @@
../uuid/dist/bin/uuid

1  node_modules/.bin/vite (generated, vendored)
@@ -1 +0,0 @@
../vite/bin/vite.js

1  node_modules/.bin/vite-node (generated, vendored)
@@ -1 +0,0 @@
../vite-node/vite-node.mjs

1  node_modules/.bin/vitest (generated, vendored)
@@ -1 +0,0 @@
../vitest/vitest.mjs

1  node_modules/.bin/why-is-node-running (generated, vendored)
@@ -1 +0,0 @@
../why-is-node-running/cli.js

1  node_modules/.bin/yaml (generated, vendored)
@@ -1 +0,0 @@
../yaml/bin.mjs

8462  node_modules/.package-lock.json (generated, vendored)
File diff suppressed because it is too large

1  node_modules/.vite/results.json (generated, vendored)
@@ -1 +0,0 @@
{"version":"3.0.9","results":[[":src/store/__tests__/openApiStore.test.ts",{"duration":17.177212999999995,"failed":false}],[":src/middleware/__tests__/harRecorder.test.ts",{"duration":16.706658000000004,"failed":false}],[":integration/__tests__/proxy.test.ts",{"duration":72.56247300000007,"failed":false}],[":integration/__tests__/server.test.ts",{"duration":47.499804999999924,"failed":false}],[":src/__tests__/cli.test.ts",{"duration":4.407485999999949,"failed":false}]]}

202  node_modules/@ampproject/remapping/LICENSE (generated, vendored)

@@ -1,202 +0,0 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

218  node_modules/@ampproject/remapping/README.md (generated, vendored)

@@ -1,218 +0,0 @@
# @ampproject/remapping

> Remap sequential sourcemaps through transformations to point at the original source code

Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.

With remapping, none of your source code transformations need to be aware of the input's sourcemap,
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).

## Installation

```sh
npm install @ampproject/remapping
```

## Usage

```typescript
function remapping(
  map: SourceMap | SourceMap[],
  loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
  options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;

// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
  readonly importer: string;
  readonly depth: number;
  source: string;
  content: string | null | undefined;
}
```

`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as original, untransformed source code.

```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
  file: 'transformed.js',
  // 1st column of 2nd line of output file translates into the 1st source
  // file, line 3, column 2
  mappings: ';CAEE',
  sources: ['helloworld.js'],
  version: 3,
});

// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
  file: 'transformed.min.js',
  // 0th column of 1st line of output file translates into the 1st source
  // file, line 2, column 1.
  mappings: 'AACC',
  names: [],
  sources: ['transformed.js'],
  version: 3,
});

const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    // The "transformed.js" file is a transformed file.
    if (file === 'transformed.js') {
      // The root importer is empty.
      console.assert(ctx.importer === '');
      // The depth in the sourcemap tree we're currently loading.
      // The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
      console.assert(ctx.depth === 1);

      return transformedMap;
    }

    // Loader will be called to load transformedMap's source file pointers as well.
    console.assert(file === 'helloworld.js');
    // `transformed.js`'s sourcemap points into `helloworld.js`.
    console.assert(ctx.importer === 'transformed.js');
    // This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
    console.assert(ctx.depth === 2);
    return null;
  }
);

console.log(remapped);
// {
//   file: 'transpiled.min.js',
//   mappings: 'AAEE',
//   sources: ['helloworld.js'],
//   version: 3,
// };
```

In this example, `loader` will be called twice:

1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
   associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
   be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
   we return `null`.

The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "0th column of the first output line points to the 1st
column of the 2nd line of the file `helloworld.js`".

### Multiple transformations of a file

As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:

```js
const remapped = remapping(
  [minifiedTransformedMap, transformedMap],
  () => null
);

console.log(remapped);
// {
//   file: 'transpiled.min.js',
//   mappings: 'AAEE',
//   sources: ['helloworld.js'],
//   version: 3,
// };
```

### Advanced control of the loading graph

#### `source`

The `source` property can be overridden to any value to change the location of the current load. Eg,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    if (file === 'transformed.js') {
      // We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
      // source files are loaded, they will now be relative to `src/`.
      ctx.source = 'src/transformed.js';
      return transformedMap;
    }

    console.assert(file === 'src/helloworld.js');
    // We could further change the source of this original file, eg, to be inside a nested directory
    // itself. This will be reflected in the remapped sourcemap.
    ctx.source = 'src/nested/helloworld.js';
    return null;
  }
);

console.log(remapped);
// {
//   …,
//   sources: ['src/nested/helloworld.js'],
// };
```

#### `content`

The `content` property can be overridden when we encounter an original source file. Eg, this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    if (file === 'transformed.js') {
      // transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
      // would not include any `sourcesContent` values.
      return transformedMap;
    }

    console.assert(file === 'helloworld.js');
    // We can read the file to provide the source content.
    ctx.content = fs.readFileSync(file, 'utf8');
    return null;
  }
);

console.log(remapped);
// {
//   …,
//   sourcesContent: [
//     'console.log("Hello world!")',
//   ],
// };
```

### Options

#### excludeContent

By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.

#### decodedMappings

By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.

197  node_modules/@ampproject/remapping/dist/remapping.mjs (generated, vendored)

@@ -1,197 +0,0 @@
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddSegment, setSourceContent, setIgnore, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';

const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
    return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
    return {
        map,
        sources,
        source,
        content,
        ignore,
    };
}
/**
 * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
 * (which may themselves be SourceMapTrees).
 */
function MapSource(map, sources) {
    return Source(map, sources, '', null, false);
}
/**
 * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
 * segment tracing ends at the `OriginalSource`.
 */
function OriginalSource(source, content, ignore) {
    return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
 * traceMappings is only called on the root level SourceMapTree, and begins the process of
 * resolving each mapping in terms of the original source files.
 */
function traceMappings(tree) {
    // TODO: Eventually support sourceRoot, which has to be removed because the sources are already
    // fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
    const gen = new GenMapping({ file: tree.map.file });
    const { sources: rootSources, map } = tree;
    const rootNames = map.names;
    const rootMappings = decodedMappings(map);
    for (let i = 0; i < rootMappings.length; i++) {
        const segments = rootMappings[i];
        for (let j = 0; j < segments.length; j++) {
            const segment = segments[j];
            const genCol = segment[0];
            let traced = SOURCELESS_MAPPING;
            // 1-length segments only move the current generated column, there's no source information
            // to gather from it.
            if (segment.length !== 1) {
                const source = rootSources[segment[1]];
                traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
                // If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
                // respective segment into an original source.
                if (traced == null)
                    continue;
            }
            const { column, line, name, content, source, ignore } = traced;
            maybeAddSegment(gen, i, genCol, source, line, column, name);
            if (source && content != null)
                setSourceContent(gen, source, content);
            if (ignore)
                setIgnore(gen, source, true);
        }
    }
    return gen;
}
/**
 * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
 * child SourceMapTrees, until we find the original source map.
 */
function originalPositionFor(source, line, column, name) {
    if (!source.map) {
        return SegmentObject(source.source, line, column, name, source.content, source.ignore);
    }
    const segment = traceSegment(source.map, line, column);
    // If we couldn't find a segment, then this doesn't exist in the sourcemap.
    if (segment == null)
        return null;
    // 1-length segments only move the current generated column, there's no source information
    // to gather from it.
    if (segment.length === 1)
        return SOURCELESS_MAPPING;
    return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}

function asArray(value) {
    if (Array.isArray(value))
        return value;
    return [value];
}
/**
 * Recursively builds a tree structure out of sourcemap files, with each node
 * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
 * `OriginalSource`s and `SourceMapTree`s.
 *
 * Every sourcemap is composed of a collection of source files and mappings
 * into locations of those source files. When we generate a `SourceMapTree` for
 * the sourcemap, we attempt to load each source file's own sourcemap. If it
 * does not have an associated sourcemap, it is considered an original,
 * unmodified source file.
 */
function buildSourceMapTree(input, loader) {
    const maps = asArray(input).map((m) => new TraceMap(m, ''));
    const map = maps.pop();
    for (let i = 0; i < maps.length; i++) {
        if (maps[i].sources.length > 1) {
            throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
                'Did you specify these with the most recent transformation maps first?');
        }
    }
    let tree = build(map, loader, '', 0);
    for (let i = maps.length - 1; i >= 0; i--) {
        tree = MapSource(maps[i], [tree]);
    }
    return tree;
}
function build(map, loader, importer, importerDepth) {
    const { resolvedSources, sourcesContent, ignoreList } = map;
    const depth = importerDepth + 1;
    const children = resolvedSources.map((sourceFile, i) => {
        // The loading context gives the loader more information about why this file is being loaded
        // (eg, from which importer). It also allows the loader to override the location of the loaded
        // sourcemap/original source, or to override the content in the sourcesContent field if it's
        // an unmodified source file.
        const ctx = {
            importer,
            depth,
            source: sourceFile || '',
            content: undefined,
            ignore: undefined,
        };
        // Use the provided loader callback to retrieve the file's sourcemap.
        // TODO: We should eventually support async loading of sourcemap files.
        const sourceMap = loader(ctx.source, ctx);
        const { source, content, ignore } = ctx;
        // If there is a sourcemap, then we need to recurse into it to load its source files.
        if (sourceMap)
            return build(new TraceMap(sourceMap, source), loader, source, depth);
        // Else, it's an unmodified source file.
        // The contents of this unmodified source file can be overridden via the loader context,
        // allowing it to be explicitly null or a string. If it remains undefined, we fall back to
        // the importing sourcemap's `sourcesContent` field.
        const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
        const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
        return OriginalSource(source, sourceContent, ignored);
    });
    return MapSource(map, children);
}

/**
 * A SourceMap v3 compatible sourcemap, which only includes fields that were
 * provided to it.
 */
class SourceMap {
    constructor(map, options) {
        const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
        this.version = out.version; // SourceMap spec says this should be first.
        this.file = out.file;
        this.mappings = out.mappings;
        this.names = out.names;
        this.ignoreList = out.ignoreList;
        this.sourceRoot = out.sourceRoot;
        this.sources = out.sources;
        if (!options.excludeContent) {
            this.sourcesContent = out.sourcesContent;
        }
    }
    toString() {
        return JSON.stringify(this);
    }
}

/**
 * Traces through all the mappings in the root sourcemap, through the sources
 * (and their sourcemaps), all the way back to the original source location.
 *
 * `loader` will be called every time we encounter a source file. If it returns
 * a sourcemap, we will recurse into that sourcemap to continue the trace. If
 * it returns a falsey value, that source file is treated as an original,
 * unmodified source file.
 *
 * Pass `excludeContent` to exclude any self-containing source file content
 * from the output sourcemap.
 *
 * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
 * VLQ encoded) mappings.
 */
function remapping(input, loader, options) {
    const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
    const tree = buildSourceMapTree(input, loader);
    return new SourceMap(traceMappings(tree), opts);
}

export { remapping as default };
//# sourceMappingURL=remapping.mjs.map

1  node_modules/@ampproject/remapping/dist/remapping.mjs.map (generated, vendored)
File diff suppressed because one or more lines are too long

202  node_modules/@ampproject/remapping/dist/remapping.umd.js (generated, vendored)

@@ -1,202 +0,0 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
    typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
})(this, (function (traceMapping, genMapping) { 'use strict';

    const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
    const EMPTY_SOURCES = [];
    function SegmentObject(source, line, column, name, content, ignore) {
        return { source, line, column, name, content, ignore };
    }
    function Source(map, sources, source, content, ignore) {
        return {
            map,
            sources,
            source,
            content,
            ignore,
        };
    }
    /**
     * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
     * (which may themselves be SourceMapTrees).
     */
    function MapSource(map, sources) {
        return Source(map, sources, '', null, false);
    }
    /**
     * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
     * segment tracing ends at the `OriginalSource`.
     */
    function OriginalSource(source, content, ignore) {
        return Source(null, EMPTY_SOURCES, source, content, ignore);
    }
    /**
     * traceMappings is only called on the root level SourceMapTree, and begins the process of
     * resolving each mapping in terms of the original source files.
     */
    function traceMappings(tree) {
        // TODO: Eventually support sourceRoot, which has to be removed because the sources are already
        // fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
        const gen = new genMapping.GenMapping({ file: tree.map.file });
        const { sources: rootSources, map } = tree;
        const rootNames = map.names;
        const rootMappings = traceMapping.decodedMappings(map);
        for (let i = 0; i < rootMappings.length; i++) {
            const segments = rootMappings[i];
            for (let j = 0; j < segments.length; j++) {
                const segment = segments[j];
                const genCol = segment[0];
                let traced = SOURCELESS_MAPPING;
                // 1-length segments only move the current generated column, there's no source information
                // to gather from it.
                if (segment.length !== 1) {
                    const source = rootSources[segment[1]];
                    traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
                    // If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
                    // respective segment into an original source.
                    if (traced == null)
                        continue;
                }
                const { column, line, name, content, source, ignore } = traced;
                genMapping.maybeAddSegment(gen, i, genCol, source, line, column, name);
                if (source && content != null)
                    genMapping.setSourceContent(gen, source, content);
                if (ignore)
                    genMapping.setIgnore(gen, source, true);
            }
        }
        return gen;
    }
    /**
     * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
     * child SourceMapTrees, until we find the original source map.
     */
    function originalPositionFor(source, line, column, name) {
        if (!source.map) {
            return SegmentObject(source.source, line, column, name, source.content, source.ignore);
        }
        const segment = traceMapping.traceSegment(source.map, line, column);
        // If we couldn't find a segment, then this doesn't exist in the sourcemap.
        if (segment == null)
            return null;
        // 1-length segments only move the current generated column, there's no source information
        // to gather from it.
        if (segment.length === 1)
            return SOURCELESS_MAPPING;
        return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
    }

    function asArray(value) {
        if (Array.isArray(value))
            return value;
        return [value];
    }
    /**
     * Recursively builds a tree structure out of sourcemap files, with each node
     * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
     * `OriginalSource`s and `SourceMapTree`s.
     *
     * Every sourcemap is composed of a collection of source files and mappings
     * into locations of those source files. When we generate a `SourceMapTree` for
     * the sourcemap, we attempt to load each source file's own sourcemap. If it
     * does not have an associated sourcemap, it is considered an original,
     * unmodified source file.
     */
    function buildSourceMapTree(input, loader) {
        const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
        const map = maps.pop();
        for (let i = 0; i < maps.length; i++) {
            if (maps[i].sources.length > 1) {
                throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
                    'Did you specify these with the most recent transformation maps first?');
            }
        }
        let tree = build(map, loader, '', 0);
        for (let i = maps.length - 1; i >= 0; i--) {
            tree = MapSource(maps[i], [tree]);
        }
        return tree;
    }
    function build(map, loader, importer, importerDepth) {
        const { resolvedSources, sourcesContent, ignoreList } = map;
        const depth = importerDepth + 1;
        const children = resolvedSources.map((sourceFile, i) => {
            // The loading context gives the loader more information about why this file is being loaded
            // (eg, from which importer). It also allows the loader to override the location of the loaded
            // sourcemap/original source, or to override the content in the sourcesContent field if it's
            // an unmodified source file.
            const ctx = {
                importer,
                depth,
                source: sourceFile || '',
                content: undefined,
                ignore: undefined,
            };
            // Use the provided loader callback to retrieve the file's sourcemap.
            // TODO: We should eventually support async loading of sourcemap files.
            const sourceMap = loader(ctx.source, ctx);
            const { source, content, ignore } = ctx;
            // If there is a sourcemap, then we need to recurse into it to load its source files.
            if (sourceMap)
                return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
            // Else, it's an unmodified source file.
            // The contents of this unmodified source file can be overridden via the loader context,
            // allowing it to be explicitly null or a string. If it remains undefined, we fall back to
            // the importing sourcemap's `sourcesContent` field.
            const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
            const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
            return OriginalSource(source, sourceContent, ignored);
        });
        return MapSource(map, children);
    }

    /**
     * A SourceMap v3 compatible sourcemap, which only includes fields that were
     * provided to it.
     */
    class SourceMap {
        constructor(map, options) {
            const out = options.decodedMappings ? genMapping.toDecodedMap(map) : genMapping.toEncodedMap(map);
            this.version = out.version; // SourceMap spec says this should be first.
            this.file = out.file;
            this.mappings = out.mappings;
            this.names = out.names;
            this.ignoreList = out.ignoreList;
            this.sourceRoot = out.sourceRoot;
            this.sources = out.sources;
            if (!options.excludeContent) {
                this.sourcesContent = out.sourcesContent;
            }
        }
        toString() {
            return JSON.stringify(this);
        }
    }

    /**
     * Traces through all the mappings in the root sourcemap, through the sources
     * (and their sourcemaps), all the way back to the original source location.
     *
     * `loader` will be called every time we encounter a source file. If it returns
     * a sourcemap, we will recurse into that sourcemap to continue the trace. If
     * it returns a falsey value, that source file is treated as an original,
     * unmodified source file.
     *
     * Pass `excludeContent` to exclude any self-containing source file content
     * from the output sourcemap.
     *
     * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
     * VLQ encoded) mappings.
     */
    function remapping(input, loader, options) {
        const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
        const tree = buildSourceMapTree(input, loader);
        return new SourceMap(traceMappings(tree), opts);
    }

    return remapping;

}));
//# sourceMappingURL=remapping.umd.js.map

1  node_modules/@ampproject/remapping/dist/remapping.umd.js.map (generated, vendored)
File diff suppressed because one or more lines are too long

14  node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts (generated, vendored)

@@ -1,14 +0,0 @@
import type { MapSource as MapSourceType } from './source-map-tree';
import type { SourceMapInput, SourceMapLoader } from './types';
/**
 * Recursively builds a tree structure out of sourcemap files, with each node
 * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
 * `OriginalSource`s and `SourceMapTree`s.
 *
 * Every sourcemap is composed of a collection of source files and mappings
 * into locations of those source files. When we generate a `SourceMapTree` for
 * the sourcemap, we attempt to load each source file's own sourcemap. If it
 * does not have an associated sourcemap, it is considered an original,
 * unmodified source file.
 */
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;

20  node_modules/@ampproject/remapping/dist/types/remapping.d.ts (generated, vendored)

@@ -1,20 +0,0 @@
import SourceMap from './source-map';
import type { SourceMapInput, SourceMapLoader, Options } from './types';
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
export type { SourceMap };
/**
 * Traces through all the mappings in the root sourcemap, through the sources
 * (and their sourcemaps), all the way back to the original source location.
 *
 * `loader` will be called every time we encounter a source file. If it returns
 * a sourcemap, we will recurse into that sourcemap to continue the trace. If
 * it returns a falsey value, that source file is treated as an original,
 * unmodified source file.
 *
 * Pass `excludeContent` to exclude any self-containing source file content
 * from the output sourcemap.
 *
 * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
 * VLQ encoded) mappings.
 */
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;

45  node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts (generated, vendored)

@@ -1,45 +0,0 @@
import { GenMapping } from '@jridgewell/gen-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export declare type SourceMapSegmentObject = {
    column: number;
    line: number;
    name: string;
    source: string;
    content: string | null;
    ignore: boolean;
};
export declare type OriginalSource = {
    map: null;
    sources: Sources[];
    source: string;
    content: string | null;
    ignore: boolean;
};
export declare type MapSource = {
    map: TraceMap;
    sources: Sources[];
    source: string;
    content: null;
    ignore: false;
};
export declare type Sources = OriginalSource | MapSource;
/**
 * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
 * (which may themselves be SourceMapTrees).
 */
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
/**
 * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
 * segment tracing ends at the `OriginalSource`.
 */
export declare function OriginalSource(source: string, content: string | null, ignore: boolean): OriginalSource;
/**
 * traceMappings is only called on the root level SourceMapTree, and begins the process of
 * resolving each mapping in terms of the original source files.
 */
export declare function traceMappings(tree: MapSource): GenMapping;
/**
 * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
 * child SourceMapTrees, until we find the original source map.
 */
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;

18  node_modules/@ampproject/remapping/dist/types/source-map.d.ts (generated, vendored)

@@ -1,18 +0,0 @@
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
/**
 * A SourceMap v3 compatible sourcemap, which only includes fields that were
 * provided to it.
 */
export default class SourceMap {
    file?: string | null;
    mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
    sourceRoot?: string;
    names: string[];
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
    ignoreList: number[] | undefined;
    constructor(map: GenMapping, options: Options);
    toString(): string;
}

15  node_modules/@ampproject/remapping/dist/types/types.d.ts (generated, vendored)

@@ -1,15 +0,0 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export declare type LoaderContext = {
    readonly importer: string;
    readonly depth: number;
    source: string;
    content: string | null | undefined;
    ignore: boolean | undefined;
};
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
export declare type Options = {
    excludeContent?: boolean;
    decodedMappings?: boolean;
};

19  node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/LICENSE (generated, vendored)

@@ -1,19 +0,0 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
257
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
257
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
@@ -1,257 +0,0 @@
|
||||
# @jridgewell/trace-mapping
|
||||
|
||||
> Trace the original position through a source map
|
||||
|
||||
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
||||
original location in the source file through a source map.
|
||||
|
||||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
||||
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install @jridgewell/trace-mapping
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
import {
|
||||
TraceMap,
|
||||
originalPositionFor,
|
||||
generatedPositionFor,
|
||||
sourceContentFor,
|
||||
isIgnored,
|
||||
} from '@jridgewell/trace-mapping';
|
||||
|
||||
const tracer = new TraceMap({
|
||||
version: 3,
|
||||
sources: ['input.js'],
|
||||
sourcesContent: ['content of input.js'],
|
||||
names: ['foo'],
|
||||
mappings: 'KAyCIA',
|
||||
ignoreList: [],
|
||||
});
|
||||
|
||||
// Lines start at line 1, columns at column 0.
|
||||
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
||||
assert.deepEqual(traced, {
|
||||
source: 'input.js',
|
||||
line: 42,
|
||||
column: 4,
|
||||
name: 'foo',
|
||||
});
|
||||
|
||||
const content = sourceContentFor(tracer, traced.source);
|
||||
assert.strictEqual(content, 'content for input.js');

const generated = generatedPositionFor(tracer, {
  source: 'input.js',
  line: 42,
  column: 4,
});
assert.deepEqual(generated, {
  line: 1,
  column: 5,
});

const ignored = isIgnored(tracer, 'input.js');
assert.equal(ignored, false);
```
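
The package also exports an `allGeneratedPositionsFor` helper and the bias constants
`GREATEST_LOWER_BOUND` and `LEAST_UPPER_BOUND`. The sketch below is an editor's illustration
rather than part of the original README; it reuses the `tracer` built above:

```typescript
import {
  allGeneratedPositionsFor,
  LEAST_UPPER_BOUND,
} from '@jridgewell/trace-mapping';

// Collect every generated position that maps back to input.js:42:4. The
// single-segment map above yields exactly one.
const all = allGeneratedPositionsFor(tracer, {
  source: 'input.js',
  line: 42,
  column: 4,
});
assert.deepEqual(all, [{ line: 1, column: 5 }]);

// `originalPositionFor` and `generatedPositionFor` accept an optional `bias`:
// GREATEST_LOWER_BOUND (the default) resolves a missed column to the segment
// at or before it, LEAST_UPPER_BOUND to the segment at or after it. Column 3
// has no exact match, so the bias below resolves forward to the segment at
// column 5.
const biased = originalPositionFor(tracer, { line: 1, column: 3, bias: LEAST_UPPER_BOUND });
assert.deepEqual(biased, { source: 'input.js', line: 42, column: 4, name: 'foo' });
```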

We also provide a lower-level API to get the actual segment that matches our line and column. Unlike
`originalPositionFor`, `traceSegment` uses a 0-based `line`:

```typescript
import { traceSegment } from '@jridgewell/trace-mapping';

// line is 0-base.
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);

// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// Again, line is 0-base and so is sourceLine
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
```

### SectionedSourceMaps

The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
`TraceMap` instance:

```typescript
import { AnyMap } from '@jridgewell/trace-mapping';
const fooOutput = 'foo';
const barOutput = 'bar';
const output = [fooOutput, barOutput].join('\n');

const sectioned = new AnyMap({
  version: 3,
  sections: [
    {
      // 0-base line and column
      offset: { line: 0, column: 0 },
      // fooOutput's sourcemap
      map: {
        version: 3,
        sources: ['foo.js'],
        names: ['foo'],
        mappings: 'AAAAA',
      },
    },
    {
      // barOutput's sourcemap will not affect the first line, only the second
      offset: { line: 1, column: 0 },
      map: {
        version: 3,
        sources: ['bar.js'],
        names: ['bar'],
        mappings: 'AAAAA',
      },
    },
  ],
});

const traced = originalPositionFor(sectioned, {
  line: 2,
  column: 0,
});

assert.deepEqual(traced, {
  source: 'bar.js',
  line: 1,
  column: 0,
  name: 'bar',
});
```
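
For contrast, a position on the first line resolves through `fooOutput`'s section instead. This
follow-up is an editor's illustration rather than part of the original README, reusing the
`sectioned` map built above:

```typescript
// Line 1 is covered by the first section (offset { line: 0, column: 0 }),
// whose 'AAAAA' mapping points column 0 back to foo.js:1:0, named `foo`.
const tracedFoo = originalPositionFor(sectioned, {
  line: 1,
  column: 0,
});

assert.deepEqual(tracedFoo, {
  source: 'foo.js',
  line: 1,
  column: 0,
  name: 'foo',
});
```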

## Benchmarks

```
node v18.0.0

amp.js.map - 45120 segments

Memory Usage:
trace-mapping decoded 562400 bytes
trace-mapping encoded 5706544 bytes
source-map-js 10717664 bytes
source-map-0.6.1 17446384 bytes
source-map-0.8.0 9701757 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


babel.min.js.map - 347793 segments

Memory Usage:
trace-mapping decoded 89832 bytes
trace-mapping encoded 35474640 bytes
source-map-js 51257176 bytes
source-map-0.6.1 63515664 bytes
source-map-0.8.0 42933752 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


preact.js.map - 1992 segments

Memory Usage:
trace-mapping decoded 37128 bytes
trace-mapping encoded 247280 bytes
source-map-js 1143536 bytes
source-map-0.6.1 1290992 bytes
source-map-0.8.0 96544 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


react.js.map - 5726 segments

Memory Usage:
trace-mapping decoded 16176 bytes
trace-mapping encoded 681552 bytes
source-map-js 2418352 bytes
source-map-0.6.1 2443672 bytes
source-map-0.8.0 111768 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```

[source-map]: https://www.npmjs.com/package/source-map
@@ -1,580 +0,0 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolveUri from '@jridgewell/resolve-uri';

function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/'))
        base += '/';
    return resolveUri(input, base);
}

/**
 * Removes everything after the last "/", but leaves the slash.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const index = path.lastIndexOf('/');
    return path.slice(0, index + 1);
}

const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;

function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
    for (let i = start; i < mappings.length; i++) {
        if (!isSorted(mappings[i]))
            return i;
    }
    return mappings.length;
}
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        if (line[j][COLUMN] < line[j - 1][COLUMN]) {
            return false;
        }
    }
    return true;
}
function sortSegments(line, owned) {
    if (!owned)
        line = line.slice();
    return line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[COLUMN] - b[COLUMN];
}

let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    return low - 1;
}
function upperBound(haystack, needle, index) {
    for (let i = index + 1; i < haystack.length; index = i++) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
function lowerBound(haystack, needle, index) {
    for (let i = index - 1; i >= 0; index = i--) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
function memoizedState() {
    return {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
}
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        if (needle === lastNeedle) {
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}

// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
    const sources = memos.map(buildNullArray);
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            if (seg.length === 1)
                continue;
            const sourceIndex = seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            const originalSource = sources[sourceIndex];
            const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
            const memo = memos[sourceIndex];
            // The binary search either found a match, or it found the left-index just before where the
            // segment should go. Either way, we want to insert after that. And there may be multiple
            // generated segments associated with an original location, so we may need to move several
            // indexes before we find where we need to insert.
            let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
            memo.lastIndex = ++index;
            insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
        }
    }
    return sources;
}
function insert(array, index, value) {
    for (let i = array.length; i > index; i--) {
        array[i] = array[i - 1];
    }
    array[index] = value;
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
    return { __proto__: null };
}
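// Editor's illustration (not part of the original file): the ascending
// numeric-key iteration that buildNullArray relies on. Integer-like keys set
// out of order still come back in ascending numeric order under for-in:
//
//   const line = buildNullArray();
//   line[9] = 'ninth';
//   line[2] = 'second';
//   const keys = [];
//   for (const key in line) keys.push(key);
//   // keys is ['2', '9']: the engine orders integer-indexed keys ascending.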

const AnyMap = function (map, mapUrl) {
    const parsed = parse(map);
    if (!('sections' in parsed)) {
        return new TraceMap(parsed, mapUrl);
    }
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const ignoreList = [];
    recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
        ignoreList,
    };
    return presortedDecodedMap(joined);
};
function parse(map) {
    return typeof map === 'string' ? JSON.parse(map) : map;
}
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
    const { sections } = input;
    for (let i = 0; i < sections.length; i++) {
        const { map, offset } = sections[i];
        let sl = stopLine;
        let sc = stopColumn;
        if (i + 1 < sections.length) {
            const nextOffset = sections[i + 1].offset;
            sl = Math.min(stopLine, lineOffset + nextOffset.line);
            if (sl === stopLine) {
                sc = Math.min(stopColumn, columnOffset + nextOffset.column);
            }
            else if (sl < stopLine) {
                sc = columnOffset + nextOffset.column;
            }
        }
        addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
    }
}
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
    const parsed = parse(input);
    if ('sections' in parsed)
        return recurse(...arguments);
    const map = new TraceMap(parsed, mapUrl);
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
    append(sources, resolvedSources);
    append(names, map.names);
    if (contents)
        append(sourcesContent, contents);
    else
        for (let i = 0; i < resolvedSources.length; i++)
            sourcesContent.push(null);
    if (ignores)
        for (let i = 0; i < ignores.length; i++)
            ignoreList.push(ignores[i] + sourcesOffset);
    for (let i = 0; i < decoded.length; i++) {
        const lineI = lineOffset + i;
        // We can only add so many lines before we step into the range that the next section's map
        // controls. When we get to the last line, then we'll start checking the segments to see if
        // they've crossed into the column range. But it may not have any columns that overstep, so we
        // still need to check that we don't overstep lines, too.
        if (lineI > stopLine)
            return;
        // The out line may already exist in mappings (if we're continuing the line started by a
        // previous section). Or, we may have jumped ahead several lines to start this section.
        const out = getLine(mappings, lineI);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (lineI === stopLine && column >= stopColumn)
                return;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            out.push(seg.length === 4
                ? [column, sourcesIndex, sourceLine, sourceColumn]
                : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
function append(arr, other) {
    for (let i = 0; i < other.length; i++)
        arr.push(other[i]);
}
function getLine(arr, index) {
    for (let i = arr.length; i <= index; i++)
        arr[i] = [];
    return arr[index];
}

const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names || [];
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
/**
 * TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers.
 */
function cast(map) {
    return map;
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
function encodedMappings(map) {
    var _a;
    var _b;
    return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
}
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
function decodedMappings(map) {
    var _a;
    return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
}
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
function traceSegment(map, line, column) {
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return null;
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    return index === -1 ? null : segments[index];
}
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
function originalPositionFor(map, needle) {
    let { line, column, bias } = needle;
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return OMapping(null, null, null, null);
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
    if (index === -1)
        return OMapping(null, null, null, null);
    const segment = segments[index];
    if (segment.length === 1)
        return OMapping(null, null, null, null);
    const { names, resolvedSources } = map;
    return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
}
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
function generatedPositionFor(map, needle) {
    const { source, line, column, bias } = needle;
    return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
}
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
function allGeneratedPositionsFor(map, needle) {
    const { source, line, column, bias } = needle;
    // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
    return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
}
/**
 * Iterates each mapping in generated position order.
 */
function eachMapping(map, cb) {
    const decoded = decodedMappings(map);
    const { names, resolvedSources } = map;
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const generatedLine = i + 1;
            const generatedColumn = seg[0];
            let source = null;
            let originalLine = null;
            let originalColumn = null;
            let name = null;
            if (seg.length !== 1) {
                source = resolvedSources[seg[1]];
                originalLine = seg[2] + 1;
                originalColumn = seg[3];
            }
            if (seg.length === 5)
                name = names[seg[4]];
            cb({
                generatedLine,
                generatedColumn,
                source,
                originalLine,
                originalColumn,
                name,
            });
        }
    }
}
function sourceIndex(map, source) {
    const { sources, resolvedSources } = map;
    let index = sources.indexOf(source);
    if (index === -1)
        index = resolvedSources.indexOf(source);
    return index;
}
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
function sourceContentFor(map, source) {
    const { sourcesContent } = map;
    if (sourcesContent == null)
        return null;
    const index = sourceIndex(map, source);
    return index === -1 ? null : sourcesContent[index];
}
/**
 * Determines if the source is marked as ignored by the source map.
 */
function isIgnored(map, source) {
    const { ignoreList } = map;
    if (ignoreList == null)
        return false;
    const index = sourceIndex(map, source);
    return index === -1 ? false : ignoreList.includes(index);
}
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
function presortedDecodedMap(map, mapUrl) {
    const tracer = new TraceMap(clone(map, []), mapUrl);
    cast(tracer)._decoded = map.mappings;
    return tracer;
}
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function decodedMap(map) {
    return clone(map, decodedMappings(map));
}
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function encodedMap(map) {
    return clone(map, encodedMappings(map));
}
function clone(map, mappings) {
    return {
        version: map.version,
        file: map.file,
        names: map.names,
        sourceRoot: map.sourceRoot,
        sources: map.sources,
        sourcesContent: map.sourcesContent,
        mappings,
        ignoreList: map.ignoreList || map.x_google_ignoreList,
    };
}
function OMapping(source, line, column, name) {
    return { source, line, column, name };
}
function GMapping(line, column) {
    return { line, column };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
function sliceGeneratedPositions(segments, memo, line, column, bias) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
    // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
    // still need to call `lowerBound()` to find the first segment, which is slower than just looking
    // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
    // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
    // match LEAST_UPPER_BOUND.
    if (!found && bias === LEAST_UPPER_BOUND)
        min++;
    if (min === -1 || min === segments.length)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    const result = [];
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}
function generatedPosition(map, source, line, column, bias, all) {
    var _a;
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const { sources, resolvedSources } = map;
    let sourceIndex = sources.indexOf(source);
    if (sourceIndex === -1)
        sourceIndex = resolvedSources.indexOf(source);
    if (sourceIndex === -1)
        return all ? [] : GMapping(null, null);
    const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
    const segments = generated[sourceIndex][line];
    if (segments == null)
        return all ? [] : GMapping(null, null);
    const memo = cast(map)._bySourceMemos[sourceIndex];
    if (all)
        return sliceGeneratedPositions(segments, memo, line, column, bias);
    const index = traceSegmentInternal(segments, memo, line, column, bias);
    if (index === -1)
        return GMapping(null, null);
    const segment = segments[index];
    return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}

export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
//# sourceMappingURL=trace-mapping.mjs.map
File diff suppressed because one or more lines are too long
@@ -1,600 +0,0 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
    typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';

    function resolve(input, base) {
        // The base is always treated as a directory, if it's not empty.
        // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
        // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
        if (base && !base.endsWith('/'))
            base += '/';
        return resolveUri(input, base);
    }

    /**
     * Removes everything after the last "/", but leaves the slash.
     */
    function stripFilename(path) {
        if (!path)
            return '';
        const index = path.lastIndexOf('/');
        return path.slice(0, index + 1);
    }

    const COLUMN = 0;
    const SOURCES_INDEX = 1;
    const SOURCE_LINE = 2;
    const SOURCE_COLUMN = 3;
    const NAMES_INDEX = 4;
    const REV_GENERATED_LINE = 1;
    const REV_GENERATED_COLUMN = 2;

    function maybeSort(mappings, owned) {
        const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
        if (unsortedIndex === mappings.length)
            return mappings;
        // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
        // not, we do not want to modify the consumer's input array.
        if (!owned)
            mappings = mappings.slice();
        for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
            mappings[i] = sortSegments(mappings[i], owned);
        }
        return mappings;
    }
    function nextUnsortedSegmentLine(mappings, start) {
        for (let i = start; i < mappings.length; i++) {
            if (!isSorted(mappings[i]))
                return i;
        }
        return mappings.length;
    }
    function isSorted(line) {
        for (let j = 1; j < line.length; j++) {
            if (line[j][COLUMN] < line[j - 1][COLUMN]) {
                return false;
            }
        }
        return true;
    }
    function sortSegments(line, owned) {
        if (!owned)
            line = line.slice();
        return line.sort(sortComparator);
    }
    function sortComparator(a, b) {
        return a[COLUMN] - b[COLUMN];
    }

    let found = false;
    /**
     * A binary search implementation that returns the index if a match is found.
     * If no match is found, then the left-index (the index associated with the item that comes just
     * before the desired index) is returned. To maintain proper sort order, a splice would happen at
     * the next index:
     *
     * ```js
     * const array = [1, 3];
     * const needle = 2;
     * const index = binarySearch(array, needle, (item, needle) => item - needle);
     *
     * assert.equal(index, 0);
     * array.splice(index + 1, 0, needle);
     * assert.deepEqual(array, [1, 2, 3]);
     * ```
     */
    function binarySearch(haystack, needle, low, high) {
        while (low <= high) {
            const mid = low + ((high - low) >> 1);
            const cmp = haystack[mid][COLUMN] - needle;
            if (cmp === 0) {
                found = true;
                return mid;
            }
            if (cmp < 0) {
                low = mid + 1;
            }
            else {
                high = mid - 1;
            }
        }
        found = false;
        return low - 1;
    }
    function upperBound(haystack, needle, index) {
        for (let i = index + 1; i < haystack.length; index = i++) {
            if (haystack[i][COLUMN] !== needle)
                break;
        }
        return index;
    }
    function lowerBound(haystack, needle, index) {
        for (let i = index - 1; i >= 0; index = i--) {
            if (haystack[i][COLUMN] !== needle)
                break;
        }
        return index;
    }
    function memoizedState() {
        return {
            lastKey: -1,
            lastNeedle: -1,
            lastIndex: -1,
        };
    }
    /**
     * This overly complicated beast is just to record the last tested line/column and the resulting
     * index, allowing us to skip a few tests if mappings are monotonically increasing.
     */
    function memoizedBinarySearch(haystack, needle, state, key) {
        const { lastKey, lastNeedle, lastIndex } = state;
        let low = 0;
        let high = haystack.length - 1;
        if (key === lastKey) {
            if (needle === lastNeedle) {
                found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
                return lastIndex;
            }
            if (needle >= lastNeedle) {
                // lastIndex may be -1 if the previous needle was not found.
                low = lastIndex === -1 ? 0 : lastIndex;
            }
            else {
                high = lastIndex;
            }
        }
        state.lastKey = key;
        state.lastNeedle = needle;
        return (state.lastIndex = binarySearch(haystack, needle, low, high));
    }

    // Rebuilds the original source files, with mappings that are ordered by source line/column instead
    // of generated line/column.
    function buildBySources(decoded, memos) {
        const sources = memos.map(buildNullArray);
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                if (seg.length === 1)
                    continue;
                const sourceIndex = seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                const originalSource = sources[sourceIndex];
                const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
                const memo = memos[sourceIndex];
                // The binary search either found a match, or it found the left-index just before where the
                // segment should go. Either way, we want to insert after that. And there may be multiple
                // generated segments associated with an original location, so we may need to move several
                // indexes before we find where we need to insert.
                let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
                memo.lastIndex = ++index;
                insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
            }
        }
        return sources;
    }
    function insert(array, index, value) {
        for (let i = array.length; i > index; i--) {
            array[i] = array[i - 1];
        }
        array[index] = value;
    }
    // Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
    // a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
    // Numeric properties on objects are magically sorted in ascending order by the engine regardless of
    // the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
    // order when iterating with for-in.
    function buildNullArray() {
        return { __proto__: null };
    }

    const AnyMap = function (map, mapUrl) {
        const parsed = parse(map);
        if (!('sections' in parsed)) {
            return new TraceMap(parsed, mapUrl);
        }
        const mappings = [];
        const sources = [];
        const sourcesContent = [];
        const names = [];
        const ignoreList = [];
        recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
        const joined = {
            version: 3,
            file: parsed.file,
            names,
            sources,
            sourcesContent,
            mappings,
            ignoreList,
        };
        return presortedDecodedMap(joined);
    };
    function parse(map) {
        return typeof map === 'string' ? JSON.parse(map) : map;
    }
    function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
        const { sections } = input;
        for (let i = 0; i < sections.length; i++) {
            const { map, offset } = sections[i];
            let sl = stopLine;
            let sc = stopColumn;
            if (i + 1 < sections.length) {
                const nextOffset = sections[i + 1].offset;
                sl = Math.min(stopLine, lineOffset + nextOffset.line);
                if (sl === stopLine) {
                    sc = Math.min(stopColumn, columnOffset + nextOffset.column);
                }
                else if (sl < stopLine) {
                    sc = columnOffset + nextOffset.column;
                }
            }
            addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
        }
    }
    function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
        const parsed = parse(input);
        if ('sections' in parsed)
            return recurse(...arguments);
        const map = new TraceMap(parsed, mapUrl);
        const sourcesOffset = sources.length;
        const namesOffset = names.length;
        const decoded = decodedMappings(map);
        const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
        append(sources, resolvedSources);
        append(names, map.names);
        if (contents)
            append(sourcesContent, contents);
        else
            for (let i = 0; i < resolvedSources.length; i++)
                sourcesContent.push(null);
        if (ignores)
            for (let i = 0; i < ignores.length; i++)
                ignoreList.push(ignores[i] + sourcesOffset);
        for (let i = 0; i < decoded.length; i++) {
            const lineI = lineOffset + i;
            // We can only add so many lines before we step into the range that the next section's map
            // controls. When we get to the last line, then we'll start checking the segments to see if
            // they've crossed into the column range. But it may not have any columns that overstep, so we
            // still need to check that we don't overstep lines, too.
            if (lineI > stopLine)
                return;
            // The out line may already exist in mappings (if we're continuing the line started by a
            // previous section). Or, we may have jumped ahead several lines to start this section.
            const out = getLine(mappings, lineI);
            // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
            // map can be multiple lines), it doesn't.
            const cOffset = i === 0 ? columnOffset : 0;
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const column = cOffset + seg[COLUMN];
                // If this segment steps into the column range that the next section's map controls, we need
                // to stop early.
                if (lineI === stopLine && column >= stopColumn)
                    return;
                if (seg.length === 1) {
                    out.push([column]);
                    continue;
                }
                const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                out.push(seg.length === 4
                    ? [column, sourcesIndex, sourceLine, sourceColumn]
                    : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
            }
        }
    }
    function append(arr, other) {
        for (let i = 0; i < other.length; i++)
            arr.push(other[i]);
    }
    function getLine(arr, index) {
        for (let i = arr.length; i <= index; i++)
            arr[i] = [];
        return arr[index];
    }

    const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
    const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
    const LEAST_UPPER_BOUND = -1;
    const GREATEST_LOWER_BOUND = 1;
    class TraceMap {
        constructor(map, mapUrl) {
            const isString = typeof map === 'string';
            if (!isString && map._decodedMemo)
                return map;
            const parsed = (isString ? JSON.parse(map) : map);
            const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
            this.version = version;
            this.file = file;
            this.names = names || [];
            this.sourceRoot = sourceRoot;
            this.sources = sources;
            this.sourcesContent = sourcesContent;
            this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
            const { mappings } = parsed;
            if (typeof mappings === 'string') {
                this._encoded = mappings;
                this._decoded = undefined;
            }
            else {
                this._encoded = undefined;
                this._decoded = maybeSort(mappings, isString);
            }
            this._decodedMemo = memoizedState();
            this._bySources = undefined;
            this._bySourceMemos = undefined;
        }
    }
    /**
     * TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
     * with public access modifiers.
     */
    function cast(map) {
        return map;
    }
    /**
     * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
     */
    function encodedMappings(map) {
        var _a;
        var _b;
        return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
    }
    /**
     * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
     */
    function decodedMappings(map) {
        var _a;
        return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
    }
    /**
     * A low-level API to find the segment associated with a generated line/column (think, from a
     * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
     */
    function traceSegment(map, line, column) {
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        const segments = decoded[line];
        const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
        return index === -1 ? null : segments[index];
    }
    /**
     * A higher-level API to find the source/line/column associated with a generated line/column
     * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
     * `source-map` library.
     */
    function originalPositionFor(map, needle) {
        let { line, column, bias } = needle;
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return OMapping(null, null, null, null);
        const segments = decoded[line];
        const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (index === -1)
            return OMapping(null, null, null, null);
        const segment = segments[index];
        if (segment.length === 1)
            return OMapping(null, null, null, null);
        const { names, resolvedSources } = map;
        return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
    }
    /**
     * Finds the generated line/column position of the provided source/line/column source position.
     */
    function generatedPositionFor(map, needle) {
        const { source, line, column, bias } = needle;
        return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
    }
    /**
     * Finds all generated line/column positions of the provided source/line/column source position.
     */
    function allGeneratedPositionsFor(map, needle) {
        const { source, line, column, bias } = needle;
        // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
        return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
    }
    /**
     * Iterates each mapping in generated position order.
     */
    function eachMapping(map, cb) {
        const decoded = decodedMappings(map);
        const { names, resolvedSources } = map;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    }
    function sourceIndex(map, source) {
        const { sources, resolvedSources } = map;
        let index = sources.indexOf(source);
        if (index === -1)
            index = resolvedSources.indexOf(source);
        return index;
    }
    /**
     * Retrieves the source content for a particular source, if it's found. Returns null if not.
     */
    function sourceContentFor(map, source) {
        const { sourcesContent } = map;
        if (sourcesContent == null)
            return null;
        const index = sourceIndex(map, source);
        return index === -1 ? null : sourcesContent[index];
    }
    /**
     * Determines if the source is marked as ignored by the source map.
     */
    function isIgnored(map, source) {
        const { ignoreList } = map;
        if (ignoreList == null)
            return false;
        const index = sourceIndex(map, source);
        return index === -1 ? false : ignoreList.includes(index);
    }
    /**
     * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
     * maps.
     */
    function presortedDecodedMap(map, mapUrl) {
        const tracer = new TraceMap(clone(map, []), mapUrl);
        cast(tracer)._decoded = map.mappings;
        return tracer;
    }
    /**
     * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    function decodedMap(map) {
        return clone(map, decodedMappings(map));
    }
    /**
     * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    function encodedMap(map) {
        return clone(map, encodedMappings(map));
    }
    function clone(map, mappings) {
        return {
            version: map.version,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings,
            ignoreList: map.ignoreList || map.x_google_ignoreList,
        };
    }
    function OMapping(source, line, column, name) {
        return { source, line, column, name };
    }
    function GMapping(line, column) {
        return { line, column };
    }
    function traceSegmentInternal(segments, memo, line, column, bias) {
        let index = memoizedBinarySearch(segments, column, memo, line);
        if (found) {
            index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
        }
        else if (bias === LEAST_UPPER_BOUND)
            index++;
        if (index === -1 || index === segments.length)
            return -1;
        return index;
    }
    function sliceGeneratedPositions(segments, memo, line, column, bias) {
        let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
        // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
        // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
        // still need to call `lowerBound()` to find the first segment, which is slower than just looking
        // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
        // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
        // match LEAST_UPPER_BOUND.
        if (!found && bias === LEAST_UPPER_BOUND)
            min++;
        if (min === -1 || min === segments.length)
            return [];
        // We may have found the segment that started at an earlier column. If this is the case, then we
        // need to slice all generated segments that match _that_ column, because all such segments span
        // to our desired column.
        const matchedColumn = found ? column : segments[min][COLUMN];
        // The binary search is not guaranteed to find the lower bound when a match wasn't found.
        if (!found)
            min = lowerBound(segments, matchedColumn, min);
        const max = upperBound(segments, matchedColumn, min);
        const result = [];
        for (; min <= max; min++) {
            const segment = segments[min];
            result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
        }
        return result;
    }
    function generatedPosition(map, source, line, column, bias, all) {
        var _a;
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return all ? [] : GMapping(null, null);
        const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return all ? [] : GMapping(null, null);
        const memo = cast(map)._bySourceMemos[sourceIndex];
        if (all)
            return sliceGeneratedPositions(segments, memo, line, column, bias);
        const index = traceSegmentInternal(segments, memo, line, column, bias);
        if (index === -1)
            return GMapping(null, null);
        const segment = segments[index];
        return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
    }

    exports.AnyMap = AnyMap;
    exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
    exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
    exports.TraceMap = TraceMap;
    exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
    exports.decodedMap = decodedMap;
    exports.decodedMappings = decodedMappings;
    exports.eachMapping = eachMapping;
    exports.encodedMap = encodedMap;
    exports.encodedMappings = encodedMappings;
    exports.generatedPositionFor = generatedPositionFor;
    exports.isIgnored = isIgnored;
    exports.originalPositionFor = originalPositionFor;
    exports.presortedDecodedMap = presortedDecodedMap;
    exports.sourceContentFor = sourceContentFor;
    exports.traceSegment = traceSegment;

}));
//# sourceMappingURL=trace-mapping.umd.js.map
File diff suppressed because one or more lines are too long
@@ -1,8 +0,0 @@
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
@@ -1,32 +0,0 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
export type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
@@ -1,7 +0,0 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
@@ -1 +0,0 @@
export default function resolve(input: string, base: string | undefined): string;
@@ -1,2 +0,0 @@
import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
@@ -1,16 +0,0 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type GeneratedLine = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
@@ -1,4 +0,0 @@
/**
 * Removes everything after the last "/", but leaves the slash.
 */
export default function stripFilename(path: string | undefined | null): string;
@@ -1,79 +0,0 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
export { AnyMap } from './any-map';
export declare class TraceMap implements SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    ignoreList: SourceMapV3['ignoreList'];
    resolvedSources: string[];
    private _encoded;
    private _decoded;
    private _decodedMemo;
    private _bySources;
    private _bySourceMemos;
    constructor(map: SourceMapInput, mapUrl?: string | null);
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
/**
 * Iterates each mapping in generated position order.
 */
export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
/**
 * Retrieves the source content for a particular source, if its found. Returns null if not.
 */
export declare function sourceContentFor(map: TraceMap, source: string): string | null;
/**
 * Determines if the source is marked to ignore by the source map.
 */
export declare function isIgnored(map: TraceMap, source: string): boolean;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
    mappings: readonly SourceMapSegment[][];
};
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function encodedMap(map: TraceMap): EncodedSourceMap;
@@ -1,99 +0,0 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
export interface SourceMapV3 {
    file?: string | null;
    names: string[];
    sourceRoot?: string;
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
    ignoreList?: number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: SourceMapSegment[][];
}
export interface Section {
    offset: {
        line: number;
        column: number;
    };
    map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
    file?: string | null;
    sections: Section[];
    version: 3;
}
export type OriginalMapping = {
    source: string | null;
    line: number;
    column: number;
    name: string | null;
};
export type InvalidOriginalMapping = {
    source: null;
    line: null;
    column: null;
    name: null;
};
export type GeneratedMapping = {
    line: number;
    column: number;
};
export type InvalidGeneratedMapping = {
    line: null;
    column: null;
};
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
export type XInput = {
    x_google_ignoreList?: SourceMapV3['ignoreList'];
};
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
    sections: SectionXInput[];
};
export type SectionXInput = Omit<Section, 'map'> & {
    map: SectionedSourceMapInput;
};
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
export type Needle = {
    line: number;
    column: number;
    bias?: Bias;
};
export type SourceNeedle = {
    source: string;
    line: number;
    column: number;
    bias?: Bias;
};
export type EachMapping = {
    generatedLine: number;
    generatedColumn: number;
    source: null;
    originalLine: null;
    originalColumn: null;
    name: null;
} | {
    generatedLine: number;
    generatedColumn: number;
    source: string | null;
    originalLine: number;
    originalColumn: number;
    name: string | null;
};
export declare abstract class SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: SourceMapV3['sources'];
    ignoreList: SourceMapV3['ignoreList'];
}
77
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
@@ -1,77 +0,0 @@
{
  "name": "@jridgewell/trace-mapping",
  "version": "0.3.25",
  "description": "Trace the original position through a source map",
  "keywords": [
    "source",
    "map"
  ],
  "main": "dist/trace-mapping.umd.js",
  "module": "dist/trace-mapping.mjs",
  "types": "dist/types/trace-mapping.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/trace-mapping.d.ts",
        "browser": "./dist/trace-mapping.umd.js",
        "require": "./dist/trace-mapping.umd.js",
        "import": "./dist/trace-mapping.mjs"
      },
      "./dist/trace-mapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/trace-mapping.git"
  },
  "license": "MIT",
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.mjs",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.mjs",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "11.1.6",
    "@types/mocha": "10.0.6",
    "@types/node": "20.11.20",
    "@typescript-eslint/eslint-plugin": "6.18.1",
    "@typescript-eslint/parser": "6.18.1",
    "benchmark": "2.1.4",
    "c8": "9.0.0",
    "esbuild": "0.19.11",
    "eslint": "8.56.0",
    "eslint-config-prettier": "9.1.0",
    "eslint-plugin-no-only-tests": "3.1.0",
    "mocha": "10.3.0",
    "npm-run-all": "4.1.5",
    "prettier": "3.1.1",
    "rollup": "4.9.4",
    "tsx": "4.7.0",
    "typescript": "5.3.3"
  },
  "dependencies": {
    "@jridgewell/resolve-uri": "^3.1.0",
    "@jridgewell/sourcemap-codec": "^1.4.14"
  }
}
75
node_modules/@ampproject/remapping/package.json
generated
vendored
@@ -1,75 +0,0 @@
{
  "name": "@ampproject/remapping",
  "version": "2.3.0",
  "description": "Remap sequential sourcemaps through transformations to point at the original source code",
  "keywords": [
    "source",
    "map",
    "remap"
  ],
  "main": "dist/remapping.umd.js",
  "module": "dist/remapping.mjs",
  "types": "dist/types/remapping.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/remapping.d.ts",
        "browser": "./dist/remapping.umd.js",
        "require": "./dist/remapping.umd.js",
        "import": "./dist/remapping.mjs"
      },
      "./dist/remapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "author": "Justin Ridgewell <jridgewell@google.com>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/ampproject/remapping.git"
  },
  "license": "Apache-2.0",
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "jest --coverage",
    "test:watch": "jest --coverage --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.2",
    "@types/jest": "27.4.1",
    "@typescript-eslint/eslint-plugin": "5.20.0",
    "@typescript-eslint/parser": "5.20.0",
    "eslint": "8.14.0",
    "eslint-config-prettier": "8.5.0",
    "jest": "27.5.1",
    "jest-config": "27.5.1",
    "npm-run-all": "4.1.5",
    "prettier": "2.6.2",
    "rollup": "2.70.2",
    "ts-jest": "27.1.4",
    "tslib": "2.4.0",
    "typescript": "4.6.3"
  },
  "dependencies": {
    "@jridgewell/gen-mapping": "^0.3.5",
    "@jridgewell/trace-mapping": "^0.3.24"
  }
}
22
node_modules/@babel/helper-string-parser/LICENSE
generated
vendored
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19
node_modules/@babel/helper-string-parser/README.md
generated
vendored
@@ -1,19 +0,0 @@
# @babel/helper-string-parser

> A utility package to parse strings

See our website [@babel/helper-string-parser](https://babeljs.io/docs/babel-helper-string-parser) for more information.

## Install

Using npm:

```sh
npm install --save @babel/helper-string-parser
```

or using yarn:

```sh
yarn add @babel/helper-string-parser
```
295
node_modules/@babel/helper-string-parser/lib/index.js
generated
vendored
@@ -1,295 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.readCodePoint = readCodePoint;
exports.readInt = readInt;
exports.readStringContents = readStringContents;
var _isDigit = function isDigit(code) {
  return code >= 48 && code <= 57;
};
const forbiddenNumericSeparatorSiblings = {
  decBinOct: new Set([46, 66, 69, 79, 95, 98, 101, 111]),
  hex: new Set([46, 88, 95, 120])
};
const isAllowedNumericSeparatorSibling = {
  bin: ch => ch === 48 || ch === 49,
  oct: ch => ch >= 48 && ch <= 55,
  dec: ch => ch >= 48 && ch <= 57,
  hex: ch => ch >= 48 && ch <= 57 || ch >= 65 && ch <= 70 || ch >= 97 && ch <= 102
};
function readStringContents(type, input, pos, lineStart, curLine, errors) {
  const initialPos = pos;
  const initialLineStart = lineStart;
  const initialCurLine = curLine;
  let out = "";
  let firstInvalidLoc = null;
  let chunkStart = pos;
  const {
    length
  } = input;
  for (;;) {
    if (pos >= length) {
      errors.unterminated(initialPos, initialLineStart, initialCurLine);
      out += input.slice(chunkStart, pos);
      break;
    }
    const ch = input.charCodeAt(pos);
    if (isStringEnd(type, ch, input, pos)) {
      out += input.slice(chunkStart, pos);
      break;
    }
    if (ch === 92) {
      out += input.slice(chunkStart, pos);
      const res = readEscapedChar(input, pos, lineStart, curLine, type === "template", errors);
      if (res.ch === null && !firstInvalidLoc) {
        firstInvalidLoc = {
          pos,
          lineStart,
          curLine
        };
      } else {
        out += res.ch;
      }
      ({
        pos,
        lineStart,
        curLine
      } = res);
      chunkStart = pos;
    } else if (ch === 8232 || ch === 8233) {
      ++pos;
      ++curLine;
      lineStart = pos;
    } else if (ch === 10 || ch === 13) {
      if (type === "template") {
        out += input.slice(chunkStart, pos) + "\n";
        ++pos;
        if (ch === 13 && input.charCodeAt(pos) === 10) {
          ++pos;
        }
        ++curLine;
        chunkStart = lineStart = pos;
      } else {
        errors.unterminated(initialPos, initialLineStart, initialCurLine);
      }
    } else {
      ++pos;
    }
  }
  return {
    pos,
    str: out,
    firstInvalidLoc,
    lineStart,
    curLine,
    containsInvalid: !!firstInvalidLoc
  };
}
function isStringEnd(type, ch, input, pos) {
  if (type === "template") {
    return ch === 96 || ch === 36 && input.charCodeAt(pos + 1) === 123;
  }
  return ch === (type === "double" ? 34 : 39);
}
function readEscapedChar(input, pos, lineStart, curLine, inTemplate, errors) {
  const throwOnInvalid = !inTemplate;
  pos++;
  const res = ch => ({
    pos,
    ch,
    lineStart,
    curLine
  });
  const ch = input.charCodeAt(pos++);
  switch (ch) {
    case 110:
      return res("\n");
    case 114:
      return res("\r");
    case 120:
      {
        let code;
        ({
          code,
          pos
        } = readHexChar(input, pos, lineStart, curLine, 2, false, throwOnInvalid, errors));
        return res(code === null ? null : String.fromCharCode(code));
      }
    case 117:
      {
        let code;
        ({
          code,
          pos
        } = readCodePoint(input, pos, lineStart, curLine, throwOnInvalid, errors));
        return res(code === null ? null : String.fromCodePoint(code));
      }
    case 116:
      return res("\t");
    case 98:
      return res("\b");
    case 118:
      return res("\u000b");
    case 102:
      return res("\f");
    case 13:
      if (input.charCodeAt(pos) === 10) {
        ++pos;
      }
    case 10:
      lineStart = pos;
      ++curLine;
    case 8232:
    case 8233:
      return res("");
    case 56:
    case 57:
      if (inTemplate) {
        return res(null);
      } else {
        errors.strictNumericEscape(pos - 1, lineStart, curLine);
      }
    default:
      if (ch >= 48 && ch <= 55) {
        const startPos = pos - 1;
        const match = /^[0-7]+/.exec(input.slice(startPos, pos + 2));
        let octalStr = match[0];
        let octal = parseInt(octalStr, 8);
        if (octal > 255) {
          octalStr = octalStr.slice(0, -1);
          octal = parseInt(octalStr, 8);
        }
        pos += octalStr.length - 1;
        const next = input.charCodeAt(pos);
        if (octalStr !== "0" || next === 56 || next === 57) {
          if (inTemplate) {
            return res(null);
          } else {
            errors.strictNumericEscape(startPos, lineStart, curLine);
          }
        }
        return res(String.fromCharCode(octal));
      }
      return res(String.fromCharCode(ch));
  }
}
function readHexChar(input, pos, lineStart, curLine, len, forceLen, throwOnInvalid, errors) {
  const initialPos = pos;
  let n;
  ({
    n,
    pos
  } = readInt(input, pos, lineStart, curLine, 16, len, forceLen, false, errors, !throwOnInvalid));
  if (n === null) {
    if (throwOnInvalid) {
      errors.invalidEscapeSequence(initialPos, lineStart, curLine);
    } else {
      pos = initialPos - 1;
    }
  }
  return {
    code: n,
    pos
  };
}
function readInt(input, pos, lineStart, curLine, radix, len, forceLen, allowNumSeparator, errors, bailOnError) {
  const start = pos;
  const forbiddenSiblings = radix === 16 ? forbiddenNumericSeparatorSiblings.hex : forbiddenNumericSeparatorSiblings.decBinOct;
  const isAllowedSibling = radix === 16 ? isAllowedNumericSeparatorSibling.hex : radix === 10 ? isAllowedNumericSeparatorSibling.dec : radix === 8 ? isAllowedNumericSeparatorSibling.oct : isAllowedNumericSeparatorSibling.bin;
  let invalid = false;
  let total = 0;
  for (let i = 0, e = len == null ? Infinity : len; i < e; ++i) {
    const code = input.charCodeAt(pos);
    let val;
    if (code === 95 && allowNumSeparator !== "bail") {
      const prev = input.charCodeAt(pos - 1);
      const next = input.charCodeAt(pos + 1);
      if (!allowNumSeparator) {
        if (bailOnError) return {
          n: null,
          pos
        };
        errors.numericSeparatorInEscapeSequence(pos, lineStart, curLine);
      } else if (Number.isNaN(next) || !isAllowedSibling(next) || forbiddenSiblings.has(prev) || forbiddenSiblings.has(next)) {
        if (bailOnError) return {
          n: null,
          pos
        };
        errors.unexpectedNumericSeparator(pos, lineStart, curLine);
      }
      ++pos;
      continue;
    }
    if (code >= 97) {
      val = code - 97 + 10;
    } else if (code >= 65) {
      val = code - 65 + 10;
    } else if (_isDigit(code)) {
      val = code - 48;
    } else {
      val = Infinity;
    }
    if (val >= radix) {
      if (val <= 9 && bailOnError) {
        return {
          n: null,
          pos
        };
      } else if (val <= 9 && errors.invalidDigit(pos, lineStart, curLine, radix)) {
        val = 0;
      } else if (forceLen) {
        val = 0;
        invalid = true;
      } else {
        break;
      }
    }
    ++pos;
    total = total * radix + val;
  }
  if (pos === start || len != null && pos - start !== len || invalid) {
    return {
      n: null,
      pos
    };
  }
  return {
    n: total,
    pos
  };
}
function readCodePoint(input, pos, lineStart, curLine, throwOnInvalid, errors) {
  const ch = input.charCodeAt(pos);
  let code;
  if (ch === 123) {
    ++pos;
    ({
      code,
      pos
    } = readHexChar(input, pos, lineStart, curLine, input.indexOf("}", pos) - pos, true, throwOnInvalid, errors));
    ++pos;
    if (code !== null && code > 0x10ffff) {
      if (throwOnInvalid) {
        errors.invalidCodePoint(pos, lineStart, curLine);
      } else {
        return {
          code: null,
          pos
        };
      }
    }
  } else {
    ({
      code,
      pos
    } = readHexChar(input, pos, lineStart, curLine, 4, false, throwOnInvalid, errors));
  }
  return {
    code,
    pos
  };
}

//# sourceMappingURL=index.js.map
1
node_modules/@babel/helper-string-parser/lib/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
31
node_modules/@babel/helper-string-parser/package.json
generated
vendored
@@ -1,31 +0,0 @@
{
  "name": "@babel/helper-string-parser",
  "version": "7.25.9",
  "description": "A utility package to parse strings",
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-helper-string-parser"
  },
  "homepage": "https://babel.dev/docs/en/next/babel-helper-string-parser",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "main": "./lib/index.js",
  "devDependencies": {
    "charcodes": "^0.2.0"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "author": "The Babel Team (https://babel.dev/team)",
  "exports": {
    ".": {
      "types": "./lib/index.d.ts",
      "default": "./lib/index.js"
    },
    "./package.json": "./package.json"
  },
  "type": "commonjs"
}
22
node_modules/@babel/helper-validator-identifier/LICENSE
generated
vendored
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19
node_modules/@babel/helper-validator-identifier/README.md
generated
vendored
@@ -1,19 +0,0 @@
# @babel/helper-validator-identifier

> Validate identifier/keywords name

See our website [@babel/helper-validator-identifier](https://babeljs.io/docs/babel-helper-validator-identifier) for more information.

## Install

Using npm:

```sh
npm install --save @babel/helper-validator-identifier
```

or using yarn:

```sh
yarn add @babel/helper-validator-identifier
```
70
node_modules/@babel/helper-validator-identifier/lib/identifier.js
generated
vendored
@@ -1,70 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.isIdentifierChar = isIdentifierChar;
exports.isIdentifierName = isIdentifierName;
exports.isIdentifierStart = isIdentifierStart;
let nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u0870-\u0887\u0889-\u088e\u08a0-\u08c9\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u09fc\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c5d\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cdd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d04-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e86-\u0e8a\u0e8c-\u0ea3\u0ea5\u0ea7-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u1711\u171f-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4c\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c8a\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf3\u1cf5\u1cf6\u1cfa\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31bf\u31f0-\u31ff\u3400-\u4dbf\u4e00-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7cd\ua7d0\ua7d1\ua7d3\ua7d5-\ua7dc\ua7f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab69\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc";
let nonASCIIidentifierChars = "\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u0897-\u089f\u08ca-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u09fe\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0afa-\u0aff\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b55-\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c04\u0c3c\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0cf3\u0d00-\u0d03\u0d3b\u0d3c\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d81-\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0ebc\u0ec8-\u0ece\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1715\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u180f-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1abf-\u1ace\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf4\u1cf7-\u1cf9\u1dc0-\u1dff\u200c\u200d\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\u30fb\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua82c\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua8ff-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f\uff65";
const nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]");
const nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]");
nonASCIIidentifierStartChars = nonASCIIidentifierChars = null;
const astralIdentifierStartCodes = [0, 11, 2, 25, 2, 18, 2, 1, 2, 14, 3, 13, 35, 122, 70, 52, 268, 28, 4, 48, 48, 31, 14, 29, 6, 37, 11, 29, 3, 35, 5, 7, 2, 4, 43, 157, 19, 35, 5, 35, 5, 39, 9, 51, 13, 10, 2, 14, 2, 6, 2, 1, 2, 10, 2, 14, 2, 6, 2, 1, 4, 51, 13, 310, 10, 21, 11, 7, 25, 5, 2, 41, 2, 8, 70, 5, 3, 0, 2, 43, 2, 1, 4, 0, 3, 22, 11, 22, 10, 30, 66, 18, 2, 1, 11, 21, 11, 25, 71, 55, 7, 1, 65, 0, 16, 3, 2, 2, 2, 28, 43, 28, 4, 28, 36, 7, 2, 27, 28, 53, 11, 21, 11, 18, 14, 17, 111, 72, 56, 50, 14, 50, 14, 35, 39, 27, 10, 22, 251, 41, 7, 1, 17, 2, 60, 28, 11, 0, 9, 21, 43, 17, 47, 20, 28, 22, 13, 52, 58, 1, 3, 0, 14, 44, 33, 24, 27, 35, 30, 0, 3, 0, 9, 34, 4, 0, 13, 47, 15, 3, 22, 0, 2, 0, 36, 17, 2, 24, 20, 1, 64, 6, 2, 0, 2, 3, 2, 14, 2, 9, 8, 46, 39, 7, 3, 1, 3, 21, 2, 6, 2, 1, 2, 4, 4, 0, 19, 0, 13, 4, 31, 9, 2, 0, 3, 0, 2, 37, 2, 0, 26, 0, 2, 0, 45, 52, 19, 3, 21, 2, 31, 47, 21, 1, 2, 0, 185, 46, 42, 3, 37, 47, 21, 0, 60, 42, 14, 0, 72, 26, 38, 6, 186, 43, 117, 63, 32, 7, 3, 0, 3, 7, 2, 1, 2, 23, 16, 0, 2, 0, 95, 7, 3, 38, 17, 0, 2, 0, 29, 0, 11, 39, 8, 0, 22, 0, 12, 45, 20, 0, 19, 72, 200, 32, 32, 8, 2, 36, 18, 0, 50, 29, 113, 6, 2, 1, 2, 37, 22, 0, 26, 5, 2, 1, 2, 31, 15, 0, 328, 18, 16, 0, 2, 12, 2, 33, 125, 0, 80, 921, 103, 110, 18, 195, 2637, 96, 16, 1071, 18, 5, 26, 3994, 6, 582, 6842, 29, 1763, 568, 8, 30, 18, 78, 18, 29, 19, 47, 17, 3, 32, 20, 6, 18, 433, 44, 212, 63, 129, 74, 6, 0, 67, 12, 65, 1, 2, 0, 29, 6135, 9, 1237, 42, 9, 8936, 3, 2, 6, 2, 1, 2, 290, 16, 0, 30, 2, 3, 0, 15, 3, 9, 395, 2309, 106, 6, 12, 4, 8, 8, 9, 5991, 84, 2, 70, 2, 1, 3, 0, 3, 1, 3, 3, 2, 11, 2, 0, 2, 6, 2, 64, 2, 3, 3, 7, 2, 6, 2, 27, 2, 3, 2, 4, 2, 0, 4, 6, 2, 339, 3, 24, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 7, 1845, 30, 7, 5, 262, 61, 147, 44, 11, 6, 17, 0, 322, 29, 19, 43, 485, 27, 229, 29, 3, 0, 496, 6, 2, 3, 2, 1, 2, 14, 2, 196, 60, 67, 8, 0, 1205, 3, 2, 26, 2, 1, 2, 0, 3, 0, 2, 9, 2, 3, 2, 0, 2, 0, 7, 0, 5, 0, 2, 0, 2, 0, 2, 2, 2, 1, 2, 0, 3, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 3, 3, 2, 6, 2, 3, 2, 3, 2, 0, 2, 9, 2, 16, 6, 2, 2, 4, 2, 16, 4421, 42719, 33, 4153, 7, 221, 3, 5761, 15, 7472, 16, 621, 2467, 541, 1507, 4938, 6, 4191];
const astralIdentifierCodes = [509, 0, 227, 0, 150, 4, 294, 9, 1368, 2, 2, 1, 6, 3, 41, 2, 5, 0, 166, 1, 574, 3, 9, 9, 7, 9, 32, 4, 318, 1, 80, 3, 71, 10, 50, 3, 123, 2, 54, 14, 32, 10, 3, 1, 11, 3, 46, 10, 8, 0, 46, 9, 7, 2, 37, 13, 2, 9, 6, 1, 45, 0, 13, 2, 49, 13, 9, 3, 2, 11, 83, 11, 7, 0, 3, 0, 158, 11, 6, 9, 7, 3, 56, 1, 2, 6, 3, 1, 3, 2, 10, 0, 11, 1, 3, 6, 4, 4, 68, 8, 2, 0, 3, 0, 2, 3, 2, 4, 2, 0, 15, 1, 83, 17, 10, 9, 5, 0, 82, 19, 13, 9, 214, 6, 3, 8, 28, 1, 83, 16, 16, 9, 82, 12, 9, 9, 7, 19, 58, 14, 5, 9, 243, 14, 166, 9, 71, 5, 2, 1, 3, 3, 2, 0, 2, 1, 13, 9, 120, 6, 3, 6, 4, 0, 29, 9, 41, 6, 2, 3, 9, 0, 10, 10, 47, 15, 343, 9, 54, 7, 2, 7, 17, 9, 57, 21, 2, 13, 123, 5, 4, 0, 2, 1, 2, 6, 2, 0, 9, 9, 49, 4, 2, 1, 2, 4, 9, 9, 330, 3, 10, 1, 2, 0, 49, 6, 4, 4, 14, 10, 5350, 0, 7, 14, 11465, 27, 2343, 9, 87, 9, 39, 4, 60, 6, 26, 9, 535, 9, 470, 0, 2, 54, 8, 3, 82, 0, 12, 1, 19628, 1, 4178, 9, 519, 45, 3, 22, 543, 4, 4, 5, 9, 7, 3, 6, 31, 3, 149, 2, 1418, 49, 513, 54, 5, 49, 9, 0, 15, 0, 23, 4, 2, 14, 1361, 6, 2, 16, 3, 6, 2, 1, 2, 4, 101, 0, 161, 6, 10, 9, 357, 0, 62, 13, 499, 13, 245, 1, 2, 9, 726, 6, 110, 6, 6, 9, 4759, 9, 787719, 239];
function isInAstralSet(code, set) {
  let pos = 0x10000;
  for (let i = 0, length = set.length; i < length; i += 2) {
    pos += set[i];
    if (pos > code) return false;
    pos += set[i + 1];
    if (pos >= code) return true;
  }
  return false;
}
function isIdentifierStart(code) {
  if (code < 65) return code === 36;
  if (code <= 90) return true;
  if (code < 97) return code === 95;
  if (code <= 122) return true;
  if (code <= 0xffff) {
    return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code));
  }
  return isInAstralSet(code, astralIdentifierStartCodes);
}
function isIdentifierChar(code) {
  if (code < 48) return code === 36;
  if (code < 58) return true;
  if (code < 65) return false;
  if (code <= 90) return true;
  if (code < 97) return code === 95;
  if (code <= 122) return true;
  if (code <= 0xffff) {
    return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));
  }
  return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes);
}
function isIdentifierName(name) {
  let isFirst = true;
  for (let i = 0; i < name.length; i++) {
    let cp = name.charCodeAt(i);
    if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) {
      const trail = name.charCodeAt(++i);
      if ((trail & 0xfc00) === 0xdc00) {
        cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff);
      }
    }
    if (isFirst) {
      isFirst = false;
      if (!isIdentifierStart(cp)) {
        return false;
      }
    } else if (!isIdentifierChar(cp)) {
      return false;
    }
  }
  return !isFirst;
}

//# sourceMappingURL=identifier.js.map
1
node_modules/@babel/helper-validator-identifier/lib/identifier.js.map
generated
vendored
File diff suppressed because one or more lines are too long
57
node_modules/@babel/helper-validator-identifier/lib/index.js
generated
vendored
@@ -1,57 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
Object.defineProperty(exports, "isIdentifierChar", {
  enumerable: true,
  get: function () {
    return _identifier.isIdentifierChar;
  }
});
Object.defineProperty(exports, "isIdentifierName", {
  enumerable: true,
  get: function () {
    return _identifier.isIdentifierName;
  }
});
Object.defineProperty(exports, "isIdentifierStart", {
  enumerable: true,
  get: function () {
    return _identifier.isIdentifierStart;
  }
});
Object.defineProperty(exports, "isKeyword", {
  enumerable: true,
  get: function () {
    return _keyword.isKeyword;
  }
});
Object.defineProperty(exports, "isReservedWord", {
  enumerable: true,
  get: function () {
    return _keyword.isReservedWord;
  }
});
Object.defineProperty(exports, "isStrictBindOnlyReservedWord", {
  enumerable: true,
  get: function () {
    return _keyword.isStrictBindOnlyReservedWord;
  }
});
Object.defineProperty(exports, "isStrictBindReservedWord", {
  enumerable: true,
  get: function () {
    return _keyword.isStrictBindReservedWord;
  }
});
Object.defineProperty(exports, "isStrictReservedWord", {
  enumerable: true,
  get: function () {
    return _keyword.isStrictReservedWord;
  }
});
var _identifier = require("./identifier.js");
var _keyword = require("./keyword.js");

//# sourceMappingURL=index.js.map
1
node_modules/@babel/helper-validator-identifier/lib/index.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"names":["_identifier","require","_keyword"],"sources":["../src/index.ts"],"sourcesContent":["export {\n  isIdentifierName,\n  isIdentifierChar,\n  isIdentifierStart,\n} from \"./identifier.ts\";\nexport {\n  isReservedWord,\n  isStrictBindOnlyReservedWord,\n  isStrictBindReservedWord,\n  isStrictReservedWord,\n  isKeyword,\n} from \"./keyword.ts\";\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,IAAAA,WAAA,GAAAC,OAAA;AAKA,IAAAC,QAAA,GAAAD,OAAA","ignoreList":[]}
35
node_modules/@babel/helper-validator-identifier/lib/keyword.js
generated
vendored
@@ -1,35 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.isKeyword = isKeyword;
exports.isReservedWord = isReservedWord;
exports.isStrictBindOnlyReservedWord = isStrictBindOnlyReservedWord;
exports.isStrictBindReservedWord = isStrictBindReservedWord;
exports.isStrictReservedWord = isStrictReservedWord;
const reservedWords = {
  keyword: ["break", "case", "catch", "continue", "debugger", "default", "do", "else", "finally", "for", "function", "if", "return", "switch", "throw", "try", "var", "const", "while", "with", "new", "this", "super", "class", "extends", "export", "import", "null", "true", "false", "in", "instanceof", "typeof", "void", "delete"],
  strict: ["implements", "interface", "let", "package", "private", "protected", "public", "static", "yield"],
  strictBind: ["eval", "arguments"]
};
const keywords = new Set(reservedWords.keyword);
const reservedWordsStrictSet = new Set(reservedWords.strict);
const reservedWordsStrictBindSet = new Set(reservedWords.strictBind);
function isReservedWord(word, inModule) {
  return inModule && word === "await" || word === "enum";
}
function isStrictReservedWord(word, inModule) {
  return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);
}
function isStrictBindOnlyReservedWord(word) {
  return reservedWordsStrictBindSet.has(word);
}
function isStrictBindReservedWord(word, inModule) {
  return isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word);
}
function isKeyword(word) {
  return keywords.has(word);
}

//# sourceMappingURL=keyword.js.map
1
node_modules/@babel/helper-validator-identifier/lib/keyword.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"names":["reservedWords","keyword","strict","strictBind","keywords","Set","reservedWordsStrictSet","reservedWordsStrictBindSet","isReservedWord","word","inModule","isStrictReservedWord","has","isStrictBindOnlyReservedWord","isStrictBindReservedWord","isKeyword"],"sources":["../src/keyword.ts"],"sourcesContent":["const reservedWords = {\n  keyword: [\n    \"break\",\n    \"case\",\n    \"catch\",\n    \"continue\",\n    \"debugger\",\n    \"default\",\n    \"do\",\n    \"else\",\n    \"finally\",\n    \"for\",\n    \"function\",\n    \"if\",\n    \"return\",\n    \"switch\",\n    \"throw\",\n    \"try\",\n    \"var\",\n    \"const\",\n    \"while\",\n    \"with\",\n    \"new\",\n    \"this\",\n    \"super\",\n    \"class\",\n    \"extends\",\n    \"export\",\n    \"import\",\n    \"null\",\n    \"true\",\n    \"false\",\n    \"in\",\n    \"instanceof\",\n    \"typeof\",\n    \"void\",\n    \"delete\",\n  ],\n  strict: [\n    \"implements\",\n    \"interface\",\n    \"let\",\n    \"package\",\n    \"private\",\n    \"protected\",\n    \"public\",\n    \"static\",\n    \"yield\",\n  ],\n  strictBind: [\"eval\", \"arguments\"],\n};\nconst keywords = new Set(reservedWords.keyword);\nconst reservedWordsStrictSet = new Set(reservedWords.strict);\nconst reservedWordsStrictBindSet = new Set(reservedWords.strictBind);\n\n/**\n * Checks if word is a reserved word in non-strict mode\n */\nexport function isReservedWord(word: string, inModule: boolean): boolean {\n  return (inModule && word === \"await\") || word === \"enum\";\n}\n\n/**\n * Checks if word is a reserved word in non-binding strict mode\n *\n * Includes non-strict reserved words\n */\nexport function isStrictReservedWord(word: string, inModule: boolean): boolean {\n  return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode, but it is allowed as\n * a normal identifier.\n */\nexport function isStrictBindOnlyReservedWord(word: string): boolean {\n  return reservedWordsStrictBindSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode\n *\n * Includes non-strict reserved words and non-binding strict reserved words\n */\nexport function isStrictBindReservedWord(\n  word: string,\n  inModule: boolean,\n): boolean {\n  return (\n    isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word)\n  );\n}\n\nexport function isKeyword(word: string): boolean {\n  return keywords.has(word);\n}\n"],"mappings":";;;;;;;;;;AAAA,MAAMA,aAAa,GAAG;EACpBC,OAAO,EAAE,CACP,OAAO,EACP,MAAM,EACN,OAAO,EACP,UAAU,EACV,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,EACN,SAAS,EACT,KAAK,EACL,UAAU,EACV,IAAI,EACJ,QAAQ,EACR,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,OAAO,EACP,OAAO,EACP,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,EACP,OAAO,EACP,SAAS,EACT,QAAQ,EACR,QAAQ,EACR,MAAM,EACN,MAAM,EACN,OAAO,EACP,IAAI,EACJ,YAAY,EACZ,QAAQ,EACR,MAAM,EACN,QAAQ,CACT;EACDC,MAAM,EAAE,CACN,YAAY,EACZ,WAAW,EACX,KAAK,EACL,SAAS,EACT,SAAS,EACT,WAAW,EACX,QAAQ,EACR,QAAQ,EACR,OAAO,CACR;EACDC,UAAU,EAAE,CAAC,MAAM,EAAE,WAAW;AAClC,CAAC;AACD,MAAMC,QAAQ,GAAG,IAAIC,GAAG,CAACL,aAAa,CAACC,OAAO,CAAC;AAC/C,MAAMK,sBAAsB,GAAG,IAAID,GAAG,CAACL,aAAa,CAACE,MAAM,CAAC;AAC5D,MAAMK,0BAA0B,GAAG,IAAIF,GAAG,CAACL,aAAa,CAACG,UAAU,CAAC;AAK7D,SAASK,cAAcA,CAACC,IAAY,EAAEC,QAAiB,EAAW;EACvE,OAAQA,QAAQ,IAAID,IAAI,KAAK,OAAO,IAAKA,IAAI,KAAK,MAAM;AAC1D;AAOO,SAASE,oBAAoBA,CAACF,IAAY,EAAEC,QAAiB,EAAW;EAC7E,OAAOF,cAAc,CAACC,IAAI,EAAEC,QAAQ,CAAC,IAAIJ,sBAAsB,CAACM,GAAG,CAACH,IAAI,CAAC;AAC3E;AAMO,SAASI,4BAA4BA,CAACJ,IAAY,EAAW;EAClE,OAAOF,0BAA0B,CAACK,GAAG,CAACH,IAAI,CAAC;AAC7C;AAOO,SAASK,wBAAwBA,CACtCL,IAAY,EACZC,QAAiB,EACR;EACT,OACEC,oBAAoB,CAACF,IAAI,EAAEC,QAAQ,CAAC,IAAIG,4BAA4B,CAACJ,IAAI,CAAC;AAE9E;AAEO,SAASM,SAASA,CAACN,IAAY,EAAW;EAC/C,OAAOL,QAAQ,CAACQ,GAAG,CAACH,IAAI,CAAC;AAC3B","ignoreList":[]}
31
node_modules/@babel/helper-validator-identifier/package.json
generated
vendored
@@ -1,31 +0,0 @@
{
  "name": "@babel/helper-validator-identifier",
  "version": "7.25.9",
  "description": "Validate identifier/keywords name",
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-helper-validator-identifier"
  },
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "main": "./lib/index.js",
  "exports": {
    ".": {
      "types": "./lib/index.d.ts",
      "default": "./lib/index.js"
    },
    "./package.json": "./package.json"
  },
  "devDependencies": {
    "@unicode/unicode-16.0.0": "^1.0.0",
    "charcodes": "^0.2.0"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "author": "The Babel Team (https://babel.dev/team)",
  "type": "commonjs"
}
1073
node_modules/@babel/parser/CHANGELOG.md
generated
vendored
File diff suppressed because it is too large
19
node_modules/@babel/parser/LICENSE
generated
vendored
@@ -1,19 +0,0 @@
Copyright (C) 2012-2014 by various contributors (see AUTHORS)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
19
node_modules/@babel/parser/README.md
generated
vendored
@@ -1,19 +0,0 @@
# @babel/parser

> A JavaScript parser

See our website [@babel/parser](https://babeljs.io/docs/babel-parser) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20parser%22+is%3Aopen) associated with this package.

## Install

Using npm:

```sh
npm install --save-dev @babel/parser
```

or using yarn:

```sh
yarn add @babel/parser --dev
```
15
node_modules/@babel/parser/bin/babel-parser.js
generated
vendored
@@ -1,15 +0,0 @@
#!/usr/bin/env node
/* eslint no-var: 0 */

var parser = require("..");
var fs = require("fs");

var filename = process.argv[2];
if (!filename) {
  console.error("no filename specified");
} else {
  var file = fs.readFileSync(filename, "utf8");
  var ast = parser.parse(file);

  console.log(JSON.stringify(ast, null, " "));
}
5
node_modules/@babel/parser/index.cjs
generated
vendored
@@ -1,5 +0,0 @@
try {
  module.exports = require("./lib/index.cjs");
} catch {
  module.exports = require("./lib/index.js");
}
14217
node_modules/@babel/parser/lib/index.js
generated
vendored
File diff suppressed because it is too large
1
node_modules/@babel/parser/lib/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
50
node_modules/@babel/parser/package.json
generated
vendored
@@ -1,50 +0,0 @@
{
  "name": "@babel/parser",
  "version": "7.26.10",
  "description": "A JavaScript parser",
  "author": "The Babel Team (https://babel.dev/team)",
  "homepage": "https://babel.dev/docs/en/next/babel-parser",
  "bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A+parser+%28babylon%29%22+is%3Aopen",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "keywords": [
    "babel",
    "javascript",
    "parser",
    "tc39",
    "ecmascript",
    "@babel/parser"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-parser"
  },
  "main": "./lib/index.js",
  "types": "./typings/babel-parser.d.ts",
  "files": [
    "bin",
    "lib",
    "typings/babel-parser.d.ts",
    "index.cjs"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "# dependencies": "This package doesn't actually have runtime dependencies. @babel/types is only needed for type definitions.",
  "dependencies": {
    "@babel/types": "^7.26.10"
  },
  "devDependencies": {
    "@babel/code-frame": "^7.26.2",
    "@babel/helper-check-duplicate-nodes": "^7.25.9",
    "@babel/helper-fixtures": "^7.26.0",
    "@babel/helper-string-parser": "^7.25.9",
    "@babel/helper-validator-identifier": "^7.25.9",
    "charcodes": "^0.2.0"
  },
  "bin": "./bin/babel-parser.js",
  "type": "commonjs"
}
230
node_modules/@babel/parser/typings/babel-parser.d.ts
generated
vendored
@@ -1,230 +0,0 @@
// This file is auto-generated! Do not modify it directly.
// Run `yarn gulp bundle-dts` to re-generate it.
/* eslint-disable @typescript-eslint/consistent-type-imports, @typescript-eslint/no-redundant-type-constituents */
import { File, Expression } from '@babel/types';

type BABEL_8_BREAKING = false;
type IF_BABEL_7<V> = false extends BABEL_8_BREAKING ? V : never;

type Plugin$1 =
  | "asyncDoExpressions"
  | IF_BABEL_7<"asyncGenerators">
  | IF_BABEL_7<"bigInt">
  | IF_BABEL_7<"classPrivateMethods">
  | IF_BABEL_7<"classPrivateProperties">
  | IF_BABEL_7<"classProperties">
  | IF_BABEL_7<"classStaticBlock">
  | IF_BABEL_7<"decimal">
  | "decorators-legacy"
  | "deferredImportEvaluation"
  | "decoratorAutoAccessors"
  | "destructuringPrivate"
  | "deprecatedImportAssert"
  | "doExpressions"
  | IF_BABEL_7<"dynamicImport">
  | "explicitResourceManagement"
  | "exportDefaultFrom"
  | IF_BABEL_7<"exportNamespaceFrom">
  | "flow"
  | "flowComments"
  | "functionBind"
  | "functionSent"
  | "importMeta"
  | "jsx"
  | IF_BABEL_7<"jsonStrings">
  | IF_BABEL_7<"logicalAssignment">
  | IF_BABEL_7<"importAssertions">
  | IF_BABEL_7<"importReflection">
  | "moduleBlocks"
  | IF_BABEL_7<"moduleStringNames">
  | IF_BABEL_7<"nullishCoalescingOperator">
  | IF_BABEL_7<"numericSeparator">
  | IF_BABEL_7<"objectRestSpread">
  | IF_BABEL_7<"optionalCatchBinding">
  | IF_BABEL_7<"optionalChaining">
  | "partialApplication"
  | "placeholders"
  | IF_BABEL_7<"privateIn">
  | IF_BABEL_7<"regexpUnicodeSets">
  | "sourcePhaseImports"
  | "throwExpressions"
  | IF_BABEL_7<"topLevelAwait">
  | "v8intrinsic"
  | ParserPluginWithOptions[0];

type ParserPluginWithOptions =
  | ["decorators", DecoratorsPluginOptions]
  | ["estree", { classFeatures?: boolean }]
  | IF_BABEL_7<["importAttributes", { deprecatedAssertSyntax: boolean }]>
  | IF_BABEL_7<["moduleAttributes", { version: "may-2020" }]>
  | ["optionalChainingAssign", { version: "2023-07" }]
  | ["pipelineOperator", PipelineOperatorPluginOptions]
  | ["recordAndTuple", RecordAndTuplePluginOptions]
  | ["flow", FlowPluginOptions]
  | ["typescript", TypeScriptPluginOptions];

type PluginConfig = Plugin$1 | ParserPluginWithOptions;

interface DecoratorsPluginOptions {
  decoratorsBeforeExport?: boolean;
  allowCallParenthesized?: boolean;
}

interface PipelineOperatorPluginOptions {
  proposal: BABEL_8_BREAKING extends false
    ? "minimal" | "fsharp" | "hack" | "smart"
    : "fsharp" | "hack";
  topicToken?: "%" | "#" | "@@" | "^^" | "^";
}

interface RecordAndTuplePluginOptions {
  syntaxType: "bar" | "hash";
}

type FlowPluginOptions = BABEL_8_BREAKING extends true
  ? {
      all?: boolean;
      enums?: boolean;
    }
  : {
      all?: boolean;
    };

interface TypeScriptPluginOptions {
  dts?: boolean;
  disallowAmbiguousJSXLike?: boolean;
}

type Plugin = PluginConfig;

interface Options {
  /**
   * By default, import and export declarations can only appear at a program's top level.
   * Setting this option to true allows them anywhere where a statement is allowed.
   */
  allowImportExportEverywhere?: boolean;
  /**
   * By default, await use is not allowed outside of an async function.
   * Set this to true to accept such code.
   */
  allowAwaitOutsideFunction?: boolean;
  /**
   * By default, a return statement at the top level raises an error.
   * Set this to true to accept such code.
   */
  allowReturnOutsideFunction?: boolean;
  /**
   * By default, new.target use is not allowed outside of a function or class.
   * Set this to true to accept such code.
   */
  allowNewTargetOutsideFunction?: boolean;
  allowSuperOutsideMethod?: boolean;
  /**
   * By default, exported identifiers must refer to a declared variable.
   * Set this to true to allow export statements to reference undeclared variables.
   */
  allowUndeclaredExports?: boolean;
  /**
   * By default, Babel parser JavaScript code according to Annex B syntax.
   * Set this to `false` to disable such behavior.
   */
  annexB?: boolean;
  /**
   * By default, Babel attaches comments to adjacent AST nodes.
   * When this option is set to false, comments are not attached.
   * It can provide up to 30% performance improvement when the input code has many comments.
   * @babel/eslint-parser will set it for you.
   * It is not recommended to use attachComment: false with Babel transform,
   * as doing so removes all the comments in output code, and renders annotations such as
   * /* istanbul ignore next *\/ nonfunctional.
   */
  attachComment?: boolean;
  /**
   * By default, Babel always throws an error when it finds some invalid code.
   * When this option is set to true, it will store the parsing error and
   * try to continue parsing the invalid input file.
   */
  errorRecovery?: boolean;
  /**
   * Indicate the mode the code should be parsed in.
   * Can be one of "script", "module", or "unambiguous". Defaults to "script".
   * "unambiguous" will make @babel/parser attempt to guess, based on the presence
   * of ES6 import or export statements.
   * Files with ES6 imports and exports are considered "module" and are otherwise "script".
   */
  sourceType?: "script" | "module" | "unambiguous";
  /**
   * Correlate output AST nodes with their source filename.
   * Useful when generating code and source maps from the ASTs of multiple input files.
   */
  sourceFilename?: string;
  /**
   * By default, all source indexes start from 0.
   * You can provide a start index to alternatively start with.
   * Useful for integration with other source tools.
   */
  startIndex?: number;
  /**
   * By default, the first line of code parsed is treated as line 1.
   * You can provide a line number to alternatively start with.
   * Useful for integration with other source tools.
   */
  startLine?: number;
  /**
   * By default, the parsed code is treated as if it starts from line 1, column 0.
   * You can provide a column number to alternatively start with.
   * Useful for integration with other source tools.
   */
  startColumn?: number;
  /**
   * Array containing the plugins that you want to enable.
   */
  plugins?: Plugin[];
  /**
   * Should the parser work in strict mode.
   * Defaults to true if sourceType === 'module'. Otherwise, false.
   */
  strictMode?: boolean;
  /**
   * Adds a ranges property to each node: [node.start, node.end]
   */
  ranges?: boolean;
  /**
   * Adds all parsed tokens to a tokens property on the File node.
   */
  tokens?: boolean;
  /**
   * By default, the parser adds information about parentheses by setting
   * `extra.parenthesized` to `true` as needed.
   * When this option is `true` the parser creates `ParenthesizedExpression`
   * AST nodes instead of using the `extra` property.
   */
  createParenthesizedExpressions?: boolean;
  /**
   * The default is false in Babel 7 and true in Babel 8
   * Set this to true to parse it as an `ImportExpression` node.
   * Otherwise `import(foo)` is parsed as `CallExpression(Import, [Identifier(foo)])`.
   */
  createImportExpressions?: boolean;
}

type ParserOptions = Partial<Options>;
interface ParseError {
  code: string;
  reasonCode: string;
}
type ParseResult<Result extends File | Expression = File> = Result & {
  errors: null | ParseError[];
};
/**
 * Parse the provided code as an entire ECMAScript program.
 */
declare function parse(input: string, options?: ParserOptions): ParseResult<File>;
declare function parseExpression(input: string, options?: ParserOptions): ParseResult<Expression>;

declare const tokTypes: {
  // todo(flow->ts) real token type
  [name: string]: any;
};

export { DecoratorsPluginOptions, FlowPluginOptions, ParseError, ParseResult, ParserOptions, PluginConfig as ParserPlugin, ParserPluginWithOptions, PipelineOperatorPluginOptions, RecordAndTuplePluginOptions, TypeScriptPluginOptions, parse, parseExpression, tokTypes };
22
node_modules/@babel/types/LICENSE
generated
vendored
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19
node_modules/@babel/types/README.md
generated
vendored
@@ -1,19 +0,0 @@
# @babel/types

> Babel Types is a Lodash-esque utility library for AST nodes

See our website [@babel/types](https://babeljs.io/docs/babel-types) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20types%22+is%3Aopen) associated with this package.

## Install

Using npm:

```sh
npm install --save-dev @babel/types
```

or using yarn:

```sh
yarn add @babel/types --dev
```
16
node_modules/@babel/types/lib/asserts/assertNode.js
generated
vendored
@@ -1,16 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = assertNode;
var _isNode = require("../validators/isNode.js");
function assertNode(node) {
  if (!(0, _isNode.default)(node)) {
    var _node$type;
    const type = (_node$type = node == null ? void 0 : node.type) != null ? _node$type : JSON.stringify(node);
    throw new TypeError(`Not a valid node of type "${type}"`);
  }
}

//# sourceMappingURL=assertNode.js.map
1
node_modules/@babel/types/lib/asserts/assertNode.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"names":["_isNode","require","assertNode","node","isNode","_node$type","type","JSON","stringify","TypeError"],"sources":["../../src/asserts/assertNode.ts"],"sourcesContent":["import isNode from \"../validators/isNode.ts\";\nimport type * as t from \"../index.ts\";\n\nexport default function assertNode(node?: any): asserts node is t.Node {\n  if (!isNode(node)) {\n    const type = node?.type ?? JSON.stringify(node);\n    throw new TypeError(`Not a valid node of type \"${type}\"`);\n  }\n}\n"],"mappings":";;;;;;AAAA,IAAAA,OAAA,GAAAC,OAAA;AAGe,SAASC,UAAUA,CAACC,IAAU,EAA0B;EACrE,IAAI,CAAC,IAAAC,eAAM,EAACD,IAAI,CAAC,EAAE;IAAA,IAAAE,UAAA;IACjB,MAAMC,IAAI,IAAAD,UAAA,GAAGF,IAAI,oBAAJA,IAAI,CAAEG,IAAI,YAAAD,UAAA,GAAIE,IAAI,CAACC,SAAS,CAACL,IAAI,CAAC;IAC/C,MAAM,IAAIM,SAAS,CAAC,6BAA6BH,IAAI,GAAG,CAAC;EAC3D;AACF","ignoreList":[]}
1243
node_modules/@babel/types/lib/asserts/generated/index.js
generated
vendored
File diff suppressed because it is too large
1
node_modules/@babel/types/lib/asserts/generated/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
3
node_modules/@babel/types/lib/ast-types/generated/index.js
generated
vendored
@@ -1,3 +0,0 @@


//# sourceMappingURL=index.js.map
1
node_modules/@babel/types/lib/ast-types/generated/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
18
node_modules/@babel/types/lib/builders/flow/createFlowUnionType.js
generated
vendored
@@ -1,18 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = createFlowUnionType;
var _index = require("../generated/index.js");
var _removeTypeDuplicates = require("../../modifications/flow/removeTypeDuplicates.js");
function createFlowUnionType(types) {
  const flattened = (0, _removeTypeDuplicates.default)(types);
  if (flattened.length === 1) {
    return flattened[0];
  } else {
    return (0, _index.unionTypeAnnotation)(flattened);
  }
}

//# sourceMappingURL=createFlowUnionType.js.map
1
node_modules/@babel/types/lib/builders/flow/createFlowUnionType.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"names":["_index","require","_removeTypeDuplicates","createFlowUnionType","types","flattened","removeTypeDuplicates","length","unionTypeAnnotation"],"sources":["../../../src/builders/flow/createFlowUnionType.ts"],"sourcesContent":["import { unionTypeAnnotation } from \"../generated/index.ts\";\nimport removeTypeDuplicates from \"../../modifications/flow/removeTypeDuplicates.ts\";\nimport type * as t from \"../../index.ts\";\n\n/**\n * Takes an array of `types` and flattens them, removing duplicates and\n * returns a `UnionTypeAnnotation` node containing them.\n */\nexport default function createFlowUnionType<T extends t.FlowType>(\n  types: [T] | Array<T>,\n): T | t.UnionTypeAnnotation {\n  const flattened = removeTypeDuplicates(types);\n\n  if (flattened.length === 1) {\n    return flattened[0] as T;\n  } else {\n    return unionTypeAnnotation(flattened);\n  }\n}\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,qBAAA,GAAAD,OAAA;AAOe,SAASE,mBAAmBA,CACzCC,KAAqB,EACM;EAC3B,MAAMC,SAAS,GAAG,IAAAC,6BAAoB,EAACF,KAAK,CAAC;EAE7C,IAAIC,SAAS,CAACE,MAAM,KAAK,CAAC,EAAE;IAC1B,OAAOF,SAAS,CAAC,CAAC,CAAC;EACrB,CAAC,MAAM;IACL,OAAO,IAAAG,0BAAmB,EAACH,SAAS,CAAC;EACvC;AACF","ignoreList":[]}
31
node_modules/@babel/types/lib/builders/flow/createTypeAnnotationBasedOnTypeof.js
generated
vendored
@@ -1,31 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;
var _index = require("../generated/index.js");
var _default = exports.default = createTypeAnnotationBasedOnTypeof;
function createTypeAnnotationBasedOnTypeof(type) {
  switch (type) {
    case "string":
      return (0, _index.stringTypeAnnotation)();
    case "number":
      return (0, _index.numberTypeAnnotation)();
    case "undefined":
      return (0, _index.voidTypeAnnotation)();
    case "boolean":
      return (0, _index.booleanTypeAnnotation)();
    case "function":
      return (0, _index.genericTypeAnnotation)((0, _index.identifier)("Function"));
    case "object":
      return (0, _index.genericTypeAnnotation)((0, _index.identifier)("Object"));
    case "symbol":
      return (0, _index.genericTypeAnnotation)((0, _index.identifier)("Symbol"));
    case "bigint":
      return (0, _index.anyTypeAnnotation)();
  }
  throw new Error("Invalid typeof value: " + type);
}

//# sourceMappingURL=createTypeAnnotationBasedOnTypeof.js.map
1
node_modules/@babel/types/lib/builders/flow/createTypeAnnotationBasedOnTypeof.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"names":["_index","require","_default","exports","default","createTypeAnnotationBasedOnTypeof","type","stringTypeAnnotation","numberTypeAnnotation","voidTypeAnnotation","booleanTypeAnnotation","genericTypeAnnotation","identifier","anyTypeAnnotation","Error"],"sources":["../../../src/builders/flow/createTypeAnnotationBasedOnTypeof.ts"],"sourcesContent":["import {\n  anyTypeAnnotation,\n  stringTypeAnnotation,\n  numberTypeAnnotation,\n  voidTypeAnnotation,\n  booleanTypeAnnotation,\n  genericTypeAnnotation,\n  identifier,\n} from \"../generated/index.ts\";\nimport type * as t from \"../../index.ts\";\n\nexport default createTypeAnnotationBasedOnTypeof as {\n  (type: \"string\"): t.StringTypeAnnotation;\n  (type: \"number\"): t.NumberTypeAnnotation;\n  (type: \"undefined\"): t.VoidTypeAnnotation;\n  (type: \"boolean\"): t.BooleanTypeAnnotation;\n  (type: \"function\"): t.GenericTypeAnnotation;\n  (type: \"object\"): t.GenericTypeAnnotation;\n  (type: \"symbol\"): t.GenericTypeAnnotation;\n  (type: \"bigint\"): t.AnyTypeAnnotation;\n};\n\n/**\n * Create a type annotation based on typeof expression.\n */\nfunction createTypeAnnotationBasedOnTypeof(type: string): t.FlowType {\n  switch (type) {\n    case \"string\":\n      return stringTypeAnnotation();\n    case \"number\":\n      return numberTypeAnnotation();\n    case \"undefined\":\n      return voidTypeAnnotation();\n    case \"boolean\":\n      return booleanTypeAnnotation();\n    case \"function\":\n      return genericTypeAnnotation(identifier(\"Function\"));\n    case \"object\":\n      return genericTypeAnnotation(identifier(\"Object\"));\n    case \"symbol\":\n      return genericTypeAnnotation(identifier(\"Symbol\"));\n    case \"bigint\":\n      // todo: use BigInt annotation when Flow supports BigInt\n      // https://github.com/facebook/flow/issues/6639\n      return anyTypeAnnotation();\n  }\n  throw new Error(\"Invalid typeof value: \" + type);\n}\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAQ+B,IAAAC,QAAA,GAAAC,OAAA,CAAAC,OAAA,GAGhBC,iCAAiC;AAchD,SAASA,iCAAiCA,CAACC,IAAY,EAAc;EACnE,QAAQA,IAAI;IACV,KAAK,QAAQ;MACX,OAAO,IAAAC,2BAAoB,EAAC,CAAC;IAC/B,KAAK,QAAQ;MACX,OAAO,IAAAC,2BAAoB,EAAC,CAAC;IAC/B,KAAK,WAAW;MACd,OAAO,IAAAC,yBAAkB,EAAC,CAAC;IAC7B,KAAK,SAAS;MACZ,OAAO,IAAAC,4BAAqB,EAAC,CAAC;IAChC,KAAK,UAAU;MACb,OAAO,IAAAC,4BAAqB,EAAC,IAAAC,iBAAU,EAAC,UAAU,CAAC,CAAC;IACtD,KAAK,QAAQ;MACX,OAAO,IAAAD,4BAAqB,EAAC,IAAAC,iBAAU,EAAC,QAAQ,CAAC,CAAC;IACpD,KAAK,QAAQ;MACX,OAAO,IAAAD,4BAAqB,EAAC,IAAAC,iBAAU,EAAC,QAAQ,CAAC,CAAC;IACpD,KAAK,QAAQ;MAGX,OAAO,IAAAC,wBAAiB,EAAC,CAAC;EAC9B;EACA,MAAM,IAAIC,KAAK,CAAC,wBAAwB,GAAGR,IAAI,CAAC;AAClD","ignoreList":[]}
29
node_modules/@babel/types/lib/builders/generated/index.js
generated
vendored
@@ -1,29 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
var _lowercase = require("./lowercase.js");
Object.keys(_lowercase).forEach(function (key) {
  if (key === "default" || key === "__esModule") return;
  if (key in exports && exports[key] === _lowercase[key]) return;
  Object.defineProperty(exports, key, {
    enumerable: true,
    get: function () {
      return _lowercase[key];
    }
  });
});
var _uppercase = require("./uppercase.js");
Object.keys(_uppercase).forEach(function (key) {
  if (key === "default" || key === "__esModule") return;
  if (key in exports && exports[key] === _uppercase[key]) return;
  Object.defineProperty(exports, key, {
    enumerable: true,
    get: function () {
      return _uppercase[key];
    }
  });
});

//# sourceMappingURL=index.js.map
Some files were not shown because too many files have changed in this diff