mirror of
https://github.com/LukeHagar/arbiter.git
synced 2025-12-06 04:19:14 +00:00
Saving all progress
This commit is contained in:
36
.github/workflows/ci.yml
vendored
Normal file
36
.github/workflows/ci.yml
vendored
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
node-version: [18.x, 20.x]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Use Node.js ${{ matrix.node-version }}
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: ${{ matrix.node-version }}
|
||||||
|
cache: 'npm'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Run linter
|
||||||
|
run: npm run lint
|
||||||
|
|
||||||
|
- name: Run unit tests
|
||||||
|
run: npm run test:unit
|
||||||
|
|
||||||
|
- name: Run integration tests
|
||||||
|
run: npm run test:integration
|
||||||
162
README.md
162
README.md
@@ -1 +1,161 @@
|
|||||||
# arbiter
|
# Arbiter
|
||||||
|
|
||||||
|
A powerful API proxy with automatic OpenAPI documentation generation and HAR export capabilities.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Proxy API requests to any target server
|
||||||
|
- Automatic OpenAPI documentation generation
|
||||||
|
- HAR file export for request/response analysis
|
||||||
|
- Beautiful Swagger UI for API exploration
|
||||||
|
- CLI interface for easy configuration
|
||||||
|
- Support for API key injection
|
||||||
|
- CORS enabled by default
|
||||||
|
- Pretty JSON responses
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install -g arbiter
|
||||||
|
```
|
||||||
|
|
||||||
|
Or clone the repository and install dependencies:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/yourusername/arbiter.git
|
||||||
|
cd arbiter
|
||||||
|
npm install
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### CLI
|
||||||
|
|
||||||
|
The easiest way to use Arbiter is through the CLI:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Basic usage
|
||||||
|
arbiter --target http://api.example.com
|
||||||
|
|
||||||
|
# Specify custom port
|
||||||
|
arbiter --port 8080 --target http://api.example.com
|
||||||
|
|
||||||
|
# Add API key to requests
|
||||||
|
arbiter --key your-api-key --target http://api.example.com
|
||||||
|
|
||||||
|
# Run only the documentation server
|
||||||
|
arbiter --docs-only --port 3000
|
||||||
|
|
||||||
|
# Run only the proxy server
|
||||||
|
arbiter --proxy-only --port 3000
|
||||||
|
|
||||||
|
# Enable verbose logging
|
||||||
|
arbiter --verbose --target http://api.example.com
|
||||||
|
```
|
||||||
|
|
||||||
|
### Development
|
||||||
|
|
||||||
|
For development, you can use the following commands:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start the development server
|
||||||
|
npm run dev
|
||||||
|
|
||||||
|
# Build the project
|
||||||
|
npm run build
|
||||||
|
|
||||||
|
# Start the production server
|
||||||
|
npm start
|
||||||
|
|
||||||
|
# Run the CLI in development mode
|
||||||
|
npm run cli
|
||||||
|
```
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
Arbiter runs two separate servers:
|
||||||
|
|
||||||
|
1. **Proxy Server** (default port 3000)
|
||||||
|
- Handles all API requests
|
||||||
|
- Forwards requests to the target API
|
||||||
|
- Records request/response data
|
||||||
|
- Supports API key injection
|
||||||
|
|
||||||
|
2. **Documentation Server** (default port 3001)
|
||||||
|
- Serves the Swagger UI interface
|
||||||
|
- Provides OpenAPI specification
|
||||||
|
- Handles HAR file exports
|
||||||
|
- Separated from proxy for better performance
|
||||||
|
|
||||||
|
## API Endpoints
|
||||||
|
|
||||||
|
### Proxy Server
|
||||||
|
- All requests are proxied to the target API
|
||||||
|
- No path prefix required
|
||||||
|
- Example: `http://localhost:3000/api/v1/users`
|
||||||
|
|
||||||
|
### Documentation Server
|
||||||
|
- `/docs` - Swagger UI interface
|
||||||
|
- `/openapi.json` - OpenAPI specification
|
||||||
|
- `/har` - HAR file export
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
- `PORT` - Proxy server port (default: 3000)
|
||||||
|
- `TARGET` - Target API URL
|
||||||
|
- `API_KEY` - API key to add to requests
|
||||||
|
- `VERBOSE` - Enable verbose logging
|
||||||
|
|
||||||
|
### Command Line Options
|
||||||
|
|
||||||
|
- `-p, --port <number>` - Proxy server port
|
||||||
|
- `-t, --target <url>` - Target API URL
|
||||||
|
- `-k, --key <string>` - API key
|
||||||
|
- `-d, --docs-only` - Run only documentation server
|
||||||
|
- `-x, --proxy-only` - Run only proxy server
|
||||||
|
- `-v, --verbose` - Enable verbose logging
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
1. Fork the repository
|
||||||
|
2. Create your feature branch (`git checkout -b feature/amazing-feature`)
|
||||||
|
3. Commit your changes (`git commit -m 'Add some amazing feature'`)
|
||||||
|
4. Push to the branch (`git push origin feature/amazing-feature`)
|
||||||
|
5. Open a Pull Request
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
This project is licensed under the ISC License - see the LICENSE file for details.
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
The project includes both unit tests and integration tests. Tests are written using Vitest.
|
||||||
|
|
||||||
|
### Running Tests Locally
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all tests
|
||||||
|
npm test
|
||||||
|
|
||||||
|
# Run unit tests only
|
||||||
|
npm run test:unit
|
||||||
|
|
||||||
|
# Run integration tests only
|
||||||
|
npm run test:integration
|
||||||
|
|
||||||
|
# Run tests with coverage
|
||||||
|
npm run test:coverage
|
||||||
|
```
|
||||||
|
|
||||||
|
### Continuous Integration
|
||||||
|
|
||||||
|
The project uses GitHub Actions for continuous integration. The CI pipeline runs on every push to the main branch and on pull requests. It includes:
|
||||||
|
|
||||||
|
- Running unit tests
|
||||||
|
- Running integration tests
|
||||||
|
- Linting checks
|
||||||
|
- Testing against multiple Node.js versions (18.x and 20.x)
|
||||||
|
|
||||||
|
You can view the CI status in the GitHub Actions tab of the repository.
|
||||||
|
|||||||
224
coverage/base.css
Normal file
224
coverage/base.css
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
body, html {
|
||||||
|
margin:0; padding: 0;
|
||||||
|
height: 100%;
|
||||||
|
}
|
||||||
|
body {
|
||||||
|
font-family: Helvetica Neue, Helvetica, Arial;
|
||||||
|
font-size: 14px;
|
||||||
|
color:#333;
|
||||||
|
}
|
||||||
|
.small { font-size: 12px; }
|
||||||
|
*, *:after, *:before {
|
||||||
|
-webkit-box-sizing:border-box;
|
||||||
|
-moz-box-sizing:border-box;
|
||||||
|
box-sizing:border-box;
|
||||||
|
}
|
||||||
|
h1 { font-size: 20px; margin: 0;}
|
||||||
|
h2 { font-size: 14px; }
|
||||||
|
pre {
|
||||||
|
font: 12px/1.4 Consolas, "Liberation Mono", Menlo, Courier, monospace;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
-moz-tab-size: 2;
|
||||||
|
-o-tab-size: 2;
|
||||||
|
tab-size: 2;
|
||||||
|
}
|
||||||
|
a { color:#0074D9; text-decoration:none; }
|
||||||
|
a:hover { text-decoration:underline; }
|
||||||
|
.strong { font-weight: bold; }
|
||||||
|
.space-top1 { padding: 10px 0 0 0; }
|
||||||
|
.pad2y { padding: 20px 0; }
|
||||||
|
.pad1y { padding: 10px 0; }
|
||||||
|
.pad2x { padding: 0 20px; }
|
||||||
|
.pad2 { padding: 20px; }
|
||||||
|
.pad1 { padding: 10px; }
|
||||||
|
.space-left2 { padding-left:55px; }
|
||||||
|
.space-right2 { padding-right:20px; }
|
||||||
|
.center { text-align:center; }
|
||||||
|
.clearfix { display:block; }
|
||||||
|
.clearfix:after {
|
||||||
|
content:'';
|
||||||
|
display:block;
|
||||||
|
height:0;
|
||||||
|
clear:both;
|
||||||
|
visibility:hidden;
|
||||||
|
}
|
||||||
|
.fl { float: left; }
|
||||||
|
@media only screen and (max-width:640px) {
|
||||||
|
.col3 { width:100%; max-width:100%; }
|
||||||
|
.hide-mobile { display:none!important; }
|
||||||
|
}
|
||||||
|
|
||||||
|
.quiet {
|
||||||
|
color: #7f7f7f;
|
||||||
|
color: rgba(0,0,0,0.5);
|
||||||
|
}
|
||||||
|
.quiet a { opacity: 0.7; }
|
||||||
|
|
||||||
|
.fraction {
|
||||||
|
font-family: Consolas, 'Liberation Mono', Menlo, Courier, monospace;
|
||||||
|
font-size: 10px;
|
||||||
|
color: #555;
|
||||||
|
background: #E8E8E8;
|
||||||
|
padding: 4px 5px;
|
||||||
|
border-radius: 3px;
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.path a:link, div.path a:visited { color: #333; }
|
||||||
|
table.coverage {
|
||||||
|
border-collapse: collapse;
|
||||||
|
margin: 10px 0 0 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.coverage td {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
vertical-align: top;
|
||||||
|
}
|
||||||
|
table.coverage td.line-count {
|
||||||
|
text-align: right;
|
||||||
|
padding: 0 5px 0 20px;
|
||||||
|
}
|
||||||
|
table.coverage td.line-coverage {
|
||||||
|
text-align: right;
|
||||||
|
padding-right: 10px;
|
||||||
|
min-width:20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.coverage td span.cline-any {
|
||||||
|
display: inline-block;
|
||||||
|
padding: 0 5px;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
.missing-if-branch {
|
||||||
|
display: inline-block;
|
||||||
|
margin-right: 5px;
|
||||||
|
border-radius: 3px;
|
||||||
|
position: relative;
|
||||||
|
padding: 0 4px;
|
||||||
|
background: #333;
|
||||||
|
color: yellow;
|
||||||
|
}
|
||||||
|
|
||||||
|
.skip-if-branch {
|
||||||
|
display: none;
|
||||||
|
margin-right: 10px;
|
||||||
|
position: relative;
|
||||||
|
padding: 0 4px;
|
||||||
|
background: #ccc;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
.missing-if-branch .typ, .skip-if-branch .typ {
|
||||||
|
color: inherit !important;
|
||||||
|
}
|
||||||
|
.coverage-summary {
|
||||||
|
border-collapse: collapse;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
.coverage-summary tr { border-bottom: 1px solid #bbb; }
|
||||||
|
.keyline-all { border: 1px solid #ddd; }
|
||||||
|
.coverage-summary td, .coverage-summary th { padding: 10px; }
|
||||||
|
.coverage-summary tbody { border: 1px solid #bbb; }
|
||||||
|
.coverage-summary td { border-right: 1px solid #bbb; }
|
||||||
|
.coverage-summary td:last-child { border-right: none; }
|
||||||
|
.coverage-summary th {
|
||||||
|
text-align: left;
|
||||||
|
font-weight: normal;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
.coverage-summary th.file { border-right: none !important; }
|
||||||
|
.coverage-summary th.pct { }
|
||||||
|
.coverage-summary th.pic,
|
||||||
|
.coverage-summary th.abs,
|
||||||
|
.coverage-summary td.pct,
|
||||||
|
.coverage-summary td.abs { text-align: right; }
|
||||||
|
.coverage-summary td.file { white-space: nowrap; }
|
||||||
|
.coverage-summary td.pic { min-width: 120px !important; }
|
||||||
|
.coverage-summary tfoot td { }
|
||||||
|
|
||||||
|
.coverage-summary .sorter {
|
||||||
|
height: 10px;
|
||||||
|
width: 7px;
|
||||||
|
display: inline-block;
|
||||||
|
margin-left: 0.5em;
|
||||||
|
background: url(sort-arrow-sprite.png) no-repeat scroll 0 0 transparent;
|
||||||
|
}
|
||||||
|
.coverage-summary .sorted .sorter {
|
||||||
|
background-position: 0 -20px;
|
||||||
|
}
|
||||||
|
.coverage-summary .sorted-desc .sorter {
|
||||||
|
background-position: 0 -10px;
|
||||||
|
}
|
||||||
|
.status-line { height: 10px; }
|
||||||
|
/* yellow */
|
||||||
|
.cbranch-no { background: yellow !important; color: #111; }
|
||||||
|
/* dark red */
|
||||||
|
.red.solid, .status-line.low, .low .cover-fill { background:#C21F39 }
|
||||||
|
.low .chart { border:1px solid #C21F39 }
|
||||||
|
.highlighted,
|
||||||
|
.highlighted .cstat-no, .highlighted .fstat-no, .highlighted .cbranch-no{
|
||||||
|
background: #C21F39 !important;
|
||||||
|
}
|
||||||
|
/* medium red */
|
||||||
|
.cstat-no, .fstat-no, .cbranch-no, .cbranch-no { background:#F6C6CE }
|
||||||
|
/* light red */
|
||||||
|
.low, .cline-no { background:#FCE1E5 }
|
||||||
|
/* light green */
|
||||||
|
.high, .cline-yes { background:rgb(230,245,208) }
|
||||||
|
/* medium green */
|
||||||
|
.cstat-yes { background:rgb(161,215,106) }
|
||||||
|
/* dark green */
|
||||||
|
.status-line.high, .high .cover-fill { background:rgb(77,146,33) }
|
||||||
|
.high .chart { border:1px solid rgb(77,146,33) }
|
||||||
|
/* dark yellow (gold) */
|
||||||
|
.status-line.medium, .medium .cover-fill { background: #f9cd0b; }
|
||||||
|
.medium .chart { border:1px solid #f9cd0b; }
|
||||||
|
/* light yellow */
|
||||||
|
.medium { background: #fff4c2; }
|
||||||
|
|
||||||
|
.cstat-skip { background: #ddd; color: #111; }
|
||||||
|
.fstat-skip { background: #ddd; color: #111 !important; }
|
||||||
|
.cbranch-skip { background: #ddd !important; color: #111; }
|
||||||
|
|
||||||
|
span.cline-neutral { background: #eaeaea; }
|
||||||
|
|
||||||
|
.coverage-summary td.empty {
|
||||||
|
opacity: .5;
|
||||||
|
padding-top: 4px;
|
||||||
|
padding-bottom: 4px;
|
||||||
|
line-height: 1;
|
||||||
|
color: #888;
|
||||||
|
}
|
||||||
|
|
||||||
|
.cover-fill, .cover-empty {
|
||||||
|
display:inline-block;
|
||||||
|
height: 12px;
|
||||||
|
}
|
||||||
|
.chart {
|
||||||
|
line-height: 0;
|
||||||
|
}
|
||||||
|
.cover-empty {
|
||||||
|
background: white;
|
||||||
|
}
|
||||||
|
.cover-full {
|
||||||
|
border-right: none !important;
|
||||||
|
}
|
||||||
|
pre.prettyprint {
|
||||||
|
border: none !important;
|
||||||
|
padding: 0 !important;
|
||||||
|
margin: 0 !important;
|
||||||
|
}
|
||||||
|
.com { color: #999 !important; }
|
||||||
|
.ignore-none { color: #999; font-weight: normal; }
|
||||||
|
|
||||||
|
.wrapper {
|
||||||
|
min-height: 100%;
|
||||||
|
height: auto !important;
|
||||||
|
height: 100%;
|
||||||
|
margin: 0 auto -48px;
|
||||||
|
}
|
||||||
|
.footer, .push {
|
||||||
|
height: 48px;
|
||||||
|
}
|
||||||
87
coverage/block-navigation.js
Normal file
87
coverage/block-navigation.js
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
/* eslint-disable */
|
||||||
|
var jumpToCode = (function init() {
|
||||||
|
// Classes of code we would like to highlight in the file view
|
||||||
|
var missingCoverageClasses = ['.cbranch-no', '.cstat-no', '.fstat-no'];
|
||||||
|
|
||||||
|
// Elements to highlight in the file listing view
|
||||||
|
var fileListingElements = ['td.pct.low'];
|
||||||
|
|
||||||
|
// We don't want to select elements that are direct descendants of another match
|
||||||
|
var notSelector = ':not(' + missingCoverageClasses.join('):not(') + ') > '; // becomes `:not(a):not(b) > `
|
||||||
|
|
||||||
|
// Selecter that finds elements on the page to which we can jump
|
||||||
|
var selector =
|
||||||
|
fileListingElements.join(', ') +
|
||||||
|
', ' +
|
||||||
|
notSelector +
|
||||||
|
missingCoverageClasses.join(', ' + notSelector); // becomes `:not(a):not(b) > a, :not(a):not(b) > b`
|
||||||
|
|
||||||
|
// The NodeList of matching elements
|
||||||
|
var missingCoverageElements = document.querySelectorAll(selector);
|
||||||
|
|
||||||
|
var currentIndex;
|
||||||
|
|
||||||
|
function toggleClass(index) {
|
||||||
|
missingCoverageElements
|
||||||
|
.item(currentIndex)
|
||||||
|
.classList.remove('highlighted');
|
||||||
|
missingCoverageElements.item(index).classList.add('highlighted');
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeCurrent(index) {
|
||||||
|
toggleClass(index);
|
||||||
|
currentIndex = index;
|
||||||
|
missingCoverageElements.item(index).scrollIntoView({
|
||||||
|
behavior: 'smooth',
|
||||||
|
block: 'center',
|
||||||
|
inline: 'center'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function goToPrevious() {
|
||||||
|
var nextIndex = 0;
|
||||||
|
if (typeof currentIndex !== 'number' || currentIndex === 0) {
|
||||||
|
nextIndex = missingCoverageElements.length - 1;
|
||||||
|
} else if (missingCoverageElements.length > 1) {
|
||||||
|
nextIndex = currentIndex - 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
makeCurrent(nextIndex);
|
||||||
|
}
|
||||||
|
|
||||||
|
function goToNext() {
|
||||||
|
var nextIndex = 0;
|
||||||
|
|
||||||
|
if (
|
||||||
|
typeof currentIndex === 'number' &&
|
||||||
|
currentIndex < missingCoverageElements.length - 1
|
||||||
|
) {
|
||||||
|
nextIndex = currentIndex + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
makeCurrent(nextIndex);
|
||||||
|
}
|
||||||
|
|
||||||
|
return function jump(event) {
|
||||||
|
if (
|
||||||
|
document.getElementById('fileSearch') === document.activeElement &&
|
||||||
|
document.activeElement != null
|
||||||
|
) {
|
||||||
|
// if we're currently focused on the search input, we don't want to navigate
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (event.which) {
|
||||||
|
case 78: // n
|
||||||
|
case 74: // j
|
||||||
|
goToNext();
|
||||||
|
break;
|
||||||
|
case 66: // b
|
||||||
|
case 75: // k
|
||||||
|
case 80: // p
|
||||||
|
goToPrevious();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
window.addEventListener('keydown', jumpToCode);
|
||||||
6
coverage/coverage-final.json
Normal file
6
coverage/coverage-final.json
Normal file
File diff suppressed because one or more lines are too long
BIN
coverage/favicon.png
Normal file
BIN
coverage/favicon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 445 B |
146
coverage/index.html
Normal file
146
coverage/index.html
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
|
||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title>Code coverage report for All files</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="stylesheet" href="prettify.css" />
|
||||||
|
<link rel="stylesheet" href="base.css" />
|
||||||
|
<link rel="shortcut icon" type="image/x-icon" href="favicon.png" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<style type='text/css'>
|
||||||
|
.coverage-summary .sorter {
|
||||||
|
background-image: url(sort-arrow-sprite.png);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class='wrapper'>
|
||||||
|
<div class='pad1'>
|
||||||
|
<h1>All files</h1>
|
||||||
|
<div class='clearfix'>
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">52.16% </span>
|
||||||
|
<span class="quiet">Statements</span>
|
||||||
|
<span class='fraction'>374/717</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">71.42% </span>
|
||||||
|
<span class="quiet">Branches</span>
|
||||||
|
<span class='fraction'>60/84</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">94.44% </span>
|
||||||
|
<span class="quiet">Functions</span>
|
||||||
|
<span class='fraction'>17/18</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">52.16% </span>
|
||||||
|
<span class="quiet">Lines</span>
|
||||||
|
<span class='fraction'>374/717</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<p class="quiet">
|
||||||
|
Press <em>n</em> or <em>j</em> to go to the next uncovered block, <em>b</em>, <em>p</em> or <em>k</em> for the previous block.
|
||||||
|
</p>
|
||||||
|
<template id="filterTemplate">
|
||||||
|
<div class="quiet">
|
||||||
|
Filter:
|
||||||
|
<input type="search" id="fileSearch">
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div class='status-line medium'></div>
|
||||||
|
<div class="pad1">
|
||||||
|
<table class="coverage-summary">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th data-col="file" data-fmt="html" data-html="true" class="file">File</th>
|
||||||
|
<th data-col="pic" data-type="number" data-fmt="html" data-html="true" class="pic"></th>
|
||||||
|
<th data-col="statements" data-type="number" data-fmt="pct" class="pct">Statements</th>
|
||||||
|
<th data-col="statements_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="branches" data-type="number" data-fmt="pct" class="pct">Branches</th>
|
||||||
|
<th data-col="branches_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="functions" data-type="number" data-fmt="pct" class="pct">Functions</th>
|
||||||
|
<th data-col="functions_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="lines" data-type="number" data-fmt="pct" class="pct">Lines</th>
|
||||||
|
<th data-col="lines_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody><tr>
|
||||||
|
<td class="file low" data-value="src"><a href="src/index.html">src</a></td>
|
||||||
|
<td data-value="0" class="pic low">
|
||||||
|
<div class="chart"><div class="cover-fill" style="width: 0%"></div><div class="cover-empty" style="width: 100%"></div></div>
|
||||||
|
</td>
|
||||||
|
<td data-value="0" class="pct low">0%</td>
|
||||||
|
<td data-value="264" class="abs low">0/264</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="2" class="abs high">2/2</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="2" class="abs high">2/2</td>
|
||||||
|
<td data-value="0" class="pct low">0%</td>
|
||||||
|
<td data-value="264" class="abs low">0/264</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
<tr>
|
||||||
|
<td class="file medium" data-value="src/middleware"><a href="src/middleware/index.html">src/middleware</a></td>
|
||||||
|
<td data-value="60.6" class="pic medium">
|
||||||
|
<div class="chart"><div class="cover-fill" style="width: 60%"></div><div class="cover-empty" style="width: 40%"></div></div>
|
||||||
|
</td>
|
||||||
|
<td data-value="60.6" class="pct medium">60.6%</td>
|
||||||
|
<td data-value="66" class="abs medium">40/66</td>
|
||||||
|
<td data-value="44.44" class="pct low">44.44%</td>
|
||||||
|
<td data-value="9" class="abs low">4/9</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="2" class="abs high">2/2</td>
|
||||||
|
<td data-value="60.6" class="pct medium">60.6%</td>
|
||||||
|
<td data-value="66" class="abs medium">40/66</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
<tr>
|
||||||
|
<td class="file high" data-value="src/store"><a href="src/store/index.html">src/store</a></td>
|
||||||
|
<td data-value="86.3" class="pic high">
|
||||||
|
<div class="chart"><div class="cover-fill" style="width: 86%"></div><div class="cover-empty" style="width: 14%"></div></div>
|
||||||
|
</td>
|
||||||
|
<td data-value="86.3" class="pct high">86.3%</td>
|
||||||
|
<td data-value="387" class="abs high">334/387</td>
|
||||||
|
<td data-value="73.97" class="pct medium">73.97%</td>
|
||||||
|
<td data-value="73" class="abs medium">54/73</td>
|
||||||
|
<td data-value="92.85" class="pct high">92.85%</td>
|
||||||
|
<td data-value="14" class="abs high">13/14</td>
|
||||||
|
<td data-value="86.3" class="pct high">86.3%</td>
|
||||||
|
<td data-value="387" class="abs high">334/387</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
<div class='push'></div><!-- for sticky footer -->
|
||||||
|
</div><!-- /wrapper -->
|
||||||
|
<div class='footer quiet pad2 space-top1 center small'>
|
||||||
|
Code coverage generated by
|
||||||
|
<a href="https://istanbul.js.org/" target="_blank" rel="noopener noreferrer">istanbul</a>
|
||||||
|
at 2025-03-19T22:40:14.575Z
|
||||||
|
</div>
|
||||||
|
<script src="prettify.js"></script>
|
||||||
|
<script>
|
||||||
|
window.onload = function () {
|
||||||
|
prettyPrint();
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<script src="sorter.js"></script>
|
||||||
|
<script src="block-navigation.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
||||||
1
coverage/prettify.css
Normal file
1
coverage/prettify.css
Normal file
@@ -0,0 +1 @@
|
|||||||
|
.pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee}
|
||||||
2
coverage/prettify.js
Normal file
2
coverage/prettify.js
Normal file
File diff suppressed because one or more lines are too long
BIN
coverage/sort-arrow-sprite.png
Normal file
BIN
coverage/sort-arrow-sprite.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 138 B |
196
coverage/sorter.js
Normal file
196
coverage/sorter.js
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
/* eslint-disable */
|
||||||
|
var addSorting = (function() {
|
||||||
|
'use strict';
|
||||||
|
var cols,
|
||||||
|
currentSort = {
|
||||||
|
index: 0,
|
||||||
|
desc: false
|
||||||
|
};
|
||||||
|
|
||||||
|
// returns the summary table element
|
||||||
|
function getTable() {
|
||||||
|
return document.querySelector('.coverage-summary');
|
||||||
|
}
|
||||||
|
// returns the thead element of the summary table
|
||||||
|
function getTableHeader() {
|
||||||
|
return getTable().querySelector('thead tr');
|
||||||
|
}
|
||||||
|
// returns the tbody element of the summary table
|
||||||
|
function getTableBody() {
|
||||||
|
return getTable().querySelector('tbody');
|
||||||
|
}
|
||||||
|
// returns the th element for nth column
|
||||||
|
function getNthColumn(n) {
|
||||||
|
return getTableHeader().querySelectorAll('th')[n];
|
||||||
|
}
|
||||||
|
|
||||||
|
function onFilterInput() {
|
||||||
|
const searchValue = document.getElementById('fileSearch').value;
|
||||||
|
const rows = document.getElementsByTagName('tbody')[0].children;
|
||||||
|
for (let i = 0; i < rows.length; i++) {
|
||||||
|
const row = rows[i];
|
||||||
|
if (
|
||||||
|
row.textContent
|
||||||
|
.toLowerCase()
|
||||||
|
.includes(searchValue.toLowerCase())
|
||||||
|
) {
|
||||||
|
row.style.display = '';
|
||||||
|
} else {
|
||||||
|
row.style.display = 'none';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// loads the search box
|
||||||
|
function addSearchBox() {
|
||||||
|
var template = document.getElementById('filterTemplate');
|
||||||
|
var templateClone = template.content.cloneNode(true);
|
||||||
|
templateClone.getElementById('fileSearch').oninput = onFilterInput;
|
||||||
|
template.parentElement.appendChild(templateClone);
|
||||||
|
}
|
||||||
|
|
||||||
|
// loads all columns
|
||||||
|
function loadColumns() {
|
||||||
|
var colNodes = getTableHeader().querySelectorAll('th'),
|
||||||
|
colNode,
|
||||||
|
cols = [],
|
||||||
|
col,
|
||||||
|
i;
|
||||||
|
|
||||||
|
for (i = 0; i < colNodes.length; i += 1) {
|
||||||
|
colNode = colNodes[i];
|
||||||
|
col = {
|
||||||
|
key: colNode.getAttribute('data-col'),
|
||||||
|
sortable: !colNode.getAttribute('data-nosort'),
|
||||||
|
type: colNode.getAttribute('data-type') || 'string'
|
||||||
|
};
|
||||||
|
cols.push(col);
|
||||||
|
if (col.sortable) {
|
||||||
|
col.defaultDescSort = col.type === 'number';
|
||||||
|
colNode.innerHTML =
|
||||||
|
colNode.innerHTML + '<span class="sorter"></span>';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return cols;
|
||||||
|
}
|
||||||
|
// attaches a data attribute to every tr element with an object
|
||||||
|
// of data values keyed by column name
|
||||||
|
function loadRowData(tableRow) {
|
||||||
|
var tableCols = tableRow.querySelectorAll('td'),
|
||||||
|
colNode,
|
||||||
|
col,
|
||||||
|
data = {},
|
||||||
|
i,
|
||||||
|
val;
|
||||||
|
for (i = 0; i < tableCols.length; i += 1) {
|
||||||
|
colNode = tableCols[i];
|
||||||
|
col = cols[i];
|
||||||
|
val = colNode.getAttribute('data-value');
|
||||||
|
if (col.type === 'number') {
|
||||||
|
val = Number(val);
|
||||||
|
}
|
||||||
|
data[col.key] = val;
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
// loads all row data
|
||||||
|
function loadData() {
|
||||||
|
var rows = getTableBody().querySelectorAll('tr'),
|
||||||
|
i;
|
||||||
|
|
||||||
|
for (i = 0; i < rows.length; i += 1) {
|
||||||
|
rows[i].data = loadRowData(rows[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// sorts the table using the data for the ith column
|
||||||
|
function sortByIndex(index, desc) {
|
||||||
|
var key = cols[index].key,
|
||||||
|
sorter = function(a, b) {
|
||||||
|
a = a.data[key];
|
||||||
|
b = b.data[key];
|
||||||
|
return a < b ? -1 : a > b ? 1 : 0;
|
||||||
|
},
|
||||||
|
finalSorter = sorter,
|
||||||
|
tableBody = document.querySelector('.coverage-summary tbody'),
|
||||||
|
rowNodes = tableBody.querySelectorAll('tr'),
|
||||||
|
rows = [],
|
||||||
|
i;
|
||||||
|
|
||||||
|
if (desc) {
|
||||||
|
finalSorter = function(a, b) {
|
||||||
|
return -1 * sorter(a, b);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < rowNodes.length; i += 1) {
|
||||||
|
rows.push(rowNodes[i]);
|
||||||
|
tableBody.removeChild(rowNodes[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
rows.sort(finalSorter);
|
||||||
|
|
||||||
|
for (i = 0; i < rows.length; i += 1) {
|
||||||
|
tableBody.appendChild(rows[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// removes sort indicators for current column being sorted
|
||||||
|
function removeSortIndicators() {
|
||||||
|
var col = getNthColumn(currentSort.index),
|
||||||
|
cls = col.className;
|
||||||
|
|
||||||
|
cls = cls.replace(/ sorted$/, '').replace(/ sorted-desc$/, '');
|
||||||
|
col.className = cls;
|
||||||
|
}
|
||||||
|
// adds sort indicators for current column being sorted
|
||||||
|
function addSortIndicators() {
|
||||||
|
getNthColumn(currentSort.index).className += currentSort.desc
|
||||||
|
? ' sorted-desc'
|
||||||
|
: ' sorted';
|
||||||
|
}
|
||||||
|
// adds event listeners for all sorter widgets
|
||||||
|
function enableUI() {
|
||||||
|
var i,
|
||||||
|
el,
|
||||||
|
ithSorter = function ithSorter(i) {
|
||||||
|
var col = cols[i];
|
||||||
|
|
||||||
|
return function() {
|
||||||
|
var desc = col.defaultDescSort;
|
||||||
|
|
||||||
|
if (currentSort.index === i) {
|
||||||
|
desc = !currentSort.desc;
|
||||||
|
}
|
||||||
|
sortByIndex(i, desc);
|
||||||
|
removeSortIndicators();
|
||||||
|
currentSort.index = i;
|
||||||
|
currentSort.desc = desc;
|
||||||
|
addSortIndicators();
|
||||||
|
};
|
||||||
|
};
|
||||||
|
for (i = 0; i < cols.length; i += 1) {
|
||||||
|
if (cols[i].sortable) {
|
||||||
|
// add the click event handler on the th so users
|
||||||
|
// dont have to click on those tiny arrows
|
||||||
|
el = getNthColumn(i).querySelector('.sorter').parentElement;
|
||||||
|
if (el.addEventListener) {
|
||||||
|
el.addEventListener('click', ithSorter(i));
|
||||||
|
} else {
|
||||||
|
el.attachEvent('onclick', ithSorter(i));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// adds sorting functionality to the UI
|
||||||
|
return function() {
|
||||||
|
if (!getTable()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
cols = loadColumns();
|
||||||
|
loadData();
|
||||||
|
addSearchBox();
|
||||||
|
addSortIndicators();
|
||||||
|
enableUI();
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
|
window.addEventListener('load', addSorting);
|
||||||
190
coverage/src/cli.ts.html
Normal file
190
coverage/src/cli.ts.html
Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
|
||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title>Code coverage report for src/cli.ts</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="stylesheet" href="../prettify.css" />
|
||||||
|
<link rel="stylesheet" href="../base.css" />
|
||||||
|
<link rel="shortcut icon" type="image/x-icon" href="../favicon.png" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<style type='text/css'>
|
||||||
|
.coverage-summary .sorter {
|
||||||
|
background-image: url(../sort-arrow-sprite.png);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class='wrapper'>
|
||||||
|
<div class='pad1'>
|
||||||
|
<h1><a href="../index.html">All files</a> / <a href="index.html">src</a> cli.ts</h1>
|
||||||
|
<div class='clearfix'>
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">0% </span>
|
||||||
|
<span class="quiet">Statements</span>
|
||||||
|
<span class='fraction'>0/28</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">100% </span>
|
||||||
|
<span class="quiet">Branches</span>
|
||||||
|
<span class='fraction'>1/1</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">100% </span>
|
||||||
|
<span class="quiet">Functions</span>
|
||||||
|
<span class='fraction'>1/1</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">0% </span>
|
||||||
|
<span class="quiet">Lines</span>
|
||||||
|
<span class='fraction'>0/28</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<p class="quiet">
|
||||||
|
Press <em>n</em> or <em>j</em> to go to the next uncovered block, <em>b</em>, <em>p</em> or <em>k</em> for the previous block.
|
||||||
|
</p>
|
||||||
|
<template id="filterTemplate">
|
||||||
|
<div class="quiet">
|
||||||
|
Filter:
|
||||||
|
<input type="search" id="fileSearch">
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div class='status-line low'></div>
|
||||||
|
<pre><table class="coverage">
|
||||||
|
<tr><td class="line-count quiet"><a name='L1'></a><a href='#L1'>1</a>
|
||||||
|
<a name='L2'></a><a href='#L2'>2</a>
|
||||||
|
<a name='L3'></a><a href='#L3'>3</a>
|
||||||
|
<a name='L4'></a><a href='#L4'>4</a>
|
||||||
|
<a name='L5'></a><a href='#L5'>5</a>
|
||||||
|
<a name='L6'></a><a href='#L6'>6</a>
|
||||||
|
<a name='L7'></a><a href='#L7'>7</a>
|
||||||
|
<a name='L8'></a><a href='#L8'>8</a>
|
||||||
|
<a name='L9'></a><a href='#L9'>9</a>
|
||||||
|
<a name='L10'></a><a href='#L10'>10</a>
|
||||||
|
<a name='L11'></a><a href='#L11'>11</a>
|
||||||
|
<a name='L12'></a><a href='#L12'>12</a>
|
||||||
|
<a name='L13'></a><a href='#L13'>13</a>
|
||||||
|
<a name='L14'></a><a href='#L14'>14</a>
|
||||||
|
<a name='L15'></a><a href='#L15'>15</a>
|
||||||
|
<a name='L16'></a><a href='#L16'>16</a>
|
||||||
|
<a name='L17'></a><a href='#L17'>17</a>
|
||||||
|
<a name='L18'></a><a href='#L18'>18</a>
|
||||||
|
<a name='L19'></a><a href='#L19'>19</a>
|
||||||
|
<a name='L20'></a><a href='#L20'>20</a>
|
||||||
|
<a name='L21'></a><a href='#L21'>21</a>
|
||||||
|
<a name='L22'></a><a href='#L22'>22</a>
|
||||||
|
<a name='L23'></a><a href='#L23'>23</a>
|
||||||
|
<a name='L24'></a><a href='#L24'>24</a>
|
||||||
|
<a name='L25'></a><a href='#L25'>25</a>
|
||||||
|
<a name='L26'></a><a href='#L26'>26</a>
|
||||||
|
<a name='L27'></a><a href='#L27'>27</a>
|
||||||
|
<a name='L28'></a><a href='#L28'>28</a>
|
||||||
|
<a name='L29'></a><a href='#L29'>29</a>
|
||||||
|
<a name='L30'></a><a href='#L30'>30</a>
|
||||||
|
<a name='L31'></a><a href='#L31'>31</a>
|
||||||
|
<a name='L32'></a><a href='#L32'>32</a>
|
||||||
|
<a name='L33'></a><a href='#L33'>33</a>
|
||||||
|
<a name='L34'></a><a href='#L34'>34</a>
|
||||||
|
<a name='L35'></a><a href='#L35'>35</a>
|
||||||
|
<a name='L36'></a><a href='#L36'>36</a></td><td class="line-coverage quiet"><span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span></td><td class="text"><pre class="prettyprint lang-js">#!/usr/bin/env node
|
||||||
|
|
||||||
|
<span class="cstat-no" title="statement not covered" >import { Command } from 'commander';</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" >import chalk from 'chalk';</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" >import { startServers } from './server.js';</span>
|
||||||
|
|
||||||
|
<span class="cstat-no" title="statement not covered" >const program = new Command();</span>
|
||||||
|
|
||||||
|
<span class="cstat-no" title="statement not covered" >console.log('Starting Arbiter...');</span>
|
||||||
|
|
||||||
|
<span class="cstat-no" title="statement not covered" >program</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .name('arbiter')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .description('API proxy with OpenAPI generation and HAR export capabilities')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .version('1.0.0')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .requiredOption('-t, --target <url>', 'target API URL to proxy to')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .option('-p, --port <number>', 'port to run the proxy server on', '8080')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .option('-d, --docs-port <number>', 'port to run the documentation server on', '9000')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .option('-k, --key <string>', 'API key to add to proxied requests')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .option('--docs-only', 'run only the documentation server')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .option('--proxy-only', 'run only the proxy server')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .option('-v, --verbose', 'enable verbose logging')</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > .parse(process.argv);</span>
|
||||||
|
|
||||||
|
<span class="cstat-no" title="statement not covered" >const options = program.opts();</span>
|
||||||
|
|
||||||
|
// Start the servers
|
||||||
|
<span class="cstat-no" title="statement not covered" >startServers({</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > target: options.target,</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > proxyPort: parseInt(options.port),</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > docsPort: parseInt(options.docsPort),</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > apiKey: options.key,</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > verbose: options.verbose</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" >}).catch((error) => {</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > console.error(chalk.red('Failed to start servers:'), error);</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > process.exit(1);</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" >});</span></pre></td></tr></table></pre>
|
||||||
|
|
||||||
|
<div class='push'></div><!-- for sticky footer -->
|
||||||
|
</div><!-- /wrapper -->
|
||||||
|
<div class='footer quiet pad2 space-top1 center small'>
|
||||||
|
Code coverage generated by
|
||||||
|
<a href="https://istanbul.js.org/" target="_blank" rel="noopener noreferrer">istanbul</a>
|
||||||
|
at 2025-03-19T22:40:14.575Z
|
||||||
|
</div>
|
||||||
|
<script src="../prettify.js"></script>
|
||||||
|
<script>
|
||||||
|
window.onload = function () {
|
||||||
|
prettyPrint();
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<script src="../sorter.js"></script>
|
||||||
|
<script src="../block-navigation.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
||||||
131
coverage/src/index.html
Normal file
131
coverage/src/index.html
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
|
||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title>Code coverage report for src</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="stylesheet" href="../prettify.css" />
|
||||||
|
<link rel="stylesheet" href="../base.css" />
|
||||||
|
<link rel="shortcut icon" type="image/x-icon" href="../favicon.png" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<style type='text/css'>
|
||||||
|
.coverage-summary .sorter {
|
||||||
|
background-image: url(../sort-arrow-sprite.png);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class='wrapper'>
|
||||||
|
<div class='pad1'>
|
||||||
|
<h1><a href="../index.html">All files</a> src</h1>
|
||||||
|
<div class='clearfix'>
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">0% </span>
|
||||||
|
<span class="quiet">Statements</span>
|
||||||
|
<span class='fraction'>0/264</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">100% </span>
|
||||||
|
<span class="quiet">Branches</span>
|
||||||
|
<span class='fraction'>2/2</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">100% </span>
|
||||||
|
<span class="quiet">Functions</span>
|
||||||
|
<span class='fraction'>2/2</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">0% </span>
|
||||||
|
<span class="quiet">Lines</span>
|
||||||
|
<span class='fraction'>0/264</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<p class="quiet">
|
||||||
|
Press <em>n</em> or <em>j</em> to go to the next uncovered block, <em>b</em>, <em>p</em> or <em>k</em> for the previous block.
|
||||||
|
</p>
|
||||||
|
<template id="filterTemplate">
|
||||||
|
<div class="quiet">
|
||||||
|
Filter:
|
||||||
|
<input type="search" id="fileSearch">
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div class='status-line low'></div>
|
||||||
|
<div class="pad1">
|
||||||
|
<table class="coverage-summary">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th data-col="file" data-fmt="html" data-html="true" class="file">File</th>
|
||||||
|
<th data-col="pic" data-type="number" data-fmt="html" data-html="true" class="pic"></th>
|
||||||
|
<th data-col="statements" data-type="number" data-fmt="pct" class="pct">Statements</th>
|
||||||
|
<th data-col="statements_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="branches" data-type="number" data-fmt="pct" class="pct">Branches</th>
|
||||||
|
<th data-col="branches_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="functions" data-type="number" data-fmt="pct" class="pct">Functions</th>
|
||||||
|
<th data-col="functions_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="lines" data-type="number" data-fmt="pct" class="pct">Lines</th>
|
||||||
|
<th data-col="lines_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody><tr>
|
||||||
|
<td class="file low" data-value="cli.ts"><a href="cli.ts.html">cli.ts</a></td>
|
||||||
|
<td data-value="0" class="pic low">
|
||||||
|
<div class="chart"><div class="cover-fill" style="width: 0%"></div><div class="cover-empty" style="width: 100%"></div></div>
|
||||||
|
</td>
|
||||||
|
<td data-value="0" class="pct low">0%</td>
|
||||||
|
<td data-value="28" class="abs low">0/28</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="1" class="abs high">1/1</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="1" class="abs high">1/1</td>
|
||||||
|
<td data-value="0" class="pct low">0%</td>
|
||||||
|
<td data-value="28" class="abs low">0/28</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
<tr>
|
||||||
|
<td class="file low" data-value="server.ts"><a href="server.ts.html">server.ts</a></td>
|
||||||
|
<td data-value="0" class="pic low">
|
||||||
|
<div class="chart"><div class="cover-fill" style="width: 0%"></div><div class="cover-empty" style="width: 100%"></div></div>
|
||||||
|
</td>
|
||||||
|
<td data-value="0" class="pct low">0%</td>
|
||||||
|
<td data-value="236" class="abs low">0/236</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="1" class="abs high">1/1</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="1" class="abs high">1/1</td>
|
||||||
|
<td data-value="0" class="pct low">0%</td>
|
||||||
|
<td data-value="236" class="abs low">0/236</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
<div class='push'></div><!-- for sticky footer -->
|
||||||
|
</div><!-- /wrapper -->
|
||||||
|
<div class='footer quiet pad2 space-top1 center small'>
|
||||||
|
Code coverage generated by
|
||||||
|
<a href="https://istanbul.js.org/" target="_blank" rel="noopener noreferrer">istanbul</a>
|
||||||
|
at 2025-03-19T22:40:14.575Z
|
||||||
|
</div>
|
||||||
|
<script src="../prettify.js"></script>
|
||||||
|
<script>
|
||||||
|
window.onload = function () {
|
||||||
|
prettyPrint();
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<script src="../sorter.js"></script>
|
||||||
|
<script src="../block-navigation.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
||||||
148
coverage/src/middleware/apiDocGenerator.ts.html
Normal file
148
coverage/src/middleware/apiDocGenerator.ts.html
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
|
||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title>Code coverage report for src/middleware/apiDocGenerator.ts</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="stylesheet" href="../../prettify.css" />
|
||||||
|
<link rel="stylesheet" href="../../base.css" />
|
||||||
|
<link rel="shortcut icon" type="image/x-icon" href="../../favicon.png" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<style type='text/css'>
|
||||||
|
.coverage-summary .sorter {
|
||||||
|
background-image: url(../../sort-arrow-sprite.png);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class='wrapper'>
|
||||||
|
<div class='pad1'>
|
||||||
|
<h1><a href="../../index.html">All files</a> / <a href="index.html">src/middleware</a> apiDocGenerator.ts</h1>
|
||||||
|
<div class='clearfix'>
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">0% </span>
|
||||||
|
<span class="quiet">Statements</span>
|
||||||
|
<span class='fraction'>0/18</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">100% </span>
|
||||||
|
<span class="quiet">Branches</span>
|
||||||
|
<span class='fraction'>1/1</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">100% </span>
|
||||||
|
<span class="quiet">Functions</span>
|
||||||
|
<span class='fraction'>1/1</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">0% </span>
|
||||||
|
<span class="quiet">Lines</span>
|
||||||
|
<span class='fraction'>0/18</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<p class="quiet">
|
||||||
|
Press <em>n</em> or <em>j</em> to go to the next uncovered block, <em>b</em>, <em>p</em> or <em>k</em> for the previous block.
|
||||||
|
</p>
|
||||||
|
<template id="filterTemplate">
|
||||||
|
<div class="quiet">
|
||||||
|
Filter:
|
||||||
|
<input type="search" id="fileSearch">
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div class='status-line low'></div>
|
||||||
|
<pre><table class="coverage">
|
||||||
|
<tr><td class="line-count quiet"><a name='L1'></a><a href='#L1'>1</a>
|
||||||
|
<a name='L2'></a><a href='#L2'>2</a>
|
||||||
|
<a name='L3'></a><a href='#L3'>3</a>
|
||||||
|
<a name='L4'></a><a href='#L4'>4</a>
|
||||||
|
<a name='L5'></a><a href='#L5'>5</a>
|
||||||
|
<a name='L6'></a><a href='#L6'>6</a>
|
||||||
|
<a name='L7'></a><a href='#L7'>7</a>
|
||||||
|
<a name='L8'></a><a href='#L8'>8</a>
|
||||||
|
<a name='L9'></a><a href='#L9'>9</a>
|
||||||
|
<a name='L10'></a><a href='#L10'>10</a>
|
||||||
|
<a name='L11'></a><a href='#L11'>11</a>
|
||||||
|
<a name='L12'></a><a href='#L12'>12</a>
|
||||||
|
<a name='L13'></a><a href='#L13'>13</a>
|
||||||
|
<a name='L14'></a><a href='#L14'>14</a>
|
||||||
|
<a name='L15'></a><a href='#L15'>15</a>
|
||||||
|
<a name='L16'></a><a href='#L16'>16</a>
|
||||||
|
<a name='L17'></a><a href='#L17'>17</a>
|
||||||
|
<a name='L18'></a><a href='#L18'>18</a>
|
||||||
|
<a name='L19'></a><a href='#L19'>19</a>
|
||||||
|
<a name='L20'></a><a href='#L20'>20</a>
|
||||||
|
<a name='L21'></a><a href='#L21'>21</a>
|
||||||
|
<a name='L22'></a><a href='#L22'>22</a></td><td class="line-coverage quiet"><span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span></td><td class="text"><pre class="prettyprint lang-js">import { Context } from 'hono';
|
||||||
|
<span class="cstat-no" title="statement not covered" >import { openApiStore } from '../store/openApiStore.js';</span>
|
||||||
|
|
||||||
|
<span class="cstat-no" title="statement not covered" >export const apiDocGenerator = async (c: Context, next: () => Promise<void>) => {</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > await next();</span>
|
||||||
|
|
||||||
|
// Record the API call in OpenAPI format
|
||||||
|
<span class="cstat-no" title="statement not covered" > openApiStore.recordEndpoint(</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > c.req.path,</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > c.req.method.toLowerCase(),</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > {</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > query: Object.fromEntries(new URL(c.req.url).searchParams),</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > body: await c.req.json().catch(() => null),</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > contentType: c.req.header('content-type') || 'application/json',</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > },</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > {</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > status: c.res.status,</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > body: await c.res.clone().json().catch(() => null),</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > contentType: c.res.headers.get('content-type') || 'application/json',</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > }</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > );</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" >}; </span></pre></td></tr></table></pre>
|
||||||
|
|
||||||
|
<div class='push'></div><!-- for sticky footer -->
|
||||||
|
</div><!-- /wrapper -->
|
||||||
|
<div class='footer quiet pad2 space-top1 center small'>
|
||||||
|
Code coverage generated by
|
||||||
|
<a href="https://istanbul.js.org/" target="_blank" rel="noopener noreferrer">istanbul</a>
|
||||||
|
at 2025-03-19T22:40:14.575Z
|
||||||
|
</div>
|
||||||
|
<script src="../../prettify.js"></script>
|
||||||
|
<script>
|
||||||
|
window.onload = function () {
|
||||||
|
prettyPrint();
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<script src="../../sorter.js"></script>
|
||||||
|
<script src="../../block-navigation.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
||||||
364
coverage/src/middleware/harRecorder.ts.html
Normal file
364
coverage/src/middleware/harRecorder.ts.html
Normal file
@@ -0,0 +1,364 @@
|
|||||||
|
|
||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title>Code coverage report for src/middleware/harRecorder.ts</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="stylesheet" href="../../prettify.css" />
|
||||||
|
<link rel="stylesheet" href="../../base.css" />
|
||||||
|
<link rel="shortcut icon" type="image/x-icon" href="../../favicon.png" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<style type='text/css'>
|
||||||
|
.coverage-summary .sorter {
|
||||||
|
background-image: url(../../sort-arrow-sprite.png);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class='wrapper'>
|
||||||
|
<div class='pad1'>
|
||||||
|
<h1><a href="../../index.html">All files</a> / <a href="index.html">src/middleware</a> harRecorder.ts</h1>
|
||||||
|
<div class='clearfix'>
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">83.33% </span>
|
||||||
|
<span class="quiet">Statements</span>
|
||||||
|
<span class='fraction'>40/48</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">37.5% </span>
|
||||||
|
<span class="quiet">Branches</span>
|
||||||
|
<span class='fraction'>3/8</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">100% </span>
|
||||||
|
<span class="quiet">Functions</span>
|
||||||
|
<span class='fraction'>1/1</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">83.33% </span>
|
||||||
|
<span class="quiet">Lines</span>
|
||||||
|
<span class='fraction'>40/48</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<p class="quiet">
|
||||||
|
Press <em>n</em> or <em>j</em> to go to the next uncovered block, <em>b</em>, <em>p</em> or <em>k</em> for the previous block.
|
||||||
|
</p>
|
||||||
|
<template id="filterTemplate">
|
||||||
|
<div class="quiet">
|
||||||
|
Filter:
|
||||||
|
<input type="search" id="fileSearch">
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div class='status-line high'></div>
|
||||||
|
<pre><table class="coverage">
|
||||||
|
<tr><td class="line-count quiet"><a name='L1'></a><a href='#L1'>1</a>
|
||||||
|
<a name='L2'></a><a href='#L2'>2</a>
|
||||||
|
<a name='L3'></a><a href='#L3'>3</a>
|
||||||
|
<a name='L4'></a><a href='#L4'>4</a>
|
||||||
|
<a name='L5'></a><a href='#L5'>5</a>
|
||||||
|
<a name='L6'></a><a href='#L6'>6</a>
|
||||||
|
<a name='L7'></a><a href='#L7'>7</a>
|
||||||
|
<a name='L8'></a><a href='#L8'>8</a>
|
||||||
|
<a name='L9'></a><a href='#L9'>9</a>
|
||||||
|
<a name='L10'></a><a href='#L10'>10</a>
|
||||||
|
<a name='L11'></a><a href='#L11'>11</a>
|
||||||
|
<a name='L12'></a><a href='#L12'>12</a>
|
||||||
|
<a name='L13'></a><a href='#L13'>13</a>
|
||||||
|
<a name='L14'></a><a href='#L14'>14</a>
|
||||||
|
<a name='L15'></a><a href='#L15'>15</a>
|
||||||
|
<a name='L16'></a><a href='#L16'>16</a>
|
||||||
|
<a name='L17'></a><a href='#L17'>17</a>
|
||||||
|
<a name='L18'></a><a href='#L18'>18</a>
|
||||||
|
<a name='L19'></a><a href='#L19'>19</a>
|
||||||
|
<a name='L20'></a><a href='#L20'>20</a>
|
||||||
|
<a name='L21'></a><a href='#L21'>21</a>
|
||||||
|
<a name='L22'></a><a href='#L22'>22</a>
|
||||||
|
<a name='L23'></a><a href='#L23'>23</a>
|
||||||
|
<a name='L24'></a><a href='#L24'>24</a>
|
||||||
|
<a name='L25'></a><a href='#L25'>25</a>
|
||||||
|
<a name='L26'></a><a href='#L26'>26</a>
|
||||||
|
<a name='L27'></a><a href='#L27'>27</a>
|
||||||
|
<a name='L28'></a><a href='#L28'>28</a>
|
||||||
|
<a name='L29'></a><a href='#L29'>29</a>
|
||||||
|
<a name='L30'></a><a href='#L30'>30</a>
|
||||||
|
<a name='L31'></a><a href='#L31'>31</a>
|
||||||
|
<a name='L32'></a><a href='#L32'>32</a>
|
||||||
|
<a name='L33'></a><a href='#L33'>33</a>
|
||||||
|
<a name='L34'></a><a href='#L34'>34</a>
|
||||||
|
<a name='L35'></a><a href='#L35'>35</a>
|
||||||
|
<a name='L36'></a><a href='#L36'>36</a>
|
||||||
|
<a name='L37'></a><a href='#L37'>37</a>
|
||||||
|
<a name='L38'></a><a href='#L38'>38</a>
|
||||||
|
<a name='L39'></a><a href='#L39'>39</a>
|
||||||
|
<a name='L40'></a><a href='#L40'>40</a>
|
||||||
|
<a name='L41'></a><a href='#L41'>41</a>
|
||||||
|
<a name='L42'></a><a href='#L42'>42</a>
|
||||||
|
<a name='L43'></a><a href='#L43'>43</a>
|
||||||
|
<a name='L44'></a><a href='#L44'>44</a>
|
||||||
|
<a name='L45'></a><a href='#L45'>45</a>
|
||||||
|
<a name='L46'></a><a href='#L46'>46</a>
|
||||||
|
<a name='L47'></a><a href='#L47'>47</a>
|
||||||
|
<a name='L48'></a><a href='#L48'>48</a>
|
||||||
|
<a name='L49'></a><a href='#L49'>49</a>
|
||||||
|
<a name='L50'></a><a href='#L50'>50</a>
|
||||||
|
<a name='L51'></a><a href='#L51'>51</a>
|
||||||
|
<a name='L52'></a><a href='#L52'>52</a>
|
||||||
|
<a name='L53'></a><a href='#L53'>53</a>
|
||||||
|
<a name='L54'></a><a href='#L54'>54</a>
|
||||||
|
<a name='L55'></a><a href='#L55'>55</a>
|
||||||
|
<a name='L56'></a><a href='#L56'>56</a>
|
||||||
|
<a name='L57'></a><a href='#L57'>57</a>
|
||||||
|
<a name='L58'></a><a href='#L58'>58</a>
|
||||||
|
<a name='L59'></a><a href='#L59'>59</a>
|
||||||
|
<a name='L60'></a><a href='#L60'>60</a>
|
||||||
|
<a name='L61'></a><a href='#L61'>61</a>
|
||||||
|
<a name='L62'></a><a href='#L62'>62</a>
|
||||||
|
<a name='L63'></a><a href='#L63'>63</a>
|
||||||
|
<a name='L64'></a><a href='#L64'>64</a>
|
||||||
|
<a name='L65'></a><a href='#L65'>65</a>
|
||||||
|
<a name='L66'></a><a href='#L66'>66</a>
|
||||||
|
<a name='L67'></a><a href='#L67'>67</a>
|
||||||
|
<a name='L68'></a><a href='#L68'>68</a>
|
||||||
|
<a name='L69'></a><a href='#L69'>69</a>
|
||||||
|
<a name='L70'></a><a href='#L70'>70</a>
|
||||||
|
<a name='L71'></a><a href='#L71'>71</a>
|
||||||
|
<a name='L72'></a><a href='#L72'>72</a>
|
||||||
|
<a name='L73'></a><a href='#L73'>73</a>
|
||||||
|
<a name='L74'></a><a href='#L74'>74</a>
|
||||||
|
<a name='L75'></a><a href='#L75'>75</a>
|
||||||
|
<a name='L76'></a><a href='#L76'>76</a>
|
||||||
|
<a name='L77'></a><a href='#L77'>77</a>
|
||||||
|
<a name='L78'></a><a href='#L78'>78</a>
|
||||||
|
<a name='L79'></a><a href='#L79'>79</a>
|
||||||
|
<a name='L80'></a><a href='#L80'>80</a>
|
||||||
|
<a name='L81'></a><a href='#L81'>81</a>
|
||||||
|
<a name='L82'></a><a href='#L82'>82</a>
|
||||||
|
<a name='L83'></a><a href='#L83'>83</a>
|
||||||
|
<a name='L84'></a><a href='#L84'>84</a>
|
||||||
|
<a name='L85'></a><a href='#L85'>85</a>
|
||||||
|
<a name='L86'></a><a href='#L86'>86</a>
|
||||||
|
<a name='L87'></a><a href='#L87'>87</a>
|
||||||
|
<a name='L88'></a><a href='#L88'>88</a>
|
||||||
|
<a name='L89'></a><a href='#L89'>89</a>
|
||||||
|
<a name='L90'></a><a href='#L90'>90</a>
|
||||||
|
<a name='L91'></a><a href='#L91'>91</a>
|
||||||
|
<a name='L92'></a><a href='#L92'>92</a>
|
||||||
|
<a name='L93'></a><a href='#L93'>93</a>
|
||||||
|
<a name='L94'></a><a href='#L94'>94</a></td><td class="line-coverage quiet"><span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">1x</span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">2x</span>
|
||||||
|
<span class="cline-any cline-yes">2x</span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">8x</span>
|
||||||
|
<span class="cline-any cline-yes">8x</span>
|
||||||
|
<span class="cline-any cline-yes">8x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-no"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-neutral"> </span>
|
||||||
|
<span class="cline-any cline-yes">4x</span>
|
||||||
|
<span class="cline-any cline-yes">4x</span></td><td class="text"><pre class="prettyprint lang-js">import { Context, Next } from 'hono';
|
||||||
|
import { openApiStore } from '../store/openApiStore.js';
|
||||||
|
|
||||||
|
interface HAREntry {
|
||||||
|
startedDateTime: string;
|
||||||
|
time: number;
|
||||||
|
request: {
|
||||||
|
method: string;
|
||||||
|
url: string;
|
||||||
|
httpVersion: string;
|
||||||
|
headers: Array<{ name: string; value: string }>;
|
||||||
|
queryString: Array<{ name: string; value: string }>;
|
||||||
|
postData?: {
|
||||||
|
mimeType: string;
|
||||||
|
text: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
status: number;
|
||||||
|
statusText: string;
|
||||||
|
httpVersion: string;
|
||||||
|
headers: Array<{ name: string; value: string }>;
|
||||||
|
content: {
|
||||||
|
size: number;
|
||||||
|
mimeType: string;
|
||||||
|
text: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function harRecorder(c: Context, next: Next) {
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
// Get request body if present
|
||||||
|
let requestBody: any;
|
||||||
|
if (c.req.method !== <span class="branch-0 cbranch-no" title="branch not covered" >'GET' && c.req.method !== 'HEAD') <span class="branch-0 cbranch-no" title="branch not covered" >{</span></span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > try {</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > requestBody = await c.req.json();</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > } catch (e) {</span>
|
||||||
|
// Body might not be JSON
|
||||||
|
<span class="cstat-no" title="statement not covered" > requestBody = await c.req.text();</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > }</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > }</span>
|
||||||
|
|
||||||
|
// Get query parameters from URL
|
||||||
|
const url = new URL(c.req.url);
|
||||||
|
const queryParams: Record<string, string> = {};
|
||||||
|
for (const [key, value] of url.searchParams.entries()) {
|
||||||
|
queryParams[key] = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get all request headers
|
||||||
|
const requestHeaders: Record<string, string> = {};
|
||||||
|
Object.entries(c.req.header()).forEach(([key, value]) => {
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
requestHeaders[key] = value;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Call next middleware
|
||||||
|
await next();
|
||||||
|
|
||||||
|
// Calculate response time
|
||||||
|
const responseTime = Date.now() - startTime;
|
||||||
|
|
||||||
|
// Get response body
|
||||||
|
let responseBody: any;
|
||||||
|
try {
|
||||||
|
responseBody = await c.res.clone().json();
|
||||||
|
<span class="branch-0 cbranch-no" title="branch not covered" > } catch (e) {</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > responseBody = await c.res.clone().text();</span>
|
||||||
|
<span class="cstat-no" title="statement not covered" > }</span>
|
||||||
|
|
||||||
|
// Record the request/response in OpenAPI format
|
||||||
|
openApiStore.recordEndpoint(
|
||||||
|
c.req.path,
|
||||||
|
c.req.method.toLowerCase(),
|
||||||
|
{
|
||||||
|
query: queryParams,
|
||||||
|
body: requestBody,
|
||||||
|
contentType: c.req.header('content-type'<span class="branch-0 cbranch-no" title="branch not covered" >) || 'application/json',</span>
|
||||||
|
headers: requestHeaders
|
||||||
|
},
|
||||||
|
{
|
||||||
|
status: c.res.status,
|
||||||
|
body: responseBody,
|
||||||
|
contentType: c.res.headers.get('content-type'<span class="branch-0 cbranch-no" title="branch not covered" >) || 'application/json',</span>
|
||||||
|
headers: Object.fromEntries(c.res.headers.entries())
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// Set HAR data in context
|
||||||
|
c.set('har', openApiStore.generateHAR());
|
||||||
|
} </pre></td></tr></table></pre>
|
||||||
|
|
||||||
|
<div class='push'></div><!-- for sticky footer -->
|
||||||
|
</div><!-- /wrapper -->
|
||||||
|
<div class='footer quiet pad2 space-top1 center small'>
|
||||||
|
Code coverage generated by
|
||||||
|
<a href="https://istanbul.js.org/" target="_blank" rel="noopener noreferrer">istanbul</a>
|
||||||
|
at 2025-03-19T22:40:14.575Z
|
||||||
|
</div>
|
||||||
|
<script src="../../prettify.js"></script>
|
||||||
|
<script>
|
||||||
|
window.onload = function () {
|
||||||
|
prettyPrint();
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<script src="../../sorter.js"></script>
|
||||||
|
<script src="../../block-navigation.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
||||||
131
coverage/src/middleware/index.html
Normal file
131
coverage/src/middleware/index.html
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
|
||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title>Code coverage report for src/middleware</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="stylesheet" href="../../prettify.css" />
|
||||||
|
<link rel="stylesheet" href="../../base.css" />
|
||||||
|
<link rel="shortcut icon" type="image/x-icon" href="../../favicon.png" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<style type='text/css'>
|
||||||
|
.coverage-summary .sorter {
|
||||||
|
background-image: url(../../sort-arrow-sprite.png);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class='wrapper'>
|
||||||
|
<div class='pad1'>
|
||||||
|
<h1><a href="../../index.html">All files</a> src/middleware</h1>
|
||||||
|
<div class='clearfix'>
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">60.6% </span>
|
||||||
|
<span class="quiet">Statements</span>
|
||||||
|
<span class='fraction'>40/66</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">44.44% </span>
|
||||||
|
<span class="quiet">Branches</span>
|
||||||
|
<span class='fraction'>4/9</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">100% </span>
|
||||||
|
<span class="quiet">Functions</span>
|
||||||
|
<span class='fraction'>2/2</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">60.6% </span>
|
||||||
|
<span class="quiet">Lines</span>
|
||||||
|
<span class='fraction'>40/66</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<p class="quiet">
|
||||||
|
Press <em>n</em> or <em>j</em> to go to the next uncovered block, <em>b</em>, <em>p</em> or <em>k</em> for the previous block.
|
||||||
|
</p>
|
||||||
|
<template id="filterTemplate">
|
||||||
|
<div class="quiet">
|
||||||
|
Filter:
|
||||||
|
<input type="search" id="fileSearch">
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div class='status-line medium'></div>
|
||||||
|
<div class="pad1">
|
||||||
|
<table class="coverage-summary">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th data-col="file" data-fmt="html" data-html="true" class="file">File</th>
|
||||||
|
<th data-col="pic" data-type="number" data-fmt="html" data-html="true" class="pic"></th>
|
||||||
|
<th data-col="statements" data-type="number" data-fmt="pct" class="pct">Statements</th>
|
||||||
|
<th data-col="statements_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="branches" data-type="number" data-fmt="pct" class="pct">Branches</th>
|
||||||
|
<th data-col="branches_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="functions" data-type="number" data-fmt="pct" class="pct">Functions</th>
|
||||||
|
<th data-col="functions_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="lines" data-type="number" data-fmt="pct" class="pct">Lines</th>
|
||||||
|
<th data-col="lines_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody><tr>
|
||||||
|
<td class="file low" data-value="apiDocGenerator.ts"><a href="apiDocGenerator.ts.html">apiDocGenerator.ts</a></td>
|
||||||
|
<td data-value="0" class="pic low">
|
||||||
|
<div class="chart"><div class="cover-fill" style="width: 0%"></div><div class="cover-empty" style="width: 100%"></div></div>
|
||||||
|
</td>
|
||||||
|
<td data-value="0" class="pct low">0%</td>
|
||||||
|
<td data-value="18" class="abs low">0/18</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="1" class="abs high">1/1</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="1" class="abs high">1/1</td>
|
||||||
|
<td data-value="0" class="pct low">0%</td>
|
||||||
|
<td data-value="18" class="abs low">0/18</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
<tr>
|
||||||
|
<td class="file high" data-value="harRecorder.ts"><a href="harRecorder.ts.html">harRecorder.ts</a></td>
|
||||||
|
<td data-value="83.33" class="pic high">
|
||||||
|
<div class="chart"><div class="cover-fill" style="width: 83%"></div><div class="cover-empty" style="width: 17%"></div></div>
|
||||||
|
</td>
|
||||||
|
<td data-value="83.33" class="pct high">83.33%</td>
|
||||||
|
<td data-value="48" class="abs high">40/48</td>
|
||||||
|
<td data-value="37.5" class="pct low">37.5%</td>
|
||||||
|
<td data-value="8" class="abs low">3/8</td>
|
||||||
|
<td data-value="100" class="pct high">100%</td>
|
||||||
|
<td data-value="1" class="abs high">1/1</td>
|
||||||
|
<td data-value="83.33" class="pct high">83.33%</td>
|
||||||
|
<td data-value="48" class="abs high">40/48</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
<div class='push'></div><!-- for sticky footer -->
|
||||||
|
</div><!-- /wrapper -->
|
||||||
|
<div class='footer quiet pad2 space-top1 center small'>
|
||||||
|
Code coverage generated by
|
||||||
|
<a href="https://istanbul.js.org/" target="_blank" rel="noopener noreferrer">istanbul</a>
|
||||||
|
at 2025-03-19T22:40:14.575Z
|
||||||
|
</div>
|
||||||
|
<script src="../../prettify.js"></script>
|
||||||
|
<script>
|
||||||
|
window.onload = function () {
|
||||||
|
prettyPrint();
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<script src="../../sorter.js"></script>
|
||||||
|
<script src="../../block-navigation.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
||||||
1066
coverage/src/server.ts.html
Normal file
1066
coverage/src/server.ts.html
Normal file
File diff suppressed because it is too large
Load Diff
116
coverage/src/store/index.html
Normal file
116
coverage/src/store/index.html
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
|
||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title>Code coverage report for src/store</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="stylesheet" href="../../prettify.css" />
|
||||||
|
<link rel="stylesheet" href="../../base.css" />
|
||||||
|
<link rel="shortcut icon" type="image/x-icon" href="../../favicon.png" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<style type='text/css'>
|
||||||
|
.coverage-summary .sorter {
|
||||||
|
background-image: url(../../sort-arrow-sprite.png);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class='wrapper'>
|
||||||
|
<div class='pad1'>
|
||||||
|
<h1><a href="../../index.html">All files</a> src/store</h1>
|
||||||
|
<div class='clearfix'>
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">86.3% </span>
|
||||||
|
<span class="quiet">Statements</span>
|
||||||
|
<span class='fraction'>334/387</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">73.97% </span>
|
||||||
|
<span class="quiet">Branches</span>
|
||||||
|
<span class='fraction'>54/73</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">92.85% </span>
|
||||||
|
<span class="quiet">Functions</span>
|
||||||
|
<span class='fraction'>13/14</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class='fl pad1y space-right2'>
|
||||||
|
<span class="strong">86.3% </span>
|
||||||
|
<span class="quiet">Lines</span>
|
||||||
|
<span class='fraction'>334/387</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<p class="quiet">
|
||||||
|
Press <em>n</em> or <em>j</em> to go to the next uncovered block, <em>b</em>, <em>p</em> or <em>k</em> for the previous block.
|
||||||
|
</p>
|
||||||
|
<template id="filterTemplate">
|
||||||
|
<div class="quiet">
|
||||||
|
Filter:
|
||||||
|
<input type="search" id="fileSearch">
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div class='status-line high'></div>
|
||||||
|
<div class="pad1">
|
||||||
|
<table class="coverage-summary">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th data-col="file" data-fmt="html" data-html="true" class="file">File</th>
|
||||||
|
<th data-col="pic" data-type="number" data-fmt="html" data-html="true" class="pic"></th>
|
||||||
|
<th data-col="statements" data-type="number" data-fmt="pct" class="pct">Statements</th>
|
||||||
|
<th data-col="statements_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="branches" data-type="number" data-fmt="pct" class="pct">Branches</th>
|
||||||
|
<th data-col="branches_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="functions" data-type="number" data-fmt="pct" class="pct">Functions</th>
|
||||||
|
<th data-col="functions_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
<th data-col="lines" data-type="number" data-fmt="pct" class="pct">Lines</th>
|
||||||
|
<th data-col="lines_raw" data-type="number" data-fmt="html" class="abs"></th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody><tr>
|
||||||
|
<td class="file high" data-value="openApiStore.ts"><a href="openApiStore.ts.html">openApiStore.ts</a></td>
|
||||||
|
<td data-value="86.3" class="pic high">
|
||||||
|
<div class="chart"><div class="cover-fill" style="width: 86%"></div><div class="cover-empty" style="width: 14%"></div></div>
|
||||||
|
</td>
|
||||||
|
<td data-value="86.3" class="pct high">86.3%</td>
|
||||||
|
<td data-value="387" class="abs high">334/387</td>
|
||||||
|
<td data-value="73.97" class="pct medium">73.97%</td>
|
||||||
|
<td data-value="73" class="abs medium">54/73</td>
|
||||||
|
<td data-value="92.85" class="pct high">92.85%</td>
|
||||||
|
<td data-value="14" class="abs high">13/14</td>
|
||||||
|
<td data-value="86.3" class="pct high">86.3%</td>
|
||||||
|
<td data-value="387" class="abs high">334/387</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
<div class='push'></div><!-- for sticky footer -->
|
||||||
|
</div><!-- /wrapper -->
|
||||||
|
<div class='footer quiet pad2 space-top1 center small'>
|
||||||
|
Code coverage generated by
|
||||||
|
<a href="https://istanbul.js.org/" target="_blank" rel="noopener noreferrer">istanbul</a>
|
||||||
|
at 2025-03-19T22:40:14.575Z
|
||||||
|
</div>
|
||||||
|
<script src="../../prettify.js"></script>
|
||||||
|
<script>
|
||||||
|
window.onload = function () {
|
||||||
|
prettyPrint();
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<script src="../../sorter.js"></script>
|
||||||
|
<script src="../../block-navigation.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
||||||
1789
coverage/src/store/openApiStore.ts.html
Normal file
1789
coverage/src/store/openApiStore.ts.html
Normal file
File diff suppressed because it is too large
Load Diff
1
dist/__tests__/cli.test.d.ts
vendored
Normal file
1
dist/__tests__/cli.test.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export {};
|
||||||
94
dist/__tests__/cli.test.js
vendored
Normal file
94
dist/__tests__/cli.test.js
vendored
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
import { describe, it, expect } from 'vitest';
|
||||||
|
import { Command } from 'commander';
|
||||||
|
describe('CLI Options', () => {
|
||||||
|
it('should require target URL', () => {
|
||||||
|
const program = new Command();
|
||||||
|
program
|
||||||
|
.name('arbiter')
|
||||||
|
.description('API proxy with OpenAPI generation and HAR export capabilities')
|
||||||
|
.version('1.0.0')
|
||||||
|
.requiredOption('-t, --target <url>', 'target API URL to proxy to')
|
||||||
|
.option('-p, --port <number>', 'port to run the proxy server on', '8080')
|
||||||
|
.option('-d, --docs-port <number>', 'port to run the documentation server on', '9000')
|
||||||
|
.option('-k, --key <string>', 'API key to add to proxied requests')
|
||||||
|
.option('--docs-only', 'run only the documentation server')
|
||||||
|
.option('--proxy-only', 'run only the proxy server')
|
||||||
|
.option('-v, --verbose', 'enable verbose logging');
|
||||||
|
// Test without target URL
|
||||||
|
expect(() => program.parse(['node', 'arbiter'])).toThrow();
|
||||||
|
// Test with target URL
|
||||||
|
const options = program.parse(['node', 'arbiter', '-t', 'http://example.com']).opts();
|
||||||
|
expect(options.target).toBe('http://example.com');
|
||||||
|
expect(options.port).toBe('8080');
|
||||||
|
expect(options.docsPort).toBe('9000');
|
||||||
|
});
|
||||||
|
it('should handle custom ports', () => {
|
||||||
|
const program = new Command();
|
||||||
|
program
|
||||||
|
.name('arbiter')
|
||||||
|
.description('API proxy with OpenAPI generation and HAR export capabilities')
|
||||||
|
.version('1.0.0')
|
||||||
|
.requiredOption('-t, --target <url>', 'target API URL to proxy to')
|
||||||
|
.option('-p, --port <number>', 'port to run the proxy server on', '8080')
|
||||||
|
.option('-d, --docs-port <number>', 'port to run the documentation server on', '9000');
|
||||||
|
const options = program.parse([
|
||||||
|
'node',
|
||||||
|
'arbiter',
|
||||||
|
'-t',
|
||||||
|
'http://example.com',
|
||||||
|
'-p',
|
||||||
|
'8081',
|
||||||
|
'-d',
|
||||||
|
'9001',
|
||||||
|
]).opts();
|
||||||
|
expect(options.port).toBe('8081');
|
||||||
|
expect(options.docsPort).toBe('9001');
|
||||||
|
});
|
||||||
|
it('should handle API key', () => {
|
||||||
|
const program = new Command();
|
||||||
|
program
|
||||||
|
.name('arbiter')
|
||||||
|
.description('API proxy with OpenAPI generation and HAR export capabilities')
|
||||||
|
.version('1.0.0')
|
||||||
|
.requiredOption('-t, --target <url>', 'target API URL to proxy to')
|
||||||
|
.option('-k, --key <string>', 'API key to add to proxied requests');
|
||||||
|
const options = program.parse([
|
||||||
|
'node',
|
||||||
|
'arbiter',
|
||||||
|
'-t',
|
||||||
|
'http://example.com',
|
||||||
|
'-k',
|
||||||
|
'test-api-key',
|
||||||
|
]).opts();
|
||||||
|
expect(options.key).toBe('test-api-key');
|
||||||
|
});
|
||||||
|
it('should handle server mode options', () => {
|
||||||
|
const program = new Command();
|
||||||
|
program
|
||||||
|
.name('arbiter')
|
||||||
|
.description('API proxy with OpenAPI generation and HAR export capabilities')
|
||||||
|
.version('1.0.0')
|
||||||
|
.requiredOption('-t, --target <url>', 'target API URL to proxy to')
|
||||||
|
.option('--docs-only', 'run only the documentation server')
|
||||||
|
.option('--proxy-only', 'run only the proxy server');
|
||||||
|
// Test docs-only mode
|
||||||
|
const docsOptions = program.parse([
|
||||||
|
'node',
|
||||||
|
'arbiter',
|
||||||
|
'-t',
|
||||||
|
'http://example.com',
|
||||||
|
'--docs-only',
|
||||||
|
]).opts();
|
||||||
|
expect(docsOptions.docsOnly).toBe(true);
|
||||||
|
// Test proxy-only mode
|
||||||
|
const proxyOptions = program.parse([
|
||||||
|
'node',
|
||||||
|
'arbiter',
|
||||||
|
'-t',
|
||||||
|
'http://example.com',
|
||||||
|
'--proxy-only',
|
||||||
|
]).opts();
|
||||||
|
expect(proxyOptions.proxyOnly).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=cli.test.js.map
|
||||||
1
dist/__tests__/cli.test.js.map
vendored
Normal file
1
dist/__tests__/cli.test.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"cli.test.js","sourceRoot":"","sources":["../../src/__tests__/cli.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,EAAE,EAAE,MAAM,EAAM,MAAM,QAAQ,CAAC;AAClD,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAEpC,QAAQ,CAAC,aAAa,EAAE,GAAG,EAAE;IAC3B,EAAE,CAAC,2BAA2B,EAAE,GAAG,EAAE;QACnC,MAAM,OAAO,GAAG,IAAI,OAAO,EAAE,CAAC;QAC9B,OAAO;aACJ,IAAI,CAAC,SAAS,CAAC;aACf,WAAW,CAAC,+DAA+D,CAAC;aAC5E,OAAO,CAAC,OAAO,CAAC;aAChB,cAAc,CAAC,oBAAoB,EAAE,4BAA4B,CAAC;aAClE,MAAM,CAAC,qBAAqB,EAAE,iCAAiC,EAAE,MAAM,CAAC;aACxE,MAAM,CAAC,0BAA0B,EAAE,yCAAyC,EAAE,MAAM,CAAC;aACrF,MAAM,CAAC,oBAAoB,EAAE,oCAAoC,CAAC;aAClE,MAAM,CAAC,aAAa,EAAE,mCAAmC,CAAC;aAC1D,MAAM,CAAC,cAAc,EAAE,2BAA2B,CAAC;aACnD,MAAM,CAAC,eAAe,EAAE,wBAAwB,CAAC,CAAC;QAErD,0BAA0B;QAC1B,MAAM,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC;QAE3D,uBAAuB;QACvB,MAAM,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,oBAAoB,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;QACtF,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAClD,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAClC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACxC,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,4BAA4B,EAAE,GAAG,EAAE;QACpC,MAAM,OAAO,GAAG,IAAI,OAAO,EAAE,CAAC;QAC9B,OAAO;aACJ,IAAI,CAAC,SAAS,CAAC;aACf,WAAW,CAAC,+DAA+D,CAAC;aAC5E,OAAO,CAAC,OAAO,CAAC;aAChB,cAAc,CAAC,oBAAoB,EAAE,4BAA4B,CAAC;aAClE,MAAM,CAAC,qBAAqB,EAAE,iCAAiC,EAAE,MAAM,CAAC;aACxE,MAAM,CAAC,0BAA0B,EAAE,yCAAyC,EAAE,MAAM,CAAC,CAAC;QAEzF,MAAM,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC;YAC5B,MAAM;YACN,SAAS;YACT,IAAI;YACJ,oBAAoB;YACpB,IAAI;YACJ,MAAM;YACN,IAAI;YACJ,MAAM;SACP,CAAC,CAAC,IAAI,EAAE,CAAC;QAEV,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAClC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACxC,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,uBAAuB,EAAE,GAAG,EAAE;QAC/B,MAAM,OAAO,GAAG,IAAI,OAAO,EAAE,CAAC;QAC9B,OAAO;aACJ,IAAI,CAAC,SAAS,CAAC;aACf,WAAW,CAAC,+DAA+D,CAAC;aAC5E,OAAO,CAAC,OAAO,CAAC;aAChB,cAAc,CAAC,oBAAoB,EA
AE,4BAA4B,CAAC;aAClE,MAAM,CAAC,oBAAoB,EAAE,oCAAoC,CAAC,CAAC;QAEtE,MAAM,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC;YAC5B,MAAM;YACN,SAAS;YACT,IAAI;YACJ,oBAAoB;YACpB,IAAI;YACJ,cAAc;SACf,CAAC,CAAC,IAAI,EAAE,CAAC;QAEV,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IAC3C,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,mCAAmC,EAAE,GAAG,EAAE;QAC3C,MAAM,OAAO,GAAG,IAAI,OAAO,EAAE,CAAC;QAC9B,OAAO;aACJ,IAAI,CAAC,SAAS,CAAC;aACf,WAAW,CAAC,+DAA+D,CAAC;aAC5E,OAAO,CAAC,OAAO,CAAC;aAChB,cAAc,CAAC,oBAAoB,EAAE,4BAA4B,CAAC;aAClE,MAAM,CAAC,aAAa,EAAE,mCAAmC,CAAC;aAC1D,MAAM,CAAC,cAAc,EAAE,2BAA2B,CAAC,CAAC;QAEvD,sBAAsB;QACtB,MAAM,WAAW,GAAG,OAAO,CAAC,KAAK,CAAC;YAChC,MAAM;YACN,SAAS;YACT,IAAI;YACJ,oBAAoB;YACpB,aAAa;SACd,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAExC,uBAAuB;QACvB,MAAM,YAAY,GAAG,OAAO,CAAC,KAAK,CAAC;YACjC,MAAM;YACN,SAAS;YACT,IAAI;YACJ,oBAAoB;YACpB,cAAc;SACf,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||||
2
dist/cli.d.ts
vendored
Normal file
2
dist/cli.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
export {};
|
||||||
29
dist/cli.js
vendored
Normal file
29
dist/cli.js
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
import { Command } from 'commander';
|
||||||
|
import chalk from 'chalk';
|
||||||
|
import { startServers } from './server.js';
|
||||||
|
const program = new Command();
|
||||||
|
console.log('Starting Arbiter...');
|
||||||
|
program
|
||||||
|
.name('arbiter')
|
||||||
|
.description('API proxy with OpenAPI generation and HAR export capabilities')
|
||||||
|
.version('1.0.0')
|
||||||
|
.requiredOption('-t, --target <url>', 'target API URL to proxy to')
|
||||||
|
.option('-p, --port <number>', 'port to run the proxy server on', '8080')
|
||||||
|
.option('-d, --docs-port <number>', 'port to run the documentation server on', '9000')
|
||||||
|
.option('--docs-only', 'run only the documentation server')
|
||||||
|
.option('--proxy-only', 'run only the proxy server')
|
||||||
|
.option('-v, --verbose', 'enable verbose logging')
|
||||||
|
.parse(process.argv);
|
||||||
|
const options = program.opts();
|
||||||
|
// Start the servers
|
||||||
|
startServers({
|
||||||
|
target: options.target,
|
||||||
|
proxyPort: parseInt(options.port),
|
||||||
|
docsPort: parseInt(options.docsPort),
|
||||||
|
verbose: options.verbose
|
||||||
|
}).catch((error) => {
|
||||||
|
console.error(chalk.red('Failed to start servers:'), error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=cli.js.map
|
||||||
1
dist/cli.js.map
vendored
Normal file
1
dist/cli.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"cli.js","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAE3C,MAAM,OAAO,GAAG,IAAI,OAAO,EAAE,CAAC;AAE9B,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC;AAEnC,OAAO;KACJ,IAAI,CAAC,SAAS,CAAC;KACf,WAAW,CAAC,+DAA+D,CAAC;KAC5E,OAAO,CAAC,OAAO,CAAC;KAChB,cAAc,CAAC,oBAAoB,EAAE,4BAA4B,CAAC;KAClE,MAAM,CAAC,qBAAqB,EAAE,iCAAiC,EAAE,MAAM,CAAC;KACxE,MAAM,CAAC,0BAA0B,EAAE,yCAAyC,EAAE,MAAM,CAAC;KACrF,MAAM,CAAC,aAAa,EAAE,mCAAmC,CAAC;KAC1D,MAAM,CAAC,cAAc,EAAE,2BAA2B,CAAC;KACnD,MAAM,CAAC,eAAe,EAAE,wBAAwB,CAAC;KACjD,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAEvB,MAAM,OAAO,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;AAE/B,oBAAoB;AACpB,YAAY,CAAC;IACX,MAAM,EAAE,OAAO,CAAC,MAAM;IACtB,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC;IACjC,QAAQ,EAAE,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC;IACpC,OAAO,EAAE,OAAO,CAAC,OAAO;CACzB,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;IACjB,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,0BAA0B,CAAC,EAAE,KAAK,CAAC,CAAC;IAC5D,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC,CAAC,CAAC"}
|
||||||
1
dist/middleware/__tests__/harRecorder.test.d.ts
vendored
Normal file
1
dist/middleware/__tests__/harRecorder.test.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export {};
|
||||||
178
dist/middleware/__tests__/harRecorder.test.js
vendored
Normal file
178
dist/middleware/__tests__/harRecorder.test.js
vendored
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
import { describe, it, expect, beforeEach } from 'vitest';
|
||||||
|
import { harRecorder } from '../harRecorder.js';
|
||||||
|
import { openApiStore } from '../../store/openApiStore.js';
|
||||||
|
describe('HAR Recorder Middleware', () => {
|
||||||
|
let mockContext;
|
||||||
|
let mockNext;
|
||||||
|
beforeEach(() => {
|
||||||
|
// Clear the openApiStore before each test
|
||||||
|
openApiStore.clear();
|
||||||
|
// Create a store for context values
|
||||||
|
const store = new Map();
|
||||||
|
// Create a mock request with proper header function
|
||||||
|
const mockReq = {
|
||||||
|
method: 'GET',
|
||||||
|
url: 'http://localhost:3000/test',
|
||||||
|
header: (name) => {
|
||||||
|
if (name === 'content-type')
|
||||||
|
return 'application/json';
|
||||||
|
if (name === 'accept')
|
||||||
|
return 'application/json';
|
||||||
|
if (name === undefined)
|
||||||
|
return { 'content-type': 'application/json', 'accept': 'application/json' };
|
||||||
|
return undefined;
|
||||||
|
},
|
||||||
|
json: async () => ({ test: 'data' }),
|
||||||
|
path: '/test'
|
||||||
|
};
|
||||||
|
// Create a mock response
|
||||||
|
const mockRes = new Response(JSON.stringify({ success: true }), {
|
||||||
|
status: 200,
|
||||||
|
statusText: 'OK',
|
||||||
|
headers: {
|
||||||
|
'content-type': 'application/json'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// Create a complete mock context
|
||||||
|
mockContext = {
|
||||||
|
req: mockReq,
|
||||||
|
res: mockRes,
|
||||||
|
set: (key, value) => { store.set(key, value); },
|
||||||
|
get: (key) => store.get(key),
|
||||||
|
header: () => '',
|
||||||
|
redirect: () => { },
|
||||||
|
json: () => { },
|
||||||
|
text: () => { },
|
||||||
|
html: () => { },
|
||||||
|
stream: () => { },
|
||||||
|
blob: () => { },
|
||||||
|
arrayBuffer: () => { },
|
||||||
|
formData: () => { },
|
||||||
|
cookie: () => { },
|
||||||
|
notFound: () => { },
|
||||||
|
status: () => { },
|
||||||
|
headers: () => { },
|
||||||
|
body: () => { },
|
||||||
|
param: () => '',
|
||||||
|
query: () => '',
|
||||||
|
setCookie: () => { },
|
||||||
|
getCookie: () => '',
|
||||||
|
deleteCookie: () => { },
|
||||||
|
vary: () => { },
|
||||||
|
etag: () => { },
|
||||||
|
lastModified: () => { },
|
||||||
|
type: () => { },
|
||||||
|
attachment: () => { },
|
||||||
|
download: () => { },
|
||||||
|
send: () => { },
|
||||||
|
jsonT: () => { },
|
||||||
|
textT: () => { },
|
||||||
|
htmlT: () => { },
|
||||||
|
streamT: () => { },
|
||||||
|
blobT: () => { },
|
||||||
|
arrayBufferT: () => { },
|
||||||
|
formDataT: () => { },
|
||||||
|
cookieT: () => { },
|
||||||
|
notFoundT: () => { },
|
||||||
|
statusT: () => { },
|
||||||
|
headersT: () => { },
|
||||||
|
bodyT: () => { },
|
||||||
|
paramT: () => '',
|
||||||
|
queryT: () => '',
|
||||||
|
setCookieT: () => { },
|
||||||
|
getCookieT: () => '',
|
||||||
|
deleteCookieT: () => { },
|
||||||
|
prettyT: () => { },
|
||||||
|
varyT: () => { },
|
||||||
|
etagT: () => { },
|
||||||
|
lastModifiedT: () => { },
|
||||||
|
typeT: () => { },
|
||||||
|
attachmentT: () => { },
|
||||||
|
downloadT: () => { },
|
||||||
|
sendT: () => { },
|
||||||
|
env: {},
|
||||||
|
finalized: false,
|
||||||
|
error: null,
|
||||||
|
event: null,
|
||||||
|
executionCtx: null,
|
||||||
|
matchedRoute: null,
|
||||||
|
params: {},
|
||||||
|
path: '',
|
||||||
|
validated: {},
|
||||||
|
validator: null
|
||||||
|
};
|
||||||
|
mockNext = async () => {
|
||||||
|
// Simulate middleware next behavior
|
||||||
|
return Promise.resolve();
|
||||||
|
};
|
||||||
|
});
|
||||||
|
it('should record request and response details', async () => {
|
||||||
|
await harRecorder(mockContext, mockNext);
|
||||||
|
const har = mockContext.get('har');
|
||||||
|
expect(har).toBeDefined();
|
||||||
|
expect(har.log.entries).toHaveLength(1);
|
||||||
|
expect(har.log.entries[0].request.method).toBe('GET');
|
||||||
|
expect(har.log.entries[0].request.url).toBe('http://localhost:3000/test');
|
||||||
|
expect(har.log.entries[0].response.status).toBe(200);
|
||||||
|
expect(har.log.entries[0].response.content.text).toBe('{"success":true}');
|
||||||
|
});
|
||||||
|
it('should handle query parameters', async () => {
|
||||||
|
// Create a new context with query parameters
|
||||||
|
const store = new Map();
|
||||||
|
const queryContext = {
|
||||||
|
...mockContext,
|
||||||
|
req: {
|
||||||
|
...mockContext.req,
|
||||||
|
url: 'http://localhost:3000/test?param1=value1¶m2=value2',
|
||||||
|
path: '/test',
|
||||||
|
method: 'GET',
|
||||||
|
header: (name) => {
|
||||||
|
if (name === 'content-type')
|
||||||
|
return 'application/json';
|
||||||
|
if (name === 'accept')
|
||||||
|
return 'application/json';
|
||||||
|
if (name === undefined)
|
||||||
|
return { 'content-type': 'application/json', 'accept': 'application/json' };
|
||||||
|
return undefined;
|
||||||
|
},
|
||||||
|
json: async () => ({ test: 'data' })
|
||||||
|
},
|
||||||
|
set: (key, value) => { store.set(key, value); },
|
||||||
|
get: (key) => store.get(key)
|
||||||
|
};
|
||||||
|
await harRecorder(queryContext, mockNext);
|
||||||
|
const har = queryContext.get('har');
|
||||||
|
expect(har.log.entries[0].request.queryString).toHaveLength(2);
|
||||||
|
expect(har.log.entries[0].request.queryString[0]).toEqual({
|
||||||
|
name: 'param1',
|
||||||
|
value: 'value1'
|
||||||
|
});
|
||||||
|
expect(har.log.entries[0].request.queryString[1]).toEqual({
|
||||||
|
name: 'param2',
|
||||||
|
value: 'value2'
|
||||||
|
});
|
||||||
|
});
|
||||||
|
it('should handle request headers', async () => {
|
||||||
|
await harRecorder(mockContext, mockNext);
|
||||||
|
const har = mockContext.get('har');
|
||||||
|
expect(har.log.entries[0].request.headers).toHaveLength(2);
|
||||||
|
expect(har.log.entries[0].request.headers).toContainEqual({
|
||||||
|
name: 'content-type',
|
||||||
|
value: 'application/json'
|
||||||
|
});
|
||||||
|
expect(har.log.entries[0].request.headers).toContainEqual({
|
||||||
|
name: 'accept',
|
||||||
|
value: 'application/json'
|
||||||
|
});
|
||||||
|
});
|
||||||
|
it('should handle response headers', async () => {
|
||||||
|
await harRecorder(mockContext, mockNext);
|
||||||
|
const har = mockContext.get('har');
|
||||||
|
expect(har.log.entries[0].response.headers).toHaveLength(1);
|
||||||
|
expect(har.log.entries[0].response.headers[0]).toEqual({
|
||||||
|
name: 'content-type',
|
||||||
|
value: 'application/json'
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=harRecorder.test.js.map
|
||||||
1
dist/middleware/__tests__/harRecorder.test.js.map
vendored
Normal file
1
dist/middleware/__tests__/harRecorder.test.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
2
dist/middleware/apiDocGenerator.d.ts
vendored
Normal file
2
dist/middleware/apiDocGenerator.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
import { Context } from 'hono';
|
||||||
|
export declare const apiDocGenerator: (c: Context, next: () => Promise<void>) => Promise<void>;
|
||||||
15
dist/middleware/apiDocGenerator.js
vendored
Normal file
15
dist/middleware/apiDocGenerator.js
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { openApiStore } from '../store/openApiStore.js';
|
||||||
|
export const apiDocGenerator = async (c, next) => {
|
||||||
|
await next();
|
||||||
|
// Record the API call in OpenAPI format
|
||||||
|
openApiStore.recordEndpoint(c.req.path, c.req.method.toLowerCase(), {
|
||||||
|
query: Object.fromEntries(new URL(c.req.url).searchParams),
|
||||||
|
body: await c.req.json().catch(() => null),
|
||||||
|
contentType: c.req.header('content-type') || 'application/json',
|
||||||
|
}, {
|
||||||
|
status: c.res.status,
|
||||||
|
body: await c.res.clone().json().catch(() => null),
|
||||||
|
contentType: c.res.headers.get('content-type') || 'application/json',
|
||||||
|
});
|
||||||
|
};
|
||||||
|
//# sourceMappingURL=apiDocGenerator.js.map
|
||||||
1
dist/middleware/apiDocGenerator.js.map
vendored
Normal file
1
dist/middleware/apiDocGenerator.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"apiDocGenerator.js","sourceRoot":"","sources":["../../src/middleware/apiDocGenerator.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAExD,MAAM,CAAC,MAAM,eAAe,GAAG,KAAK,EAAE,CAAU,EAAE,IAAyB,EAAE,EAAE;IAC7E,MAAM,IAAI,EAAE,CAAC;IAEb,wCAAwC;IACxC,YAAY,CAAC,cAAc,CACzB,CAAC,CAAC,GAAG,CAAC,IAAI,EACV,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,WAAW,EAAE,EAC1B;QACE,KAAK,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,YAAY,CAAC;QAC1D,IAAI,EAAE,MAAM,CAAC,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC;QAC1C,WAAW,EAAE,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,cAAc,CAAC,IAAI,kBAAkB;KAChE,EACD;QACE,MAAM,EAAE,CAAC,CAAC,GAAG,CAAC,MAAM;QACpB,IAAI,EAAE,MAAM,CAAC,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC;QAClD,WAAW,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,kBAAkB;KACrE,CACF,CAAC;AACJ,CAAC,CAAC"}
|
||||||
2
dist/middleware/harRecorder.d.ts
vendored
Normal file
2
dist/middleware/harRecorder.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
import { Context, Next } from 'hono';
|
||||||
|
export declare function harRecorder(c: Context, next: Next): Promise<void>;
|
||||||
55
dist/middleware/harRecorder.js
vendored
Normal file
55
dist/middleware/harRecorder.js
vendored
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import { openApiStore } from '../store/openApiStore.js';
|
||||||
|
export async function harRecorder(c, next) {
|
||||||
|
const startTime = Date.now();
|
||||||
|
// Get request body if present
|
||||||
|
let requestBody;
|
||||||
|
if (c.req.method !== 'GET' && c.req.method !== 'HEAD') {
|
||||||
|
try {
|
||||||
|
requestBody = await c.req.json();
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
// Body might not be JSON
|
||||||
|
requestBody = await c.req.text();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Get query parameters from URL
|
||||||
|
const url = new URL(c.req.url);
|
||||||
|
const queryParams = {};
|
||||||
|
for (const [key, value] of url.searchParams.entries()) {
|
||||||
|
queryParams[key] = value;
|
||||||
|
}
|
||||||
|
// Get all request headers
|
||||||
|
const requestHeaders = {};
|
||||||
|
Object.entries(c.req.header()).forEach(([key, value]) => {
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
requestHeaders[key] = value;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// Call next middleware
|
||||||
|
await next();
|
||||||
|
// Calculate response time
|
||||||
|
const responseTime = Date.now() - startTime;
|
||||||
|
// Get response body
|
||||||
|
let responseBody;
|
||||||
|
try {
|
||||||
|
responseBody = await c.res.clone().json();
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
responseBody = await c.res.clone().text();
|
||||||
|
}
|
||||||
|
// Record the request/response in OpenAPI format
|
||||||
|
openApiStore.recordEndpoint(c.req.path, c.req.method.toLowerCase(), {
|
||||||
|
query: queryParams,
|
||||||
|
body: requestBody,
|
||||||
|
contentType: c.req.header('content-type') || 'application/json',
|
||||||
|
headers: requestHeaders
|
||||||
|
}, {
|
||||||
|
status: c.res.status,
|
||||||
|
body: responseBody,
|
||||||
|
contentType: c.res.headers.get('content-type') || 'application/json',
|
||||||
|
headers: Object.fromEntries(c.res.headers.entries())
|
||||||
|
});
|
||||||
|
// Set HAR data in context
|
||||||
|
c.set('har', openApiStore.generateHAR());
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=harRecorder.js.map
|
||||||
1
dist/middleware/harRecorder.js.map
vendored
Normal file
1
dist/middleware/harRecorder.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"harRecorder.js","sourceRoot":"","sources":["../../src/middleware/harRecorder.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AA6BxD,MAAM,CAAC,KAAK,UAAU,WAAW,CAAC,CAAU,EAAE,IAAU;IACtD,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAE7B,8BAA8B;IAC9B,IAAI,WAAgB,CAAC;IACrB,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,KAAK,KAAK,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,KAAK,MAAM,EAAE,CAAC;QACtD,IAAI,CAAC;YACH,WAAW,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC;QACnC,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,yBAAyB;YACzB,WAAW,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC;QACnC,CAAC;IACH,CAAC;IAED,gCAAgC;IAChC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IAC/B,MAAM,WAAW,GAA2B,EAAE,CAAC;IAC/C,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,GAAG,CAAC,YAAY,CAAC,OAAO,EAAE,EAAE,CAAC;QACtD,WAAW,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IAC3B,CAAC;IAED,0BAA0B;IAC1B,MAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE;QACtD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,CAAC;YAC9B,cAAc,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;QAC9B,CAAC;IACH,CAAC,CAAC,CAAC;IAEH,uBAAuB;IACvB,MAAM,IAAI,EAAE,CAAC;IAEb,0BAA0B;IAC1B,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;IAE5C,oBAAoB;IACpB,IAAI,YAAiB,CAAC;IACtB,IAAI,CAAC;QACH,YAAY,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC;IAC5C,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,YAAY,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC;IAC5C,CAAC;IAED,gDAAgD;IAChD,YAAY,CAAC,cAAc,CACzB,CAAC,CAAC,GAAG,CAAC,IAAI,EACV,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,WAAW,EAAE,EAC1B;QACE,KAAK,EAAE,WAAW;QAClB,IAAI,EAAE,WAAW;QACjB,WAAW,EAAE,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,cAAc,CAAC,IAAI,kBAAkB;QAC/D,OAAO,EAAE,cAAc;KACxB,EACD;QACE,MAAM,EAAE,CAAC,CAAC,GAAG,CAAC,MAAM;QACpB,IAAI,EAAE,YAAY;QAClB,WAAW,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,kBAAkB;QACpE,OAAO,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC;KACrD,CACF,CAAC;IAEF,0BAA0B;IAC1B,CAAC,CAAC,GAAG,CAAC,KAAK,EA
AE,YAAY,CAAC,WAAW,EAAE,CAAC,CAAC;AAC3C,CAAC"}
|
||||||
11
dist/server.d.ts
vendored
Normal file
11
dist/server.d.ts
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
import { Server } from 'node:http';
|
||||||
|
export interface ServerOptions {
|
||||||
|
target: string;
|
||||||
|
proxyPort: number;
|
||||||
|
docsPort: number;
|
||||||
|
verbose?: boolean;
|
||||||
|
}
|
||||||
|
export declare function startServers(options: ServerOptions): Promise<{
|
||||||
|
proxyServer: Server;
|
||||||
|
docsServer: Server;
|
||||||
|
}>;
|
||||||
281
dist/server.js
vendored
Normal file
281
dist/server.js
vendored
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
import { Hono } from 'hono';
|
||||||
|
import { logger } from 'hono/logger';
|
||||||
|
import { cors } from 'hono/cors';
|
||||||
|
import { prettyJSON } from 'hono/pretty-json';
|
||||||
|
import httpProxy from 'http-proxy';
|
||||||
|
import { openApiStore } from './store/openApiStore.js';
|
||||||
|
import { createServer } from 'node:http';
|
||||||
|
import { Agent } from 'node:https';
|
||||||
|
import chalk from 'chalk';
|
||||||
|
export async function startServers(options) {
|
||||||
|
// Set the target URL in the OpenAPI store
|
||||||
|
openApiStore.setTargetUrl(options.target);
|
||||||
|
// Create two separate Hono apps
|
||||||
|
const proxyApp = new Hono();
|
||||||
|
const docsApp = new Hono();
|
||||||
|
// Create proxy server
|
||||||
|
const proxy = httpProxy.createProxyServer({
|
||||||
|
changeOrigin: true,
|
||||||
|
secure: false,
|
||||||
|
selfHandleResponse: true,
|
||||||
|
target: options.target,
|
||||||
|
headers: {
|
||||||
|
'Host': new URL(options.target).host
|
||||||
|
},
|
||||||
|
agent: new Agent({
|
||||||
|
rejectUnauthorized: false
|
||||||
|
})
|
||||||
|
});
|
||||||
|
// Set up error handlers
|
||||||
|
proxy.on('error', (err) => {
|
||||||
|
console.error('Proxy error:', err);
|
||||||
|
});
|
||||||
|
proxy.on('proxyReq', (proxyReq, req, res) => {
|
||||||
|
// Ensure we're using the correct protocol
|
||||||
|
proxyReq.protocol = new URL(options.target).protocol;
|
||||||
|
});
|
||||||
|
// Middleware for both apps
|
||||||
|
if (options.verbose) {
|
||||||
|
proxyApp.use('*', logger());
|
||||||
|
docsApp.use('*', logger());
|
||||||
|
}
|
||||||
|
proxyApp.use('*', cors());
|
||||||
|
proxyApp.use('*', prettyJSON());
|
||||||
|
docsApp.use('*', cors());
|
||||||
|
docsApp.use('*', prettyJSON());
|
||||||
|
// Documentation endpoints
|
||||||
|
docsApp.get('/docs', async (c) => {
|
||||||
|
const spec = openApiStore.getOpenAPISpec();
|
||||||
|
return c.html(`
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<title>API Documentation</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<script
|
||||||
|
id="api-reference"
|
||||||
|
data-url="/openapi.json"
|
||||||
|
data-proxy-url="https://proxy.scalar.com"></script>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
var configuration = {
|
||||||
|
theme: 'light',
|
||||||
|
title: 'API Documentation'
|
||||||
|
}
|
||||||
|
|
||||||
|
document.getElementById('api-reference').dataset.configuration =
|
||||||
|
JSON.stringify(configuration)
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<script src="https://cdn.jsdelivr.net/npm/@scalar/api-reference"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
`);
|
||||||
|
});
|
||||||
|
docsApp.get('/openapi.json', (c) => {
|
||||||
|
return c.json(openApiStore.getOpenAPISpec());
|
||||||
|
});
|
||||||
|
docsApp.get('/openapi.yaml', (c) => {
|
||||||
|
return c.text(openApiStore.getOpenAPISpecAsYAML());
|
||||||
|
});
|
||||||
|
docsApp.get('/har', (c) => {
|
||||||
|
return c.json(openApiStore.generateHAR());
|
||||||
|
});
|
||||||
|
// Proxy all requests
|
||||||
|
proxyApp.all('*', async (c) => {
|
||||||
|
let requestBody;
|
||||||
|
let responseBody;
|
||||||
|
// Get request body if present
|
||||||
|
if (c.req.method !== 'GET' && c.req.method !== 'HEAD') {
|
||||||
|
try {
|
||||||
|
requestBody = await c.req.json();
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
// Body might not be JSON
|
||||||
|
requestBody = await c.req.text();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
// Create a new request object with the target URL
|
||||||
|
const targetUrl = new URL(c.req.path, options.target);
|
||||||
|
// Copy query parameters
|
||||||
|
const originalUrl = new URL(c.req.url);
|
||||||
|
originalUrl.searchParams.forEach((value, key) => {
|
||||||
|
targetUrl.searchParams.append(key, value);
|
||||||
|
});
|
||||||
|
const proxyReq = new Request(targetUrl.toString(), {
|
||||||
|
method: c.req.method,
|
||||||
|
headers: new Headers({
|
||||||
|
'content-type': c.req.header('content-type') || 'application/json',
|
||||||
|
'accept': c.req.header('accept') || 'application/json',
|
||||||
|
...Object.fromEntries(Object.entries(c.req.header())
|
||||||
|
.filter(([key]) => !['content-type', 'accept'].includes(key.toLowerCase()))),
|
||||||
|
}),
|
||||||
|
body: c.req.method !== 'GET' && c.req.method !== 'HEAD' ? requestBody : undefined,
|
||||||
|
});
|
||||||
|
// Forward the request to the target server
|
||||||
|
const proxyRes = await fetch(proxyReq);
|
||||||
|
// Get response body
|
||||||
|
const contentType = proxyRes.headers.get('content-type') || '';
|
||||||
|
if (contentType.includes('application/json')) {
|
||||||
|
responseBody = await proxyRes.json();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
responseBody = await proxyRes.text();
|
||||||
|
}
|
||||||
|
// Record the API call in OpenAPI format
|
||||||
|
openApiStore.recordEndpoint(c.req.path, c.req.method.toLowerCase(), {
|
||||||
|
query: Object.fromEntries(new URL(c.req.url).searchParams),
|
||||||
|
body: requestBody,
|
||||||
|
contentType: c.req.header('content-type') || 'application/json',
|
||||||
|
headers: Object.fromEntries(Object.entries(c.req.header()))
|
||||||
|
}, {
|
||||||
|
status: proxyRes.status,
|
||||||
|
body: responseBody,
|
||||||
|
contentType: proxyRes.headers.get('content-type') || 'application/json',
|
||||||
|
headers: Object.fromEntries(proxyRes.headers.entries())
|
||||||
|
});
|
||||||
|
// Create a new response with the correct content type and body
|
||||||
|
return new Response(JSON.stringify(responseBody), {
|
||||||
|
status: proxyRes.status,
|
||||||
|
headers: Object.fromEntries(proxyRes.headers.entries())
|
||||||
|
});
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
console.error('Proxy request failed:', error);
|
||||||
|
return c.json({ error: 'Proxy error', details: error.message }, 500);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// Function to check if a port is available
|
||||||
|
async function isPortAvailable(port) {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const server = createServer()
|
||||||
|
.once('error', () => {
|
||||||
|
resolve(false);
|
||||||
|
})
|
||||||
|
.once('listening', () => {
|
||||||
|
server.close();
|
||||||
|
resolve(true);
|
||||||
|
})
|
||||||
|
.listen(port);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Function to find an available port
|
||||||
|
async function findAvailablePort(startPort) {
|
||||||
|
let port = startPort;
|
||||||
|
while (!(await isPortAvailable(port))) {
|
||||||
|
port++;
|
||||||
|
}
|
||||||
|
return port;
|
||||||
|
}
|
||||||
|
// Start servers
|
||||||
|
const availableProxyPort = await findAvailablePort(options.proxyPort);
|
||||||
|
const availableDocsPort = await findAvailablePort(options.docsPort);
|
||||||
|
if (availableProxyPort !== options.proxyPort) {
|
||||||
|
console.log(chalk.yellow(`Port ${options.proxyPort} is in use, using port ${availableProxyPort} instead`));
|
||||||
|
}
|
||||||
|
if (availableDocsPort !== options.docsPort) {
|
||||||
|
console.log(chalk.yellow(`Port ${options.docsPort} is in use, using port ${availableDocsPort} instead`));
|
||||||
|
}
|
||||||
|
console.log(chalk.blue(`Starting proxy server on port ${availableProxyPort}...`));
|
||||||
|
console.log(chalk.gray(`Proxying requests to: ${options.target}`));
|
||||||
|
console.log(chalk.blue(`Starting documentation server on port ${availableDocsPort}...`));
|
||||||
|
const proxyServer = createServer(async (req, res) => {
|
||||||
|
try {
|
||||||
|
const url = new URL(req.url || '/', `http://localhost:${availableProxyPort}`);
|
||||||
|
const request = new Request(url.toString(), {
|
||||||
|
method: req.method || 'GET',
|
||||||
|
headers: req.headers,
|
||||||
|
body: req.method !== 'GET' && req.method !== 'HEAD' ? req : undefined,
|
||||||
|
});
|
||||||
|
const response = await proxyApp.fetch(request);
|
||||||
|
res.statusCode = response.status;
|
||||||
|
res.statusMessage = response.statusText;
|
||||||
|
// Copy all headers from the response
|
||||||
|
for (const [key, value] of response.headers.entries()) {
|
||||||
|
res.setHeader(key, value);
|
||||||
|
}
|
||||||
|
// Stream the response body
|
||||||
|
if (response.body) {
|
||||||
|
const reader = response.body.getReader();
|
||||||
|
while (true) {
|
||||||
|
const { done, value } = await reader.read();
|
||||||
|
if (done)
|
||||||
|
break;
|
||||||
|
res.write(value);
|
||||||
|
}
|
||||||
|
res.end();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
res.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
console.error('Proxy request failed:', error);
|
||||||
|
res.statusCode = 500;
|
||||||
|
res.end(JSON.stringify({ error: 'Proxy error', details: error.message }));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
const docsServer = createServer(async (req, res) => {
|
||||||
|
try {
|
||||||
|
const url = new URL(req.url || '/', `http://localhost:${availableDocsPort}`);
|
||||||
|
const request = new Request(url.toString(), {
|
||||||
|
method: req.method || 'GET',
|
||||||
|
headers: req.headers,
|
||||||
|
body: req.method !== 'GET' && req.method !== 'HEAD' ? req : undefined,
|
||||||
|
});
|
||||||
|
const response = await docsApp.fetch(request);
|
||||||
|
res.statusCode = response.status;
|
||||||
|
res.statusMessage = response.statusText;
|
||||||
|
for (const [key, value] of response.headers.entries()) {
|
||||||
|
res.setHeader(key, value);
|
||||||
|
}
|
||||||
|
if (response.body) {
|
||||||
|
const reader = response.body.getReader();
|
||||||
|
while (true) {
|
||||||
|
const { done, value } = await reader.read();
|
||||||
|
if (done)
|
||||||
|
break;
|
||||||
|
res.write(value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
res.end();
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
console.error('Documentation request failed:', error);
|
||||||
|
res.statusCode = 500;
|
||||||
|
res.end(JSON.stringify({ error: 'Documentation error', details: error.message }));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
await new Promise((resolve, reject) => {
|
||||||
|
proxyServer.once('error', reject);
|
||||||
|
proxyServer.listen(availableProxyPort, '0.0.0.0', () => {
|
||||||
|
console.log(chalk.green(`✓ Proxy server running on port ${availableProxyPort}`));
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
await new Promise((resolve, reject) => {
|
||||||
|
docsServer.once('error', reject);
|
||||||
|
docsServer.listen(availableDocsPort, '0.0.0.0', () => {
|
||||||
|
console.log(chalk.green(`✓ Documentation server running on port ${availableDocsPort}`));
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
// Print startup message
|
||||||
|
console.log('\n' + chalk.green('Arbiter is running! 🚀'));
|
||||||
|
console.log('\n' + chalk.bold('Proxy Server:'));
|
||||||
|
console.log(chalk.cyan(` URL: http://localhost:${availableProxyPort}`));
|
||||||
|
console.log(chalk.gray(` Target: ${options.target}`));
|
||||||
|
console.log('\n' + chalk.bold('Documentation:'));
|
||||||
|
console.log(chalk.cyan(` API Reference: http://localhost:${availableDocsPort}/docs`));
|
||||||
|
console.log('\n' + chalk.bold('Exports:'));
|
||||||
|
console.log(chalk.cyan(` HAR Export: http://localhost:${availableDocsPort}/har`));
|
||||||
|
console.log(chalk.cyan(` OpenAPI JSON: http://localhost:${availableDocsPort}/openapi.json`));
|
||||||
|
console.log(chalk.cyan(` OpenAPI YAML: http://localhost:${availableDocsPort}/openapi.yaml`));
|
||||||
|
console.log('\n' + chalk.yellow('Press Ctrl+C to stop'));
|
||||||
|
return { proxyServer, docsServer };
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=server.js.map
|
||||||
1
dist/server.js.map
vendored
Normal file
1
dist/server.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
1
dist/store/__tests__/openApiStore.test.d.ts
vendored
Normal file
1
dist/store/__tests__/openApiStore.test.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export {};
|
||||||
371
dist/store/__tests__/openApiStore.test.js
vendored
Normal file
371
dist/store/__tests__/openApiStore.test.js
vendored
Normal file
@@ -0,0 +1,371 @@
|
|||||||
|
import { describe, it, expect, beforeEach } from 'vitest';
|
||||||
|
import { openApiStore } from '../openApiStore.js';
|
||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
describe('OpenAPI Store', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
// Reset the store before each test
|
||||||
|
openApiStore.clear();
|
||||||
|
});
|
||||||
|
it('should record a new endpoint', () => {
|
||||||
|
const path = '/test';
|
||||||
|
const method = 'get';
|
||||||
|
const request = {
|
||||||
|
query: {},
|
||||||
|
body: null,
|
||||||
|
contentType: 'application/json'
|
||||||
|
};
|
||||||
|
const response = {
|
||||||
|
status: 200,
|
||||||
|
body: { success: true },
|
||||||
|
contentType: 'application/json'
|
||||||
|
};
|
||||||
|
openApiStore.recordEndpoint(path, method, request, response);
|
||||||
|
const spec = openApiStore.getOpenAPISpec();
|
||||||
|
const paths = spec.paths;
|
||||||
|
expect(paths).toBeDefined();
|
||||||
|
expect(paths[path]).toBeDefined();
|
||||||
|
expect(paths[path]?.[method]).toBeDefined();
|
||||||
|
const operation = paths[path]?.[method];
|
||||||
|
expect(operation).toBeDefined();
|
||||||
|
const responses = operation.responses;
|
||||||
|
expect(responses).toBeDefined();
|
||||||
|
expect(responses['200']).toBeDefined();
|
||||||
|
const responseObj = responses['200'];
|
||||||
|
expect(responseObj.content).toBeDefined();
|
||||||
|
const content = responseObj.content;
|
||||||
|
expect(content['application/json']).toBeDefined();
|
||||||
|
expect(content['application/json'].schema).toBeDefined();
|
||||||
|
});
|
||||||
|
it('should handle multiple endpoints', () => {
|
||||||
|
const endpoints = [
|
||||||
|
{ path: '/test1', method: 'get', response: { status: 200, body: { success: true }, contentType: 'application/json' } },
|
||||||
|
{ path: '/test2', method: 'post', response: { status: 201, body: { id: 1 }, contentType: 'application/json' } }
|
||||||
|
];
|
||||||
|
endpoints.forEach(({ path, method, response }) => {
|
||||||
|
const request = {
|
||||||
|
query: {},
|
||||||
|
body: null,
|
||||||
|
contentType: 'application/json'
|
||||||
|
};
|
||||||
|
openApiStore.recordEndpoint(path, method, request, response);
|
||||||
|
});
|
||||||
|
const spec = openApiStore.getOpenAPISpec();
|
||||||
|
const paths = spec.paths;
|
||||||
|
expect(paths).toBeDefined();
|
||||||
|
expect(Object.keys(paths)).toHaveLength(2);
|
||||||
|
const test1Path = paths['/test1'];
|
||||||
|
const test2Path = paths['/test2'];
|
||||||
|
expect(test1Path).toBeDefined();
|
||||||
|
expect(test2Path).toBeDefined();
|
||||||
|
expect(test1Path?.get).toBeDefined();
|
||||||
|
expect(test2Path?.post).toBeDefined();
|
||||||
|
});
|
||||||
|
it('should generate HAR format', () => {
|
||||||
|
// Record an endpoint first
|
||||||
|
const path = '/test';
|
||||||
|
const method = 'get';
|
||||||
|
const request = {
|
||||||
|
query: {},
|
||||||
|
body: null,
|
||||||
|
contentType: 'application/json'
|
||||||
|
};
|
||||||
|
const response = {
|
||||||
|
status: 200,
|
||||||
|
body: { success: true },
|
||||||
|
contentType: 'application/json',
|
||||||
|
headers: {
|
||||||
|
'content-type': 'application/json'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
openApiStore.recordEndpoint(path, method, request, response);
|
||||||
|
// Generate HAR format
|
||||||
|
const har = openApiStore.generateHAR();
|
||||||
|
expect(har.log.entries).toHaveLength(1);
|
||||||
|
expect(har.log.entries[0].request.method).toBe(method.toUpperCase());
|
||||||
|
expect(har.log.entries[0].request.url).toContain(path);
|
||||||
|
expect(har.log.entries[0].response.status).toBe(response.status);
|
||||||
|
expect(har.log.entries[0].response.content.text).toBe(JSON.stringify(response.body));
|
||||||
|
expect(har.log.entries[0].response.headers).toContainEqual({
|
||||||
|
name: 'content-type',
|
||||||
|
value: 'application/json'
|
||||||
|
});
|
||||||
|
});
|
||||||
|
it('should generate YAML spec', () => {
|
||||||
|
const endpointPath = '/test';
|
||||||
|
const method = 'get';
|
||||||
|
const request = {
|
||||||
|
query: {},
|
||||||
|
body: null,
|
||||||
|
contentType: 'application/json'
|
||||||
|
};
|
||||||
|
const response = {
|
||||||
|
status: 200,
|
||||||
|
body: { success: true },
|
||||||
|
contentType: 'application/json'
|
||||||
|
};
|
||||||
|
openApiStore.recordEndpoint(endpointPath, method, request, response);
|
||||||
|
const yamlSpec = openApiStore.getOpenAPISpecAsYAML();
|
||||||
|
expect(yamlSpec).toBeDefined();
|
||||||
|
expect(yamlSpec).toContain('openapi: 3.1.0');
|
||||||
|
expect(yamlSpec).toContain('paths:');
|
||||||
|
expect(yamlSpec).toContain('/test:');
|
||||||
|
});
|
||||||
|
it('should save both JSON and YAML specs', () => {
|
||||||
|
const testDir = path.join(process.cwd(), 'test-output');
|
||||||
|
// Clean up test directory if it exists
|
||||||
|
if (fs.existsSync(testDir)) {
|
||||||
|
fs.rmSync(testDir, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
const endpointPath = '/test';
|
||||||
|
const method = 'get';
|
||||||
|
const request = {
|
||||||
|
query: {},
|
||||||
|
body: null,
|
||||||
|
contentType: 'application/json'
|
||||||
|
};
|
||||||
|
const response = {
|
||||||
|
status: 200,
|
||||||
|
body: { success: true },
|
||||||
|
contentType: 'application/json'
|
||||||
|
};
|
||||||
|
openApiStore.recordEndpoint(endpointPath, method, request, response);
|
||||||
|
openApiStore.saveOpenAPISpec(testDir);
|
||||||
|
// Check if files were created
|
||||||
|
expect(fs.existsSync(path.join(testDir, 'openapi.json'))).toBe(true);
|
||||||
|
expect(fs.existsSync(path.join(testDir, 'openapi.yaml'))).toBe(true);
|
||||||
|
// Clean up
|
||||||
|
fs.rmSync(testDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
describe('Security Schemes', () => {
|
||||||
|
it('should handle API Key authentication', () => {
    const securedPath = '/secure';
    const verb = 'get';
    const req = {
        query: {},
        body: null,
        contentType: 'application/json',
        headers: { 'X-API-Key': 'test-api-key' },
        security: [{ type: 'apiKey', name: 'X-API-Key', in: 'header' }]
    };
    const res = {
        status: 200,
        body: { success: true },
        contentType: 'application/json'
    };
    openApiStore.recordEndpoint(securedPath, verb, req, res);

    // The operation must reference the generated "apiKey_" scheme …
    const spec = openApiStore.getOpenAPISpec();
    const operation = spec.paths[securedPath]?.[verb];
    expect(operation.security).toBeDefined();
    expect(operation.security?.[0]).toHaveProperty('apiKey_');

    // … and the scheme itself must be declared under components.
    const schemes = spec.components?.securitySchemes;
    expect(schemes).toBeDefined();
    expect(schemes?.['apiKey_']).toEqual({
        type: 'apiKey',
        name: 'X-API-Key',
        in: 'header'
    });

    // The recorded HAR traffic carries the (lower-cased) API key header.
    const firstEntry = openApiStore.generateHAR().log.entries[0];
    expect(firstEntry.request.headers).toContainEqual({
        name: 'x-api-key',
        value: 'test-api-key'
    });
});
|
||||||
|
it('should handle OAuth2 authentication', () => {
    const securedPath = '/oauth';
    const verb = 'get';
    // Authorization-code flow definition used both as input and expectation.
    const flows = {
        authorizationCode: {
            authorizationUrl: 'https://example.com/oauth/authorize',
            tokenUrl: 'https://example.com/oauth/token',
            scopes: {
                'read': 'Read access',
                'write': 'Write access'
            }
        }
    };
    openApiStore.recordEndpoint(securedPath, verb, {
        query: {},
        body: null,
        contentType: 'application/json',
        headers: { 'Authorization': 'Bearer test-token' },
        security: [{ type: 'oauth2', flows }]
    }, {
        status: 200,
        body: { success: true },
        contentType: 'application/json'
    });

    const spec = openApiStore.getOpenAPISpec();
    const operation = spec.paths[securedPath]?.[verb];
    expect(operation.security).toBeDefined();
    expect(operation.security?.[0]).toHaveProperty('oauth2_');

    const schemes = spec.components?.securitySchemes;
    expect(schemes).toBeDefined();
    expect(schemes?.['oauth2_']).toEqual({ type: 'oauth2', flows });

    // The bearer token must survive into the HAR log (header names lower-cased).
    const firstEntry = openApiStore.generateHAR().log.entries[0];
    expect(firstEntry.request.headers).toContainEqual({
        name: 'authorization',
        value: 'Bearer test-token'
    });
});
|
||||||
|
it('should handle HTTP Basic authentication', () => {
    const securedPath = '/basic';
    const verb = 'get';
    const basicCredentials = 'Basic dXNlcm5hbWU6cGFzc3dvcmQ=';
    openApiStore.recordEndpoint(securedPath, verb, {
        query: {},
        body: null,
        contentType: 'application/json',
        headers: { 'Authorization': basicCredentials },
        security: [{ type: 'http', scheme: 'basic' }]
    }, {
        status: 200,
        body: { success: true },
        contentType: 'application/json'
    });

    // Operation should reference the generated "http_" scheme.
    const spec = openApiStore.getOpenAPISpec();
    const operation = spec.paths[securedPath]?.[verb];
    expect(operation.security).toBeDefined();
    expect(operation.security?.[0]).toHaveProperty('http_');

    // And the scheme must be declared as HTTP basic under components.
    const schemes = spec.components?.securitySchemes;
    expect(schemes).toBeDefined();
    expect(schemes?.['http_']).toEqual({ type: 'http', scheme: 'basic' });

    // HAR log carries the credentials header verbatim (name lower-cased).
    const firstEntry = openApiStore.generateHAR().log.entries[0];
    expect(firstEntry.request.headers).toContainEqual({
        name: 'authorization',
        value: basicCredentials
    });
});
|
||||||
|
it('should handle OpenID Connect authentication', () => {
    const securedPath = '/oidc';
    const verb = 'get';
    const discoveryUrl = 'https://example.com/.well-known/openid-configuration';
    openApiStore.recordEndpoint(securedPath, verb, {
        query: {},
        body: null,
        contentType: 'application/json',
        headers: { 'Authorization': 'Bearer test-oidc-token' },
        security: [{ type: 'openIdConnect', openIdConnectUrl: discoveryUrl }]
    }, {
        status: 200,
        body: { success: true },
        contentType: 'application/json'
    });

    // Operation should reference the generated "openIdConnect_" scheme.
    const spec = openApiStore.getOpenAPISpec();
    const operation = spec.paths[securedPath]?.[verb];
    expect(operation.security).toBeDefined();
    expect(operation.security?.[0]).toHaveProperty('openIdConnect_');

    // The scheme must point at the discovery document.
    const schemes = spec.components?.securitySchemes;
    expect(schemes).toBeDefined();
    expect(schemes?.['openIdConnect_']).toEqual({
        type: 'openIdConnect',
        openIdConnectUrl: discoveryUrl
    });

    // Token header must be present in the HAR log (name lower-cased).
    const firstEntry = openApiStore.generateHAR().log.entries[0];
    expect(firstEntry.request.headers).toContainEqual({
        name: 'authorization',
        value: 'Bearer test-oidc-token'
    });
});
|
||||||
|
it('should handle multiple security schemes', () => {
    const securedPath = '/multi-auth';
    const verb = 'get';
    // One endpoint protected by BOTH an API key and a bearer token.
    openApiStore.recordEndpoint(securedPath, verb, {
        query: {},
        body: null,
        contentType: 'application/json',
        headers: {
            'X-API-Key': 'test-api-key',
            'Authorization': 'Bearer test-token'
        },
        security: [
            { type: 'apiKey', name: 'X-API-Key', in: 'header' },
            { type: 'http', scheme: 'bearer' }
        ]
    }, {
        status: 200,
        body: { success: true },
        contentType: 'application/json'
    });

    // Both schemes must be referenced by the operation, in input order.
    const spec = openApiStore.getOpenAPISpec();
    const operation = spec.paths[securedPath]?.[verb];
    expect(operation.security).toBeDefined();
    expect(operation.security).toHaveLength(2);
    expect(operation.security?.[0]).toHaveProperty('apiKey_');
    expect(operation.security?.[1]).toHaveProperty('http_');

    // Both credential headers must be present in the HAR entry.
    const firstEntry = openApiStore.generateHAR().log.entries[0];
    expect(firstEntry.request.headers).toContainEqual({
        name: 'x-api-key',
        value: 'test-api-key'
    });
    expect(firstEntry.request.headers).toContainEqual({
        name: 'authorization',
        value: 'Bearer test-token'
    });
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=openApiStore.test.js.map
|
||||||
1
dist/store/__tests__/openApiStore.test.js.map
vendored
Normal file
1
dist/store/__tests__/openApiStore.test.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
63
dist/store/openApiStore.d.ts
vendored
Normal file
63
dist/store/openApiStore.d.ts
vendored
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import type { OpenAPIV3_1 } from 'openapi-types';
/**
 * Security metadata attached to a recorded request. Mirrors the four OpenAPI
 * security scheme kinds; only the fields relevant to `type` are expected to
 * be populated (e.g. `flows` for 'oauth2', `scheme` for 'http').
 */
interface SecurityInfo {
    type: 'apiKey' | 'oauth2' | 'http' | 'openIdConnect';
    /** HTTP auth scheme — only meaningful when `type` is 'http'. */
    scheme?: 'bearer' | 'basic' | 'digest';
    /** Header/query/cookie parameter name — only meaningful for 'apiKey'. */
    name?: string;
    /** Where the API key is carried — only meaningful for 'apiKey'. */
    in?: 'header' | 'query' | 'cookie';
    /** OAuth2 flow definitions — only meaningful when `type` is 'oauth2'. */
    flows?: {
        implicit?: {
            authorizationUrl: string;
            scopes: Record<string, string>;
        };
        authorizationCode?: {
            authorizationUrl: string;
            tokenUrl: string;
            scopes: Record<string, string>;
        };
        clientCredentials?: {
            tokenUrl: string;
            scopes: Record<string, string>;
        };
        password?: {
            tokenUrl: string;
            scopes: Record<string, string>;
        };
    };
    /** Discovery-document URL — only meaningful for 'openIdConnect'. */
    openIdConnectUrl?: string;
}
/** Captured request half of one proxied exchange. */
interface RequestInfo {
    query: Record<string, string>;
    body: any;
    contentType: string;
    headers?: Record<string, string>;
    security?: SecurityInfo[];
}
/** Captured response half of one proxied exchange. */
interface ResponseInfo {
    status: number;
    body: any;
    contentType: string;
    headers?: Record<string, string>;
}
/**
 * Accumulates proxied traffic and exposes it as an OpenAPI 3.1 document
 * (JSON or YAML) and as an HTTP Archive (HAR) log.
 */
declare class OpenAPIStore {
    private endpoints;
    private harEntries;
    private targetUrl;
    private examples;
    private schemaCache;
    private securitySchemes;
    constructor(targetUrl?: string);
    setTargetUrl(url: string): void;
    /** Discards all recorded endpoints, HAR entries, and examples. */
    clear(): void;
    private deepMergeSchemas;
    private generateJsonSchema;
    private recordHAREntry;
    private buildQueryString;
    private addSecurityScheme;
    /** Records one request/response pair against `method path`. */
    recordEndpoint(path: string, method: string, request: RequestInfo, response: ResponseInfo): void;
    getOpenAPISpec(): OpenAPIV3_1.Document;
    getOpenAPISpecAsYAML(): string;
    /** Writes `openapi.json` and `openapi.yaml` into `outputDir`. */
    saveOpenAPISpec(outputDir: string): void;
    generateHAR(): any;
}
/** Module-wide shared store instance. */
export declare const openApiStore: OpenAPIStore;
export {};
|
||||||
418
dist/store/openApiStore.js
vendored
Normal file
418
dist/store/openApiStore.js
vendored
Normal file
@@ -0,0 +1,418 @@
|
|||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import { stringify } from 'yaml';
|
||||||
|
/**
 * Records proxied API traffic and derives two artifacts from it:
 *   - an OpenAPI 3.1 document (`getOpenAPISpec` / `getOpenAPISpecAsYAML`)
 *   - an HTTP Archive (HAR) log of the raw exchanges (`generateHAR`)
 *
 * Fixes over the previous revision:
 *   - `getOpenAPISpec` no longer splits the "method:path" map key on EVERY
 *     colon, which truncated Express-style paths such as "/users/:id".
 *   - Express-style ":param" segments are now actually converted to OpenAPI
 *     "{param}" placeholders in the emitted paths (the conversion existed but
 *     was dead code).
 *   - `clear()` also resets the schema cache and security schemes, which
 *     previously leaked across clears.
 */
class OpenAPIStore {
    endpoints;       // Map<"method:path", endpoint record>
    harEntries;      // HAR entries in arrival order
    targetUrl;       // base URL of the proxied target server
    examples;        // reserved for recorded example payloads
    schemaCache;     // Map<"method:path:status:contentType", schema[]> used for merging
    securitySchemes; // Map<schemeName, OpenAPI security scheme object>
    constructor(targetUrl = 'http://localhost:8080') {
        this.endpoints = new Map();
        this.harEntries = [];
        this.targetUrl = targetUrl;
        this.examples = new Map();
        this.schemaCache = new Map();
        this.securitySchemes = new Map();
    }
    setTargetUrl(url) {
        this.targetUrl = url;
    }
    /** Discards all recorded state so the store can be reused between runs. */
    clear() {
        this.endpoints.clear();
        this.harEntries = [];
        this.examples.clear();
        // Previously leaked: stale cached schemas kept merging into new runs,
        // and security schemes from earlier traffic survived a clear().
        this.schemaCache.clear();
        this.securitySchemes.clear();
    }
    /**
     * Merges several JSON schemas observed for the same endpoint/status into
     * one. Objects are merged property-wise; heterogeneous schemas collapse
     * into a deduplicated `oneOf`.
     */
    deepMergeSchemas(schemas) {
        if (schemas.length === 0)
            return { type: 'object' };
        if (schemas.length === 1)
            return schemas[0];
        // If all schemas are objects, merge their properties recursively.
        if (schemas.every(s => s.type === 'object')) {
            const mergedProperties = {};
            schemas.forEach(schema => {
                if (schema.properties) {
                    Object.entries(schema.properties).forEach(([key, value]) => {
                        if (!mergedProperties[key]) {
                            mergedProperties[key] = value;
                        }
                        else {
                            // Property seen before: merge the two variants.
                            mergedProperties[key] = this.deepMergeSchemas([mergedProperties[key], value]);
                        }
                    });
                }
            });
            return {
                type: 'object',
                properties: mergedProperties
            };
        }
        // Different shapes: deduplicate structurally, then oneOf if needed.
        const uniqueSchemas = schemas.filter((schema, index, self) => index === self.findIndex(s => JSON.stringify(s) === JSON.stringify(schema)));
        if (uniqueSchemas.length === 1) {
            return uniqueSchemas[0];
        }
        return {
            type: 'object',
            oneOf: uniqueSchemas
        };
    }
    /**
     * Infers a JSON schema (with inline `example`) from a concrete JS value.
     * Arrays of mixed shapes produce `items.oneOf`.
     */
    generateJsonSchema(obj) {
        if (obj === null)
            return { type: 'null' };
        if (Array.isArray(obj)) {
            if (obj.length === 0)
                return { type: 'array', items: { type: 'object' } };
            const itemSchemas = obj.map(item => this.generateJsonSchema(item));
            // Homogeneous array: a single item schema suffices.
            if (itemSchemas.every(s => JSON.stringify(s) === JSON.stringify(itemSchemas[0]))) {
                return {
                    type: 'array',
                    items: itemSchemas[0],
                    example: obj
                };
            }
            // Heterogeneous array: describe items as a oneOf.
            return {
                type: 'array',
                items: {
                    type: 'object',
                    oneOf: itemSchemas
                },
                example: obj
            };
        }
        if (typeof obj === 'object') {
            const properties = {};
            for (const [key, value] of Object.entries(obj)) {
                properties[key] = this.generateJsonSchema(value);
            }
            return {
                type: 'object',
                properties,
                example: obj
            };
        }
        // Map remaining JavaScript primitives to OpenAPI scalar types.
        const typeMap = {
            'string': 'string',
            'number': 'number',
            'boolean': 'boolean',
            'bigint': 'integer',
            'symbol': 'string',
            'undefined': 'string',
            'function': 'string'
        };
        return {
            type: typeMap[typeof obj] || 'string',
            example: obj
        };
    }
    /** Appends one HAR 1.2 entry describing the given exchange. */
    recordHAREntry(path, method, request, response) {
        const now = new Date();
        const entry = {
            startedDateTime: now.toISOString(),
            time: 0,
            request: {
                method: method.toUpperCase(),
                url: `${this.targetUrl}${path}${this.buildQueryString(request.query)}`,
                httpVersion: 'HTTP/1.1',
                headers: Object.entries(request.headers || {})
                    .map(([name, value]) => ({
                    name: name.toLowerCase(), // Normalize header names
                    value: String(value) // Ensure value is a string
                })),
                queryString: Object.entries(request.query || {})
                    .map(([name, value]) => ({
                    name,
                    value: String(value) // Ensure value is a string
                })),
                postData: request.body ? {
                    mimeType: request.contentType,
                    text: JSON.stringify(request.body)
                } : undefined
            },
            response: {
                status: response.status,
                statusText: response.status === 200 ? 'OK' : 'Error',
                httpVersion: 'HTTP/1.1',
                headers: Object.entries(response.headers || {})
                    .map(([name, value]) => ({
                    name: name.toLowerCase(), // Normalize header names
                    value: String(value) // Ensure value is a string
                })),
                content: {
                    size: response.body ? JSON.stringify(response.body).length : 0,
                    mimeType: response.contentType || 'application/json',
                    text: response.body ? JSON.stringify(response.body) : ''
                }
            }
        };
        this.harEntries.push(entry);
    }
    /** Renders a query dict as "?a=1&b=2" (empty string when there are none). */
    buildQueryString(query) {
        if (!query || Object.keys(query).length === 0) {
            return '';
        }
        const params = new URLSearchParams();
        Object.entries(query).forEach(([key, value]) => {
            params.append(key, value);
        });
        return `?${params.toString()}`;
    }
    /**
     * Registers (or overwrites) the security scheme for `security.type` under
     * a deterministic name like "apiKey_" and returns that name.
     * @throws on an unrecognized `security.type`.
     */
    addSecurityScheme(security) {
        // Use a consistent name based on the type with an underscore suffix
        const schemeName = `${security.type}_`;
        let scheme;
        switch (security.type) {
            case 'apiKey':
                scheme = {
                    type: 'apiKey',
                    name: security.name || 'X-API-Key',
                    in: security.in || 'header'
                };
                break;
            case 'oauth2':
                scheme = {
                    type: 'oauth2',
                    flows: security.flows || {
                        implicit: {
                            authorizationUrl: 'https://example.com/oauth/authorize',
                            scopes: {
                                'read': 'Read access',
                                'write': 'Write access'
                            }
                        }
                    }
                };
                break;
            case 'http':
                scheme = {
                    type: 'http',
                    scheme: security.scheme || 'bearer'
                };
                break;
            case 'openIdConnect':
                scheme = {
                    type: 'openIdConnect',
                    openIdConnectUrl: security.openIdConnectUrl || 'https://example.com/.well-known/openid-configuration'
                };
                break;
            default:
                throw new Error(`Unsupported security type: ${security.type}`);
        }
        this.securitySchemes.set(schemeName, scheme);
        return schemeName;
    }
    /**
     * Records one request/response pair, accumulating parameters, request
     * bodies, merged response schemas, and security requirements for the
     * `method path` endpoint. Also logs the exchange to the HAR trace.
     */
    recordEndpoint(path, method, request, response) {
        const key = `${method}:${path}`;
        const endpoint = this.endpoints.get(key) || {
            path,
            method,
            responses: {},
            parameters: [],
            // GET requests have no body per convention.
            requestBody: method.toLowerCase() === 'get' ? undefined : {
                required: false,
                content: {}
            }
        };
        // Add security schemes if present
        if (request.security) {
            endpoint.security = request.security.map(security => {
                const schemeName = this.addSecurityScheme(security);
                return { [schemeName]: ['read'] }; // Add default scope
            });
        }
        // Register Express-style ":param" segments as required path parameters.
        const pathParams = path.match(/:(\w+)/g) || [];
        pathParams.forEach(param => {
            const paramName = param.slice(1);
            if (!endpoint.parameters.some(p => p.name === paramName)) {
                endpoint.parameters.push({
                    name: paramName,
                    in: 'path',
                    required: true,
                    schema: {
                        type: 'string',
                        example: paramName // Use the parameter name as an example
                    }
                });
            }
        });
        // Add query parameters (first observed value becomes the example).
        Object.entries(request.query).forEach(([key, value]) => {
            if (!endpoint.parameters.some(p => p.name === key)) {
                endpoint.parameters.push({
                    name: key,
                    in: 'query',
                    required: false,
                    schema: {
                        type: 'string',
                        example: value
                    }
                });
            }
        });
        // Add request headers as parameters
        if (request.headers) {
            Object.entries(request.headers).forEach(([name, value]) => {
                if (!endpoint.parameters.some(p => p.name === name)) {
                    endpoint.parameters.push({
                        name: name,
                        in: 'header',
                        required: false,
                        schema: {
                            type: 'string',
                            example: value
                        }
                    });
                }
            });
        }
        // Add request body schema if present and not a GET request
        if (request.body && method.toLowerCase() !== 'get') {
            const contentType = request.contentType || 'application/json';
            if (endpoint.requestBody && !endpoint.requestBody.content[contentType]) {
                endpoint.requestBody.content[contentType] = {
                    schema: this.generateJsonSchema(request.body)
                };
            }
        }
        // Add/merge the response schema for this status + content type.
        const responseContentType = response.contentType || 'application/json';
        if (!endpoint.responses[response.status]) {
            endpoint.responses[response.status] = {
                description: `Response for ${method.toUpperCase()} ${path}`,
                content: {}
            };
        }
        const responseObj = endpoint.responses[response.status];
        if (!responseObj.content) {
            responseObj.content = {};
        }
        // Merge the schema for this observation with all earlier ones so the
        // documented shape covers every response seen for this endpoint.
        const currentSchema = this.generateJsonSchema(response.body);
        const schemaKey = `${key}:${response.status}:${responseContentType}`;
        const existingSchemas = this.schemaCache.get(schemaKey) || [];
        existingSchemas.push(currentSchema);
        this.schemaCache.set(schemaKey, existingSchemas);
        responseObj.content[responseContentType] = {
            schema: this.deepMergeSchemas(existingSchemas)
        };
        // Add response headers
        if (response.headers && Object.keys(response.headers).length > 0) {
            endpoint.responses[response.status].headers = Object.entries(response.headers).reduce((acc, [name, value]) => {
                acc[name] = {
                    schema: {
                        type: 'string',
                        example: value
                    },
                    description: `Response header ${name}`
                };
                return acc;
            }, {});
        }
        this.endpoints.set(key, endpoint);
        // Record in HAR
        this.recordHAREntry(path, method, request, response);
    }
    /** Builds the OpenAPI 3.1 document from everything recorded so far. */
    getOpenAPISpec() {
        const paths = Array.from(this.endpoints.values()).reduce((acc, info) => {
            // Do NOT parse method/path out of the map key: the path itself may
            // contain colons ("/users/:id"), which `key.split(':')` truncated.
            // The endpoint record carries both fields verbatim.
            const method = info.method;
            // Express-style ":param" segments become OpenAPI "{param}".
            const specPath = info.path.replace(/:(\w+)/g, '{$1}');
            if (!acc[specPath]) {
                acc[specPath] = {};
            }
            const operation = {
                summary: `${method.toUpperCase()} ${specPath}`,
                responses: info.responses,
            };
            // Only include parameters if there are any
            if (info.parameters.length > 0) {
                operation.parameters = info.parameters;
            }
            // Only include requestBody if it exists
            if (info.requestBody) {
                operation.requestBody = info.requestBody;
            }
            // Add security if it exists
            if (info.security) {
                operation.security = info.security;
            }
            acc[specPath][method] = operation;
            return acc;
        }, {});
        const spec = {
            openapi: '3.1.0',
            info: {
                title: 'Generated API Documentation',
                version: '1.0.0',
                description: 'Automatically generated API documentation from proxy traffic',
            },
            servers: [{
                    url: this.targetUrl,
                }],
            paths
        };
        // Only add components if there are security schemes
        if (this.securitySchemes.size > 0) {
            spec.components = spec.components || {};
            spec.components.securitySchemes = Object.fromEntries(this.securitySchemes);
        }
        return spec;
    }
    /** Same document as `getOpenAPISpec`, serialized to YAML. */
    getOpenAPISpecAsYAML() {
        const spec = this.getOpenAPISpec();
        return stringify(spec, {
            indent: 2,
            simpleKeys: true,
            aliasDuplicateObjects: false,
            strict: true
        });
    }
    /** Writes `openapi.json` and `openapi.yaml` into `outputDir` (created if missing). */
    saveOpenAPISpec(outputDir) {
        const spec = this.getOpenAPISpec();
        const yamlSpec = this.getOpenAPISpecAsYAML();
        if (!fs.existsSync(outputDir)) {
            fs.mkdirSync(outputDir, { recursive: true });
        }
        fs.writeFileSync(path.join(outputDir, 'openapi.json'), JSON.stringify(spec, null, 2));
        fs.writeFileSync(path.join(outputDir, 'openapi.yaml'), yamlSpec);
    }
    /** Wraps the recorded entries in a HAR 1.2 log envelope. */
    generateHAR() {
        return {
            log: {
                version: '1.2',
                creator: {
                    name: 'Arbiter',
                    version: '1.0.0',
                },
                entries: this.harEntries,
            },
        };
    }
}
export const openApiStore = new OpenAPIStore();
|
||||||
|
//# sourceMappingURL=openApiStore.js.map
|
||||||
1
dist/store/openApiStore.js.map
vendored
Normal file
1
dist/store/openApiStore.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
225
integration/__tests__/proxy.test.ts
Normal file
225
integration/__tests__/proxy.test.ts
Normal file
@@ -0,0 +1,225 @@
|
|||||||
|
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||||
|
import { Hono } from 'hono';
|
||||||
|
import { serve } from '@hono/node-server';
|
||||||
|
import { startServers } from '../../src/server.js';
|
||||||
|
import fetch, { RequestInit } from 'node-fetch';
|
||||||
|
import { OpenAPIV3_1 } from 'openapi-types';
|
||||||
|
|
||||||
|
/** Shape of a single HAR log entry as returned by the docs server's /har endpoint. */
interface HAREntry {
  request: {
    method: string;
    url: string;
    headers: Array<{ name: string; value: string }>;
    queryString: Array<{ name: string; value: string }>;
    /** Present only for requests that carried a body (e.g. POST). */
    postData?: {
      text: string;
      mimeType: string;
    };
  };
  response: {
    status: number;
    headers: Array<{ name: string; value: string }>;
    content: {
      text: string;
      mimeType: string;
    };
  };
}

/** Top-level HAR document: a log wrapping the recorded entries. */
interface HAR {
  log: {
    entries: HAREntry[];
  };
}

/** Minimal user record served by the mock target API in these tests. */
interface User {
  id: number;
  name: string;
}
|
||||||
|
|
||||||
|
describe('Arbiter Integration Tests', () => {
|
||||||
|
const targetPort = 3001;
|
||||||
|
const proxyPort = 3002;
|
||||||
|
const docsPort = 3003;
|
||||||
|
|
||||||
|
let targetServer: any;
|
||||||
|
let proxyServer: any;
|
||||||
|
let docsServer: any;
|
||||||
|
|
||||||
|
// Create a mock target API
|
||||||
|
const targetApi = new Hono();
|
||||||
|
|
||||||
|
// Setup test endpoints
|
||||||
|
targetApi.get('/users', (c) => {
|
||||||
|
return c.json([
|
||||||
|
{ id: 1, name: 'John Doe' },
|
||||||
|
{ id: 2, name: 'Jane Smith' }
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
targetApi.post('/users', async (c) => {
|
||||||
|
const body = await c.req.json();
|
||||||
|
return c.json({ id: 3, ...body }, 201);
|
||||||
|
});
|
||||||
|
|
||||||
|
targetApi.get('/users/:id', (c) => {
|
||||||
|
const id = c.req.param('id');
|
||||||
|
return c.json({ id: parseInt(id), name: 'John Doe' });
|
||||||
|
});
|
||||||
|
|
||||||
|
targetApi.get('/secure', (c) => {
|
||||||
|
const apiKey = c.req.header('x-api-key');
|
||||||
|
if (apiKey !== 'test-key') {
|
||||||
|
return c.json({ error: 'Unauthorized' }, 401);
|
||||||
|
}
|
||||||
|
return c.json({ message: 'Secret data' });
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
// Start the target API server
|
||||||
|
targetServer = serve({
|
||||||
|
fetch: targetApi.fetch,
|
||||||
|
port: targetPort
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start Arbiter servers
|
||||||
|
const servers = await startServers({
|
||||||
|
target: `http://localhost:${targetPort}`,
|
||||||
|
proxyPort,
|
||||||
|
docsPort,
|
||||||
|
verbose: false
|
||||||
|
});
|
||||||
|
|
||||||
|
proxyServer = servers.proxyServer;
|
||||||
|
docsServer = servers.docsServer;
|
||||||
|
|
||||||
|
// Wait a bit to ensure servers are ready
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(() => {
|
||||||
|
targetServer?.close();
|
||||||
|
proxyServer?.close();
|
||||||
|
docsServer?.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should proxy basic GET request and record in HAR', async () => {
|
||||||
|
const response = await fetch(`http://localhost:${proxyPort}/users`);
|
||||||
|
expect(response.status).toBe(200);
|
||||||
|
|
||||||
|
const users = await response.json() as User[];
|
||||||
|
expect(users).toHaveLength(2);
|
||||||
|
expect(users[0].name).toBe('John Doe');
|
||||||
|
|
||||||
|
// Check HAR recording
|
||||||
|
const harResponse = await fetch(`http://localhost:${docsPort}/har`);
|
||||||
|
const har = await harResponse.json() as HAR;
|
||||||
|
|
||||||
|
expect(har.log.entries).toHaveLength(1);
|
||||||
|
expect(har.log.entries[0].request.method).toBe('GET');
|
||||||
|
expect(har.log.entries[0].request.url).toBe(`http://localhost:${targetPort}/users`);
|
||||||
|
expect(har.log.entries[0].response.status).toBe(200);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should record POST request with body in HAR', async () => {
|
||||||
|
const response = await fetch(`http://localhost:${proxyPort}/users`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ name: 'Bob Wilson' })
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.status).toBe(201);
|
||||||
|
const newUser = await response.json() as User;
|
||||||
|
expect(newUser.name).toBe('Bob Wilson');
|
||||||
|
|
||||||
|
// Check HAR recording
|
||||||
|
const harResponse = await fetch(`http://localhost:${docsPort}/har`);
|
||||||
|
const har = await harResponse.json() as HAR;
|
||||||
|
|
||||||
|
const postEntry = har.log.entries.find(e => e.request.method === 'POST');
|
||||||
|
expect(postEntry).toBeDefined();
|
||||||
|
expect(postEntry?.request.postData?.text).toBe(JSON.stringify({ name: 'Bob Wilson' }));
|
||||||
|
expect(postEntry?.response.status).toBe(201);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should generate OpenAPI spec with paths and schemas', async () => {
|
||||||
|
// Make some requests to generate OpenAPI spec
|
||||||
|
await fetch(`http://localhost:${proxyPort}/users`);
|
||||||
|
await fetch(`http://localhost:${proxyPort}/users/1`);
|
||||||
|
await fetch(`http://localhost:${proxyPort}/users`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ name: 'Test User' })
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get OpenAPI spec
|
||||||
|
const specResponse = await fetch(`http://localhost:${docsPort}/openapi.json`);
|
||||||
|
const spec = await specResponse.json() as OpenAPIV3_1.Document;
|
||||||
|
|
||||||
|
// Validate paths
|
||||||
|
expect(spec.paths?.['/users']).toBeDefined();
|
||||||
|
expect(spec.paths?.['/users']?.get).toBeDefined();
|
||||||
|
expect(spec.paths?.['/users']?.post).toBeDefined();
|
||||||
|
expect(spec.paths?.['/users/{id}']?.get).toBeDefined();
|
||||||
|
|
||||||
|
// Validate schemas
|
||||||
|
expect(spec.components?.schemas).toBeDefined();
|
||||||
|
const userSchema = spec.components?.schemas?.User as OpenAPIV3_1.SchemaObject;
|
||||||
|
expect(userSchema).toBeDefined();
|
||||||
|
expect(userSchema.properties?.id).toBeDefined();
|
||||||
|
expect(userSchema.properties?.name).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle query parameters', async () => {
|
||||||
|
await fetch(`http://localhost:${proxyPort}/users?limit=10&offset=0`);
|
||||||
|
|
||||||
|
const harResponse = await fetch(`http://localhost:${docsPort}/har`);
|
||||||
|
const har = await harResponse.json() as HAR;
|
||||||
|
|
||||||
|
const entry = har.log.entries.find(e => e.request.url.includes('?limit=10'));
|
||||||
|
expect(entry).toBeDefined();
|
||||||
|
expect(entry?.request.queryString).toEqual([
|
||||||
|
{ name: 'limit', value: '10' },
|
||||||
|
{ name: 'offset', value: '0' }
|
||||||
|
]);
|
||||||
|
|
||||||
|
const specResponse = await fetch(`http://localhost:${docsPort}/openapi.json`);
|
||||||
|
const spec = await specResponse.json() as OpenAPIV3_1.Document;
|
||||||
|
|
||||||
|
const parameters = spec.paths?.['/users']?.get?.parameters as OpenAPIV3_1.ParameterObject[];
|
||||||
|
expect(parameters).toBeDefined();
|
||||||
|
expect(parameters).toContainEqual({
|
||||||
|
name: 'limit',
|
||||||
|
in: 'query',
|
||||||
|
schema: { type: 'string' }
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle security schemes', async () => {
|
||||||
|
await fetch(`http://localhost:${proxyPort}/secure`, {
|
||||||
|
headers: {
|
||||||
|
'x-api-key': 'test-key'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const specResponse = await fetch(`http://localhost:${docsPort}/openapi.json`);
|
||||||
|
const spec = await specResponse.json() as OpenAPIV3_1.Document;
|
||||||
|
|
||||||
|
// Check security scheme definition
|
||||||
|
expect(spec.components?.securitySchemes).toBeDefined();
|
||||||
|
const apiKeyAuth = spec.components?.securitySchemes?.apiKey_ as OpenAPIV3_1.ApiKeySecurityScheme;
|
||||||
|
expect(apiKeyAuth).toBeDefined();
|
||||||
|
expect(apiKeyAuth.type).toBe('apiKey');
|
||||||
|
expect(apiKeyAuth.in).toBe('header');
|
||||||
|
expect(apiKeyAuth.name).toBe('x-api-key');
|
||||||
|
|
||||||
|
// Check security requirement on endpoint
|
||||||
|
const securityRequirements = spec.paths?.['/secure']?.get?.security;
|
||||||
|
expect(securityRequirements).toBeDefined();
|
||||||
|
expect(securityRequirements).toContainEqual({
|
||||||
|
apiKey_: []
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
1
node_modules/.bin/acorn
generated
vendored
Symbolic link
1
node_modules/.bin/acorn
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../acorn/bin/acorn
|
||||||
1
node_modules/.bin/esbuild
generated
vendored
Symbolic link
1
node_modules/.bin/esbuild
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../esbuild/bin/esbuild
|
||||||
1
node_modules/.bin/mime
generated
vendored
Symbolic link
1
node_modules/.bin/mime
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../mime/cli.js
|
||||||
1
node_modules/.bin/mkdirp
generated
vendored
Symbolic link
1
node_modules/.bin/mkdirp
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../mkdirp/bin/cmd.js
|
||||||
1
node_modules/.bin/nanoid
generated
vendored
Symbolic link
1
node_modules/.bin/nanoid
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../nanoid/bin/nanoid.cjs
|
||||||
1
node_modules/.bin/node-which
generated
vendored
Symbolic link
1
node_modules/.bin/node-which
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../which/bin/node-which
|
||||||
1
node_modules/.bin/parser
generated
vendored
Symbolic link
1
node_modules/.bin/parser
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../@babel/parser/bin/babel-parser.js
|
||||||
1
node_modules/.bin/resolve
generated
vendored
Symbolic link
1
node_modules/.bin/resolve
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../resolve/bin/resolve
|
||||||
1
node_modules/.bin/rimraf
generated
vendored
Symbolic link
1
node_modules/.bin/rimraf
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../rimraf/bin.js
|
||||||
1
node_modules/.bin/rollup
generated
vendored
Symbolic link
1
node_modules/.bin/rollup
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../rollup/dist/bin/rollup
|
||||||
1
node_modules/.bin/semver
generated
vendored
Symbolic link
1
node_modules/.bin/semver
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../semver/bin/semver.js
|
||||||
1
node_modules/.bin/tree-kill
generated
vendored
Symbolic link
1
node_modules/.bin/tree-kill
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../tree-kill/cli.js
|
||||||
1
node_modules/.bin/ts-node
generated
vendored
Symbolic link
1
node_modules/.bin/ts-node
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../ts-node/dist/bin.js
|
||||||
1
node_modules/.bin/ts-node-cwd
generated
vendored
Symbolic link
1
node_modules/.bin/ts-node-cwd
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../ts-node/dist/bin-cwd.js
|
||||||
1
node_modules/.bin/ts-node-dev
generated
vendored
Symbolic link
1
node_modules/.bin/ts-node-dev
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../ts-node-dev/lib/bin.js
|
||||||
1
node_modules/.bin/ts-node-esm
generated
vendored
Symbolic link
1
node_modules/.bin/ts-node-esm
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../ts-node/dist/bin-esm.js
|
||||||
1
node_modules/.bin/ts-node-script
generated
vendored
Symbolic link
1
node_modules/.bin/ts-node-script
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../ts-node/dist/bin-script.js
|
||||||
1
node_modules/.bin/ts-node-transpile-only
generated
vendored
Symbolic link
1
node_modules/.bin/ts-node-transpile-only
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../ts-node/dist/bin-transpile.js
|
||||||
1
node_modules/.bin/ts-script
generated
vendored
Symbolic link
1
node_modules/.bin/ts-script
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../ts-node/dist/bin-script-deprecated.js
|
||||||
1
node_modules/.bin/tsc
generated
vendored
Symbolic link
1
node_modules/.bin/tsc
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../typescript/bin/tsc
|
||||||
1
node_modules/.bin/tsnd
generated
vendored
Symbolic link
1
node_modules/.bin/tsnd
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../ts-node-dev/lib/bin.js
|
||||||
1
node_modules/.bin/tsserver
generated
vendored
Symbolic link
1
node_modules/.bin/tsserver
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../typescript/bin/tsserver
|
||||||
1
node_modules/.bin/uuid
generated
vendored
Symbolic link
1
node_modules/.bin/uuid
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../uuid/dist/bin/uuid
|
||||||
1
node_modules/.bin/vite
generated
vendored
Symbolic link
1
node_modules/.bin/vite
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../vite/bin/vite.js
|
||||||
1
node_modules/.bin/vite-node
generated
vendored
Symbolic link
1
node_modules/.bin/vite-node
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../vite-node/vite-node.mjs
|
||||||
1
node_modules/.bin/vitest
generated
vendored
Symbolic link
1
node_modules/.bin/vitest
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../vitest/vitest.mjs
|
||||||
1
node_modules/.bin/why-is-node-running
generated
vendored
Symbolic link
1
node_modules/.bin/why-is-node-running
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../why-is-node-running/cli.js
|
||||||
1
node_modules/.bin/yaml
generated
vendored
Symbolic link
1
node_modules/.bin/yaml
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../yaml/bin.mjs
|
||||||
7238
node_modules/.package-lock.json
generated
vendored
Normal file
7238
node_modules/.package-lock.json
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
node_modules/.vite/results.json
generated
vendored
Normal file
1
node_modules/.vite/results.json
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":"3.0.9","results":[[":src/store/__tests__/openApiStore.test.ts",{"duration":14.902851999999996,"failed":false}],[":integration/__tests__/proxy.test.ts",{"duration":1075.252637,"failed":false}],[":src/middleware/__tests__/harRecorder.test.ts",{"duration":8.526184,"failed":false}],[":src/__tests__/cli.test.ts",{"duration":4.952122999999972,"failed":false}]]}
|
||||||
202
node_modules/@ampproject/remapping/LICENSE
generated
vendored
Normal file
202
node_modules/@ampproject/remapping/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
218
node_modules/@ampproject/remapping/README.md
generated
vendored
Normal file
218
node_modules/@ampproject/remapping/README.md
generated
vendored
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
# @ampproject/remapping
|
||||||
|
|
||||||
|
> Remap sequential sourcemaps through transformations to point at the original source code
|
||||||
|
|
||||||
|
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
|
||||||
|
them to the original source locations. Think "my minified code, transformed with babel and bundled
|
||||||
|
with webpack", all pointing to the correct location in your original source code.
|
||||||
|
|
||||||
|
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
|
||||||
|
they only need to generate an output sourcemap. This greatly simplifies building custom
|
||||||
|
transformations (think a find-and-replace).
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @ampproject/remapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
function remapping(
|
||||||
|
map: SourceMap | SourceMap[],
|
||||||
|
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
|
||||||
|
options?: { excludeContent: boolean, decodedMappings: boolean }
|
||||||
|
): SourceMap;
|
||||||
|
|
||||||
|
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
|
||||||
|
// "source" location (where child sources are resolved relative to, or the location of original
|
||||||
|
// source), and the ability to override the "content" of an original source for inclusion in the
|
||||||
|
// output sourcemap.
|
||||||
|
type LoaderContext = {
|
||||||
|
readonly importer: string;
|
||||||
|
readonly depth: number;
|
||||||
|
source: string;
|
||||||
|
content: string | null | undefined;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
|
||||||
|
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
|
||||||
|
a transformed file (it has a sourcmap associated with it), then the `loader` should return that
|
||||||
|
sourcemap. If not, the path will be treated as an original, untransformed source code.
|
||||||
|
|
||||||
|
```js
|
||||||
|
// Babel transformed "helloworld.js" into "transformed.js"
|
||||||
|
const transformedMap = JSON.stringify({
|
||||||
|
file: 'transformed.js',
|
||||||
|
// 1st column of 2nd line of output file translates into the 1st source
|
||||||
|
// file, line 3, column 2
|
||||||
|
mappings: ';CAEE',
|
||||||
|
sources: ['helloworld.js'],
|
||||||
|
version: 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Uglify minified "transformed.js" into "transformed.min.js"
|
||||||
|
const minifiedTransformedMap = JSON.stringify({
|
||||||
|
file: 'transformed.min.js',
|
||||||
|
// 0th column of 1st line of output file translates into the 1st source
|
||||||
|
// file, line 2, column 1.
|
||||||
|
mappings: 'AACC',
|
||||||
|
names: [],
|
||||||
|
sources: ['transformed.js'],
|
||||||
|
version: 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
const remapped = remapping(
|
||||||
|
minifiedTransformedMap,
|
||||||
|
(file, ctx) => {
|
||||||
|
|
||||||
|
// The "transformed.js" file is an transformed file.
|
||||||
|
if (file === 'transformed.js') {
|
||||||
|
// The root importer is empty.
|
||||||
|
console.assert(ctx.importer === '');
|
||||||
|
// The depth in the sourcemap tree we're currently loading.
|
||||||
|
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
|
||||||
|
console.assert(ctx.depth === 1);
|
||||||
|
|
||||||
|
return transformedMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Loader will be called to load transformedMap's source file pointers as well.
|
||||||
|
console.assert(file === 'helloworld.js');
|
||||||
|
// `transformed.js`'s sourcemap points into `helloworld.js`.
|
||||||
|
console.assert(ctx.importer === 'transformed.js');
|
||||||
|
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
|
||||||
|
console.assert(ctx.depth === 2);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(remapped);
|
||||||
|
// {
|
||||||
|
// file: 'transpiled.min.js',
|
||||||
|
// mappings: 'AAEE',
|
||||||
|
// sources: ['helloworld.js'],
|
||||||
|
// version: 3,
|
||||||
|
// };
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, `loader` will be called twice:
|
||||||
|
|
||||||
|
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
|
||||||
|
associated sourcemap for it (its a transformed file, after all) so that sourcemap locations can
|
||||||
|
be traced through it into the source files it represents.
|
||||||
|
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
|
||||||
|
we return `null`.
|
||||||
|
|
||||||
|
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
|
||||||
|
you were to read the `mappings`, it says "0th column of the first line output line points to the 1st
|
||||||
|
column of the 2nd line of the file `helloworld.js`".
|
||||||
|
|
||||||
|
### Multiple transformations of a file
|
||||||
|
|
||||||
|
As a convenience, if you have multiple single-source transformations of a file, you may pass an
|
||||||
|
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
|
||||||
|
changes the `importer` and `depth` of each call to our loader. So our above example could have been
|
||||||
|
written as:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const remapped = remapping(
|
||||||
|
[minifiedTransformedMap, transformedMap],
|
||||||
|
() => null
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(remapped);
|
||||||
|
// {
|
||||||
|
// file: 'transpiled.min.js',
|
||||||
|
// mappings: 'AAEE',
|
||||||
|
// sources: ['helloworld.js'],
|
||||||
|
// version: 3,
|
||||||
|
// };
|
||||||
|
```
|
||||||
|
|
||||||
|
### Advanced control of the loading graph
|
||||||
|
|
||||||
|
#### `source`
|
||||||
|
|
||||||
|
The `source` property can overridden to any value to change the location of the current load. Eg,
|
||||||
|
for an original source file, it allows us to change the location to the original source regardless
|
||||||
|
of what the sourcemap source entry says. And for transformed files, it allows us to change the
|
||||||
|
relative resolving location for child sources of the loaded sourcemap.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const remapped = remapping(
|
||||||
|
minifiedTransformedMap,
|
||||||
|
(file, ctx) => {
|
||||||
|
|
||||||
|
if (file === 'transformed.js') {
|
||||||
|
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
|
||||||
|
// source files are loaded, they will now be relative to `src/`.
|
||||||
|
ctx.source = 'src/transformed.js';
|
||||||
|
return transformedMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.assert(file === 'src/helloworld.js');
|
||||||
|
// We could futher change the source of this original file, eg, to be inside a nested directory
|
||||||
|
// itself. This will be reflected in the remapped sourcemap.
|
||||||
|
ctx.source = 'src/nested/transformed.js';
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(remapped);
|
||||||
|
// {
|
||||||
|
// …,
|
||||||
|
// sources: ['src/nested/helloworld.js'],
|
||||||
|
// };
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
#### `content`
|
||||||
|
|
||||||
|
The `content` property can be overridden when we encounter an original source file. Eg, this allows
|
||||||
|
you to manually provide the source content of the original file regardless of whether the
|
||||||
|
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
|
||||||
|
the source content.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const remapped = remapping(
|
||||||
|
minifiedTransformedMap,
|
||||||
|
(file, ctx) => {
|
||||||
|
|
||||||
|
if (file === 'transformed.js') {
|
||||||
|
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
|
||||||
|
// would not include any `sourcesContent` values.
|
||||||
|
return transformedMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.assert(file === 'helloworld.js');
|
||||||
|
// We can read the file to provide the source content.
|
||||||
|
ctx.content = fs.readFileSync(file, 'utf8');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(remapped);
|
||||||
|
// {
|
||||||
|
// …,
|
||||||
|
// sourcesContent: [
|
||||||
|
// 'console.log("Hello world!")',
|
||||||
|
// ],
|
||||||
|
// };
|
||||||
|
```
|
||||||
|
|
||||||
|
### Options
|
||||||
|
|
||||||
|
#### excludeContent
|
||||||
|
|
||||||
|
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
|
||||||
|
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
|
||||||
|
the size out the sourcemap.
|
||||||
|
|
||||||
|
#### decodedMappings
|
||||||
|
|
||||||
|
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
|
||||||
|
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
|
||||||
|
encoding into a VLQ string.
|
||||||
197
node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
Normal file
197
node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
|
||||||
|
import { GenMapping, maybeAddSegment, setSourceContent, setIgnore, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
|
||||||
|
const EMPTY_SOURCES = [];
|
||||||
|
function SegmentObject(source, line, column, name, content, ignore) {
|
||||||
|
return { source, line, column, name, content, ignore };
|
||||||
|
}
|
||||||
|
function Source(map, sources, source, content, ignore) {
|
||||||
|
return {
|
||||||
|
map,
|
||||||
|
sources,
|
||||||
|
source,
|
||||||
|
content,
|
||||||
|
ignore,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||||
|
* (which may themselves be SourceMapTrees).
|
||||||
|
*/
|
||||||
|
function MapSource(map, sources) {
|
||||||
|
return Source(map, sources, '', null, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||||
|
* segment tracing ends at the `OriginalSource`.
|
||||||
|
*/
|
||||||
|
function OriginalSource(source, content, ignore) {
|
||||||
|
return Source(null, EMPTY_SOURCES, source, content, ignore);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||||
|
* resolving each mapping in terms of the original source files.
|
||||||
|
*/
|
||||||
|
function traceMappings(tree) {
|
||||||
|
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
|
||||||
|
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
|
||||||
|
const gen = new GenMapping({ file: tree.map.file });
|
||||||
|
const { sources: rootSources, map } = tree;
|
||||||
|
const rootNames = map.names;
|
||||||
|
const rootMappings = decodedMappings(map);
|
||||||
|
for (let i = 0; i < rootMappings.length; i++) {
|
||||||
|
const segments = rootMappings[i];
|
||||||
|
for (let j = 0; j < segments.length; j++) {
|
||||||
|
const segment = segments[j];
|
||||||
|
const genCol = segment[0];
|
||||||
|
let traced = SOURCELESS_MAPPING;
|
||||||
|
// 1-length segments only move the current generated column, there's no source information
|
||||||
|
// to gather from it.
|
||||||
|
if (segment.length !== 1) {
|
||||||
|
const source = rootSources[segment[1]];
|
||||||
|
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||||
|
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||||
|
// respective segment into an original source.
|
||||||
|
if (traced == null)
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const { column, line, name, content, source, ignore } = traced;
|
||||||
|
maybeAddSegment(gen, i, genCol, source, line, column, name);
|
||||||
|
if (source && content != null)
|
||||||
|
setSourceContent(gen, source, content);
|
||||||
|
if (ignore)
|
||||||
|
setIgnore(gen, source, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return gen;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||||
|
* child SourceMapTrees, until we find the original source map.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(source, line, column, name) {
|
||||||
|
if (!source.map) {
|
||||||
|
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
|
||||||
|
}
|
||||||
|
const segment = traceSegment(source.map, line, column);
|
||||||
|
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||||
|
if (segment == null)
|
||||||
|
return null;
|
||||||
|
// 1-length segments only move the current generated column, there's no source information
|
||||||
|
// to gather from it.
|
||||||
|
if (segment.length === 1)
|
||||||
|
return SOURCELESS_MAPPING;
|
||||||
|
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||||
|
}
|
||||||
|
|
||||||
|
function asArray(value) {
|
||||||
|
if (Array.isArray(value))
|
||||||
|
return value;
|
||||||
|
return [value];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||||
|
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||||
|
* `OriginalSource`s and `SourceMapTree`s.
|
||||||
|
*
|
||||||
|
* Every sourcemap is composed of a collection of source files and mappings
|
||||||
|
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||||
|
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||||
|
* does not have an associated sourcemap, it is considered an original,
|
||||||
|
* unmodified source file.
|
||||||
|
*/
|
||||||
|
function buildSourceMapTree(input, loader) {
|
||||||
|
const maps = asArray(input).map((m) => new TraceMap(m, ''));
|
||||||
|
const map = maps.pop();
|
||||||
|
for (let i = 0; i < maps.length; i++) {
|
||||||
|
if (maps[i].sources.length > 1) {
|
||||||
|
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||||
|
'Did you specify these with the most recent transformation maps first?');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let tree = build(map, loader, '', 0);
|
||||||
|
for (let i = maps.length - 1; i >= 0; i--) {
|
||||||
|
tree = MapSource(maps[i], [tree]);
|
||||||
|
}
|
||||||
|
return tree;
|
||||||
|
}
|
||||||
|
function build(map, loader, importer, importerDepth) {
|
||||||
|
const { resolvedSources, sourcesContent, ignoreList } = map;
|
||||||
|
const depth = importerDepth + 1;
|
||||||
|
const children = resolvedSources.map((sourceFile, i) => {
|
||||||
|
// The loading context gives the loader more information about why this file is being loaded
|
||||||
|
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||||
|
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||||
|
// an unmodified source file.
|
||||||
|
const ctx = {
|
||||||
|
importer,
|
||||||
|
depth,
|
||||||
|
source: sourceFile || '',
|
||||||
|
content: undefined,
|
||||||
|
ignore: undefined,
|
||||||
|
};
|
||||||
|
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||||
|
// TODO: We should eventually support async loading of sourcemap files.
|
||||||
|
const sourceMap = loader(ctx.source, ctx);
|
||||||
|
const { source, content, ignore } = ctx;
|
||||||
|
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||||
|
if (sourceMap)
|
||||||
|
return build(new TraceMap(sourceMap, source), loader, source, depth);
|
||||||
|
// Else, it's an unmodified source file.
|
||||||
|
// The contents of this unmodified source file can be overridden via the loader context,
|
||||||
|
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||||
|
// the importing sourcemap's `sourcesContent` field.
|
||||||
|
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||||
|
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
|
||||||
|
return OriginalSource(source, sourceContent, ignored);
|
||||||
|
});
|
||||||
|
return MapSource(map, children);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||||
|
* provided to it.
|
||||||
|
*/
|
||||||
|
class SourceMap {
|
||||||
|
constructor(map, options) {
|
||||||
|
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
|
||||||
|
this.version = out.version; // SourceMap spec says this should be first.
|
||||||
|
this.file = out.file;
|
||||||
|
this.mappings = out.mappings;
|
||||||
|
this.names = out.names;
|
||||||
|
this.ignoreList = out.ignoreList;
|
||||||
|
this.sourceRoot = out.sourceRoot;
|
||||||
|
this.sources = out.sources;
|
||||||
|
if (!options.excludeContent) {
|
||||||
|
this.sourcesContent = out.sourcesContent;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
toString() {
|
||||||
|
return JSON.stringify(this);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Traces through all the mappings in the root sourcemap, through the sources
|
||||||
|
* (and their sourcemaps), all the way back to the original source location.
|
||||||
|
*
|
||||||
|
* `loader` will be called every time we encounter a source file. If it returns
|
||||||
|
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||||
|
* it returns a falsey value, that source file is treated as an original,
|
||||||
|
* unmodified source file.
|
||||||
|
*
|
||||||
|
* Pass `excludeContent` to exclude any self-containing source file content
|
||||||
|
* from the output sourcemap.
|
||||||
|
*
|
||||||
|
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||||
|
* VLQ encoded) mappings.
|
||||||
|
*/
|
||||||
|
function remapping(input, loader, options) {
|
||||||
|
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||||
|
const tree = buildSourceMapTree(input, loader);
|
||||||
|
return new SourceMap(traceMappings(tree), opts);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { remapping as default };
|
||||||
|
//# sourceMappingURL=remapping.mjs.map
|
||||||
1
node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
Normal file
1
node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
202
node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
Normal file
202
node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
|
||||||
|
})(this, (function (traceMapping, genMapping) { 'use strict';
|
||||||
|
|
||||||
|
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
|
||||||
|
const EMPTY_SOURCES = [];
|
||||||
|
function SegmentObject(source, line, column, name, content, ignore) {
|
||||||
|
return { source, line, column, name, content, ignore };
|
||||||
|
}
|
||||||
|
function Source(map, sources, source, content, ignore) {
|
||||||
|
return {
|
||||||
|
map,
|
||||||
|
sources,
|
||||||
|
source,
|
||||||
|
content,
|
||||||
|
ignore,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||||
|
* (which may themselves be SourceMapTrees).
|
||||||
|
*/
|
||||||
|
function MapSource(map, sources) {
|
||||||
|
return Source(map, sources, '', null, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||||
|
* segment tracing ends at the `OriginalSource`.
|
||||||
|
*/
|
||||||
|
function OriginalSource(source, content, ignore) {
|
||||||
|
return Source(null, EMPTY_SOURCES, source, content, ignore);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||||
|
* resolving each mapping in terms of the original source files.
|
||||||
|
*/
|
||||||
|
function traceMappings(tree) {
|
||||||
|
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
|
||||||
|
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
|
||||||
|
const gen = new genMapping.GenMapping({ file: tree.map.file });
|
||||||
|
const { sources: rootSources, map } = tree;
|
||||||
|
const rootNames = map.names;
|
||||||
|
const rootMappings = traceMapping.decodedMappings(map);
|
||||||
|
for (let i = 0; i < rootMappings.length; i++) {
|
||||||
|
const segments = rootMappings[i];
|
||||||
|
for (let j = 0; j < segments.length; j++) {
|
||||||
|
const segment = segments[j];
|
||||||
|
const genCol = segment[0];
|
||||||
|
let traced = SOURCELESS_MAPPING;
|
||||||
|
// 1-length segments only move the current generated column, there's no source information
|
||||||
|
// to gather from it.
|
||||||
|
if (segment.length !== 1) {
|
||||||
|
const source = rootSources[segment[1]];
|
||||||
|
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||||
|
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||||
|
// respective segment into an original source.
|
||||||
|
if (traced == null)
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const { column, line, name, content, source, ignore } = traced;
|
||||||
|
genMapping.maybeAddSegment(gen, i, genCol, source, line, column, name);
|
||||||
|
if (source && content != null)
|
||||||
|
genMapping.setSourceContent(gen, source, content);
|
||||||
|
if (ignore)
|
||||||
|
genMapping.setIgnore(gen, source, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return gen;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||||
|
* child SourceMapTrees, until we find the original source map.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(source, line, column, name) {
|
||||||
|
if (!source.map) {
|
||||||
|
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
|
||||||
|
}
|
||||||
|
const segment = traceMapping.traceSegment(source.map, line, column);
|
||||||
|
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||||
|
if (segment == null)
|
||||||
|
return null;
|
||||||
|
// 1-length segments only move the current generated column, there's no source information
|
||||||
|
// to gather from it.
|
||||||
|
if (segment.length === 1)
|
||||||
|
return SOURCELESS_MAPPING;
|
||||||
|
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||||
|
}
|
||||||
|
|
||||||
|
function asArray(value) {
|
||||||
|
if (Array.isArray(value))
|
||||||
|
return value;
|
||||||
|
return [value];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||||
|
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||||
|
* `OriginalSource`s and `SourceMapTree`s.
|
||||||
|
*
|
||||||
|
* Every sourcemap is composed of a collection of source files and mappings
|
||||||
|
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||||
|
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||||
|
* does not have an associated sourcemap, it is considered an original,
|
||||||
|
* unmodified source file.
|
||||||
|
*/
|
||||||
|
function buildSourceMapTree(input, loader) {
|
||||||
|
const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
|
||||||
|
const map = maps.pop();
|
||||||
|
for (let i = 0; i < maps.length; i++) {
|
||||||
|
if (maps[i].sources.length > 1) {
|
||||||
|
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||||
|
'Did you specify these with the most recent transformation maps first?');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let tree = build(map, loader, '', 0);
|
||||||
|
for (let i = maps.length - 1; i >= 0; i--) {
|
||||||
|
tree = MapSource(maps[i], [tree]);
|
||||||
|
}
|
||||||
|
return tree;
|
||||||
|
}
|
||||||
|
function build(map, loader, importer, importerDepth) {
|
||||||
|
const { resolvedSources, sourcesContent, ignoreList } = map;
|
||||||
|
const depth = importerDepth + 1;
|
||||||
|
const children = resolvedSources.map((sourceFile, i) => {
|
||||||
|
// The loading context gives the loader more information about why this file is being loaded
|
||||||
|
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||||
|
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||||
|
// an unmodified source file.
|
||||||
|
const ctx = {
|
||||||
|
importer,
|
||||||
|
depth,
|
||||||
|
source: sourceFile || '',
|
||||||
|
content: undefined,
|
||||||
|
ignore: undefined,
|
||||||
|
};
|
||||||
|
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||||
|
// TODO: We should eventually support async loading of sourcemap files.
|
||||||
|
const sourceMap = loader(ctx.source, ctx);
|
||||||
|
const { source, content, ignore } = ctx;
|
||||||
|
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||||
|
if (sourceMap)
|
||||||
|
return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
|
||||||
|
// Else, it's an unmodified source file.
|
||||||
|
// The contents of this unmodified source file can be overridden via the loader context,
|
||||||
|
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||||
|
// the importing sourcemap's `sourcesContent` field.
|
||||||
|
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||||
|
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
|
||||||
|
return OriginalSource(source, sourceContent, ignored);
|
||||||
|
});
|
||||||
|
return MapSource(map, children);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||||
|
* provided to it.
|
||||||
|
*/
|
||||||
|
class SourceMap {
|
||||||
|
constructor(map, options) {
|
||||||
|
const out = options.decodedMappings ? genMapping.toDecodedMap(map) : genMapping.toEncodedMap(map);
|
||||||
|
this.version = out.version; // SourceMap spec says this should be first.
|
||||||
|
this.file = out.file;
|
||||||
|
this.mappings = out.mappings;
|
||||||
|
this.names = out.names;
|
||||||
|
this.ignoreList = out.ignoreList;
|
||||||
|
this.sourceRoot = out.sourceRoot;
|
||||||
|
this.sources = out.sources;
|
||||||
|
if (!options.excludeContent) {
|
||||||
|
this.sourcesContent = out.sourcesContent;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
toString() {
|
||||||
|
return JSON.stringify(this);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Traces through all the mappings in the root sourcemap, through the sources
|
||||||
|
* (and their sourcemaps), all the way back to the original source location.
|
||||||
|
*
|
||||||
|
* `loader` will be called every time we encounter a source file. If it returns
|
||||||
|
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||||
|
* it returns a falsey value, that source file is treated as an original,
|
||||||
|
* unmodified source file.
|
||||||
|
*
|
||||||
|
* Pass `excludeContent` to exclude any self-containing source file content
|
||||||
|
* from the output sourcemap.
|
||||||
|
*
|
||||||
|
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||||
|
* VLQ encoded) mappings.
|
||||||
|
*/
|
||||||
|
function remapping(input, loader, options) {
|
||||||
|
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||||
|
const tree = buildSourceMapTree(input, loader);
|
||||||
|
return new SourceMap(traceMappings(tree), opts);
|
||||||
|
}
|
||||||
|
|
||||||
|
return remapping;
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=remapping.umd.js.map
|
||||||
1
node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
Normal file
1
node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
Normal file
14
node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import type { MapSource as MapSourceType } from './source-map-tree';
|
||||||
|
import type { SourceMapInput, SourceMapLoader } from './types';
|
||||||
|
/**
|
||||||
|
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||||
|
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||||
|
* `OriginalSource`s and `SourceMapTree`s.
|
||||||
|
*
|
||||||
|
* Every sourcemap is composed of a collection of source files and mappings
|
||||||
|
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||||
|
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||||
|
* does not have an associated sourcemap, it is considered an original,
|
||||||
|
* unmodified source file.
|
||||||
|
*/
|
||||||
|
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
|
||||||
20
node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
Normal file
20
node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import SourceMap from './source-map';
|
||||||
|
import type { SourceMapInput, SourceMapLoader, Options } from './types';
|
||||||
|
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
|
||||||
|
export type { SourceMap };
|
||||||
|
/**
|
||||||
|
* Traces through all the mappings in the root sourcemap, through the sources
|
||||||
|
* (and their sourcemaps), all the way back to the original source location.
|
||||||
|
*
|
||||||
|
* `loader` will be called every time we encounter a source file. If it returns
|
||||||
|
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||||
|
* it returns a falsey value, that source file is treated as an original,
|
||||||
|
* unmodified source file.
|
||||||
|
*
|
||||||
|
* Pass `excludeContent` to exclude any self-containing source file content
|
||||||
|
* from the output sourcemap.
|
||||||
|
*
|
||||||
|
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||||
|
* VLQ encoded) mappings.
|
||||||
|
*/
|
||||||
|
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
|
||||||
45
node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
Normal file
45
node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { GenMapping } from '@jridgewell/gen-mapping';
|
||||||
|
import type { TraceMap } from '@jridgewell/trace-mapping';
|
||||||
|
export declare type SourceMapSegmentObject = {
|
||||||
|
column: number;
|
||||||
|
line: number;
|
||||||
|
name: string;
|
||||||
|
source: string;
|
||||||
|
content: string | null;
|
||||||
|
ignore: boolean;
|
||||||
|
};
|
||||||
|
export declare type OriginalSource = {
|
||||||
|
map: null;
|
||||||
|
sources: Sources[];
|
||||||
|
source: string;
|
||||||
|
content: string | null;
|
||||||
|
ignore: boolean;
|
||||||
|
};
|
||||||
|
export declare type MapSource = {
|
||||||
|
map: TraceMap;
|
||||||
|
sources: Sources[];
|
||||||
|
source: string;
|
||||||
|
content: null;
|
||||||
|
ignore: false;
|
||||||
|
};
|
||||||
|
export declare type Sources = OriginalSource | MapSource;
|
||||||
|
/**
|
||||||
|
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||||
|
* (which may themselves be SourceMapTrees).
|
||||||
|
*/
|
||||||
|
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
|
||||||
|
/**
|
||||||
|
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||||
|
* segment tracing ends at the `OriginalSource`.
|
||||||
|
*/
|
||||||
|
export declare function OriginalSource(source: string, content: string | null, ignore: boolean): OriginalSource;
|
||||||
|
/**
|
||||||
|
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||||
|
* resolving each mapping in terms of the original source files.
|
||||||
|
*/
|
||||||
|
export declare function traceMappings(tree: MapSource): GenMapping;
|
||||||
|
/**
|
||||||
|
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||||
|
* child SourceMapTrees, until we find the original source map.
|
||||||
|
*/
|
||||||
|
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
|
||||||
18
node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
Normal file
18
node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import type { GenMapping } from '@jridgewell/gen-mapping';
|
||||||
|
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
|
||||||
|
/**
|
||||||
|
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||||
|
* provided to it.
|
||||||
|
*/
|
||||||
|
export default class SourceMap {
|
||||||
|
file?: string | null;
|
||||||
|
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
|
||||||
|
sourceRoot?: string;
|
||||||
|
names: string[];
|
||||||
|
sources: (string | null)[];
|
||||||
|
sourcesContent?: (string | null)[];
|
||||||
|
version: 3;
|
||||||
|
ignoreList: number[] | undefined;
|
||||||
|
constructor(map: GenMapping, options: Options);
|
||||||
|
toString(): string;
|
||||||
|
}
|
||||||
15
node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
Normal file
15
node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||||
|
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
|
||||||
|
export type { SourceMapInput };
|
||||||
|
export declare type LoaderContext = {
|
||||||
|
readonly importer: string;
|
||||||
|
readonly depth: number;
|
||||||
|
source: string;
|
||||||
|
content: string | null | undefined;
|
||||||
|
ignore: boolean | undefined;
|
||||||
|
};
|
||||||
|
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
|
||||||
|
export declare type Options = {
|
||||||
|
excludeContent?: boolean;
|
||||||
|
decodedMappings?: boolean;
|
||||||
|
};
|
||||||
19
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
19
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
257
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
257
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@@ -0,0 +1,257 @@
|
|||||||
|
# @jridgewell/trace-mapping
|
||||||
|
|
||||||
|
> Trace the original position through a source map
|
||||||
|
|
||||||
|
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
||||||
|
original location in the source file through a source map.
|
||||||
|
|
||||||
|
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
||||||
|
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/trace-mapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
TraceMap,
|
||||||
|
originalPositionFor,
|
||||||
|
generatedPositionFor,
|
||||||
|
sourceContentFor,
|
||||||
|
isIgnored,
|
||||||
|
} from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
const tracer = new TraceMap({
|
||||||
|
version: 3,
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['content of input.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'KAyCIA',
|
||||||
|
ignoreList: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
// Lines start at line 1, columns at column 0.
|
||||||
|
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = sourceContentFor(tracer, traced.source);
|
||||||
|
assert.strictEqual(content, 'content for input.js');
|
||||||
|
|
||||||
|
const generated = generatedPositionFor(tracer, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
});
|
||||||
|
assert.deepEqual(generated, {
|
||||||
|
line: 1,
|
||||||
|
column: 5,
|
||||||
|
});
|
||||||
|
|
||||||
|
const ignored = isIgnored(tracer, 'input.js');
|
||||||
|
assert.equal(ignored, false);
|
||||||
|
```
|
||||||
|
|
||||||
|
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
||||||
|
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { traceSegment } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
// line is 0-base.
|
||||||
|
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
|
||||||
|
|
||||||
|
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
// Again, line is 0-base and so is sourceLine
|
||||||
|
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
|
||||||
|
```
|
||||||
|
|
||||||
|
### SectionedSourceMaps
|
||||||
|
|
||||||
|
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
|
||||||
|
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
|
||||||
|
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
|
||||||
|
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
|
||||||
|
`TraceMap` instance:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { AnyMap } from '@jridgewell/trace-mapping';
|
||||||
|
const fooOutput = 'foo';
|
||||||
|
const barOutput = 'bar';
|
||||||
|
const output = [fooOutput, barOutput].join('\n');
|
||||||
|
|
||||||
|
const sectioned = new AnyMap({
|
||||||
|
version: 3,
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
// 0-base line and column
|
||||||
|
offset: { line: 0, column: 0 },
|
||||||
|
// fooOutput's sourcemap
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['foo.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// barOutput's sourcemap will not affect the first line, only the second
|
||||||
|
offset: { line: 1, column: 0 },
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['bar.js'],
|
||||||
|
names: ['bar'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const traced = originalPositionFor(sectioned, {
|
||||||
|
line: 2,
|
||||||
|
column: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'bar.js',
|
||||||
|
line: 1,
|
||||||
|
column: 0,
|
||||||
|
name: 'bar',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map - 45120 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 562400 bytes
|
||||||
|
trace-mapping encoded 5706544 bytes
|
||||||
|
source-map-js 10717664 bytes
|
||||||
|
source-map-0.6.1 17446384 bytes
|
||||||
|
source-map-0.8.0 9701757 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map - 347793 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 89832 bytes
|
||||||
|
trace-mapping encoded 35474640 bytes
|
||||||
|
source-map-js 51257176 bytes
|
||||||
|
source-map-0.6.1 63515664 bytes
|
||||||
|
source-map-0.8.0 42933752 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map - 1992 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 37128 bytes
|
||||||
|
trace-mapping encoded 247280 bytes
|
||||||
|
source-map-js 1143536 bytes
|
||||||
|
source-map-0.6.1 1290992 bytes
|
||||||
|
source-map-0.8.0 96544 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map - 5726 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 16176 bytes
|
||||||
|
trace-mapping encoded 681552 bytes
|
||||||
|
source-map-js 2418352 bytes
|
||||||
|
source-map-0.6.1 2443672 bytes
|
||||||
|
source-map-0.8.0 111768 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
```
|
||||||
|
|
||||||
|
[source-map]: https://www.npmjs.com/package/source-map
|
||||||
580
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
580
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@@ -0,0 +1,580 @@
|
|||||||
|
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import resolveUri from '@jridgewell/resolve-uri';
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri(input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
memo.lastIndex = ++index;
|
||||||
|
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const AnyMap = function (map, mapUrl) {
|
||||||
|
const parsed = parse(map);
|
||||||
|
if (!('sections' in parsed)) {
|
||||||
|
return new TraceMap(parsed, mapUrl);
|
||||||
|
}
|
||||||
|
const mappings = [];
|
||||||
|
const sources = [];
|
||||||
|
const sourcesContent = [];
|
||||||
|
const names = [];
|
||||||
|
const ignoreList = [];
|
||||||
|
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||||
|
const joined = {
|
||||||
|
version: 3,
|
||||||
|
file: parsed.file,
|
||||||
|
names,
|
||||||
|
sources,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList,
|
||||||
|
};
|
||||||
|
return presortedDecodedMap(joined);
|
||||||
|
};
|
||||||
|
function parse(map) {
|
||||||
|
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
|
}
|
||||||
|
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const { sections } = input;
|
||||||
|
for (let i = 0; i < sections.length; i++) {
|
||||||
|
const { map, offset } = sections[i];
|
||||||
|
let sl = stopLine;
|
||||||
|
let sc = stopColumn;
|
||||||
|
if (i + 1 < sections.length) {
|
||||||
|
const nextOffset = sections[i + 1].offset;
|
||||||
|
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||||
|
if (sl === stopLine) {
|
||||||
|
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||||
|
}
|
||||||
|
else if (sl < stopLine) {
|
||||||
|
sc = columnOffset + nextOffset.column;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const parsed = parse(input);
|
||||||
|
if ('sections' in parsed)
|
||||||
|
return recurse(...arguments);
|
||||||
|
const map = new TraceMap(parsed, mapUrl);
|
||||||
|
const sourcesOffset = sources.length;
|
||||||
|
const namesOffset = names.length;
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||||
|
append(sources, resolvedSources);
|
||||||
|
append(names, map.names);
|
||||||
|
if (contents)
|
||||||
|
append(sourcesContent, contents);
|
||||||
|
else
|
||||||
|
for (let i = 0; i < resolvedSources.length; i++)
|
||||||
|
sourcesContent.push(null);
|
||||||
|
if (ignores)
|
||||||
|
for (let i = 0; i < ignores.length; i++)
|
||||||
|
ignoreList.push(ignores[i] + sourcesOffset);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const lineI = lineOffset + i;
|
||||||
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
|
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||||
|
// still need to check that we don't overstep lines, too.
|
||||||
|
if (lineI > stopLine)
|
||||||
|
return;
|
||||||
|
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||||
|
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||||
|
const out = getLine(mappings, lineI);
|
||||||
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
|
// map can be multiple lines), it doesn't.
|
||||||
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const column = cOffset + seg[COLUMN];
|
||||||
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
|
// to stop early.
|
||||||
|
if (lineI === stopLine && column >= stopColumn)
|
||||||
|
return;
|
||||||
|
if (seg.length === 1) {
|
||||||
|
out.push([column]);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
out.push(seg.length === 4
|
||||||
|
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||||
|
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function append(arr, other) {
|
||||||
|
for (let i = 0; i < other.length; i++)
|
||||||
|
arr.push(other[i]);
|
||||||
|
}
|
||||||
|
function getLine(arr, index) {
|
||||||
|
for (let i = arr.length; i <= index; i++)
|
||||||
|
arr[i] = [];
|
||||||
|
return arr[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
|
const LEAST_UPPER_BOUND = -1;
|
||||||
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
class TraceMap {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const isString = typeof map === 'string';
|
||||||
|
if (!isString && map._decodedMemo)
|
||||||
|
return map;
|
||||||
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
|
this.version = version;
|
||||||
|
this.file = file;
|
||||||
|
this.names = names || [];
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this.sources = sources;
|
||||||
|
this.sourcesContent = sourcesContent;
|
||||||
|
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||||
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
const { mappings } = parsed;
|
||||||
|
if (typeof mappings === 'string') {
|
||||||
|
this._encoded = mappings;
|
||||||
|
this._decoded = undefined;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this._encoded = undefined;
|
||||||
|
this._decoded = maybeSort(mappings, isString);
|
||||||
|
}
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function encodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
var _b;
|
||||||
|
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function decodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
function traceSegment(map, line, column) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
return index === -1 ? null : segments[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(map, needle) {
|
||||||
|
let { line, column, bias } = needle;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (index === -1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
if (segment.length === 1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function generatedPositionFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function allGeneratedPositionsFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||||
|
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
function eachMapping(map, cb) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function sourceIndex(map, source) {
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let index = sources.indexOf(source);
|
||||||
|
if (index === -1)
|
||||||
|
index = resolvedSources.indexOf(source);
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
function sourceContentFor(map, source) {
|
||||||
|
const { sourcesContent } = map;
|
||||||
|
if (sourcesContent == null)
|
||||||
|
return null;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? null : sourcesContent[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
function isIgnored(map, source) {
|
||||||
|
const { ignoreList } = map;
|
||||||
|
if (ignoreList == null)
|
||||||
|
return false;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? false : ignoreList.includes(index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
function presortedDecodedMap(map, mapUrl) {
|
||||||
|
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||||
|
cast(tracer)._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function decodedMap(map) {
|
||||||
|
return clone(map, decodedMappings(map));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function encodedMap(map) {
|
||||||
|
return clone(map, encodedMappings(map));
|
||||||
|
}
|
||||||
|
function clone(map, mappings) {
|
||||||
|
return {
|
||||||
|
version: map.version,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function OMapping(source, line, column, name) {
|
||||||
|
return { source, line, column, name };
|
||||||
|
}
|
||||||
|
function GMapping(line, column) {
|
||||||
|
return { line, column };
|
||||||
|
}
|
||||||
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
|
if (found) {
|
||||||
|
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||||
|
}
|
||||||
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
|
index++;
|
||||||
|
if (index === -1 || index === segments.length)
|
||||||
|
return -1;
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||||
|
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
||||||
|
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||||
|
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||||
|
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||||
|
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||||
|
// match LEAST_UPPER_BOUND.
|
||||||
|
if (!found && bias === LEAST_UPPER_BOUND)
|
||||||
|
min++;
|
||||||
|
if (min === -1 || min === segments.length)
|
||||||
|
return [];
|
||||||
|
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||||
|
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||||
|
// to our desired column.
|
||||||
|
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||||
|
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||||
|
if (!found)
|
||||||
|
min = lowerBound(segments, matchedColumn, min);
|
||||||
|
const max = upperBound(segments, matchedColumn, min);
|
||||||
|
const result = [];
|
||||||
|
for (; min <= max; min++) {
|
||||||
|
const segment = segments[min];
|
||||||
|
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function generatedPosition(map, source, line, column, bias, all) {
|
||||||
|
var _a;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let sourceIndex = sources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
sourceIndex = resolvedSources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||||
|
const segments = generated[sourceIndex][line];
|
||||||
|
if (segments == null)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||||
|
if (all)
|
||||||
|
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||||
|
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||||
|
if (index === -1)
|
||||||
|
return GMapping(null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
||||||
|
//# sourceMappingURL=trace-mapping.mjs.map
|
||||||
1
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
1
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
600
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
600
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@@ -0,0 +1,600 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||||
|
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri(input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
memo.lastIndex = ++index;
|
||||||
|
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const AnyMap = function (map, mapUrl) {
|
||||||
|
const parsed = parse(map);
|
||||||
|
if (!('sections' in parsed)) {
|
||||||
|
return new TraceMap(parsed, mapUrl);
|
||||||
|
}
|
||||||
|
const mappings = [];
|
||||||
|
const sources = [];
|
||||||
|
const sourcesContent = [];
|
||||||
|
const names = [];
|
||||||
|
const ignoreList = [];
|
||||||
|
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||||
|
const joined = {
|
||||||
|
version: 3,
|
||||||
|
file: parsed.file,
|
||||||
|
names,
|
||||||
|
sources,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList,
|
||||||
|
};
|
||||||
|
return presortedDecodedMap(joined);
|
||||||
|
};
|
||||||
|
function parse(map) {
|
||||||
|
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
|
}
|
||||||
|
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const { sections } = input;
|
||||||
|
for (let i = 0; i < sections.length; i++) {
|
||||||
|
const { map, offset } = sections[i];
|
||||||
|
let sl = stopLine;
|
||||||
|
let sc = stopColumn;
|
||||||
|
if (i + 1 < sections.length) {
|
||||||
|
const nextOffset = sections[i + 1].offset;
|
||||||
|
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||||
|
if (sl === stopLine) {
|
||||||
|
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||||
|
}
|
||||||
|
else if (sl < stopLine) {
|
||||||
|
sc = columnOffset + nextOffset.column;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const parsed = parse(input);
|
||||||
|
if ('sections' in parsed)
|
||||||
|
return recurse(...arguments);
|
||||||
|
const map = new TraceMap(parsed, mapUrl);
|
||||||
|
const sourcesOffset = sources.length;
|
||||||
|
const namesOffset = names.length;
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||||
|
append(sources, resolvedSources);
|
||||||
|
append(names, map.names);
|
||||||
|
if (contents)
|
||||||
|
append(sourcesContent, contents);
|
||||||
|
else
|
||||||
|
for (let i = 0; i < resolvedSources.length; i++)
|
||||||
|
sourcesContent.push(null);
|
||||||
|
if (ignores)
|
||||||
|
for (let i = 0; i < ignores.length; i++)
|
||||||
|
ignoreList.push(ignores[i] + sourcesOffset);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const lineI = lineOffset + i;
|
||||||
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
|
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||||
|
// still need to check that we don't overstep lines, too.
|
||||||
|
if (lineI > stopLine)
|
||||||
|
return;
|
||||||
|
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||||
|
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||||
|
const out = getLine(mappings, lineI);
|
||||||
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
|
// map can be multiple lines), it doesn't.
|
||||||
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const column = cOffset + seg[COLUMN];
|
||||||
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
|
// to stop early.
|
||||||
|
if (lineI === stopLine && column >= stopColumn)
|
||||||
|
return;
|
||||||
|
if (seg.length === 1) {
|
||||||
|
out.push([column]);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
out.push(seg.length === 4
|
||||||
|
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||||
|
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function append(arr, other) {
|
||||||
|
for (let i = 0; i < other.length; i++)
|
||||||
|
arr.push(other[i]);
|
||||||
|
}
|
||||||
|
function getLine(arr, index) {
|
||||||
|
for (let i = arr.length; i <= index; i++)
|
||||||
|
arr[i] = [];
|
||||||
|
return arr[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
|
const LEAST_UPPER_BOUND = -1;
|
||||||
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
class TraceMap {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const isString = typeof map === 'string';
|
||||||
|
if (!isString && map._decodedMemo)
|
||||||
|
return map;
|
||||||
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
|
this.version = version;
|
||||||
|
this.file = file;
|
||||||
|
this.names = names || [];
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this.sources = sources;
|
||||||
|
this.sourcesContent = sourcesContent;
|
||||||
|
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||||
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
const { mappings } = parsed;
|
||||||
|
if (typeof mappings === 'string') {
|
||||||
|
this._encoded = mappings;
|
||||||
|
this._decoded = undefined;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this._encoded = undefined;
|
||||||
|
this._decoded = maybeSort(mappings, isString);
|
||||||
|
}
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function encodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
var _b;
|
||||||
|
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function decodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
function traceSegment(map, line, column) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
return index === -1 ? null : segments[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(map, needle) {
|
||||||
|
let { line, column, bias } = needle;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (index === -1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
if (segment.length === 1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function generatedPositionFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function allGeneratedPositionsFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||||
|
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
function eachMapping(map, cb) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function sourceIndex(map, source) {
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let index = sources.indexOf(source);
|
||||||
|
if (index === -1)
|
||||||
|
index = resolvedSources.indexOf(source);
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
function sourceContentFor(map, source) {
|
||||||
|
const { sourcesContent } = map;
|
||||||
|
if (sourcesContent == null)
|
||||||
|
return null;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? null : sourcesContent[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
function isIgnored(map, source) {
|
||||||
|
const { ignoreList } = map;
|
||||||
|
if (ignoreList == null)
|
||||||
|
return false;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? false : ignoreList.includes(index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
function presortedDecodedMap(map, mapUrl) {
|
||||||
|
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||||
|
cast(tracer)._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function decodedMap(map) {
|
||||||
|
return clone(map, decodedMappings(map));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function encodedMap(map) {
|
||||||
|
return clone(map, encodedMappings(map));
|
||||||
|
}
|
||||||
|
function clone(map, mappings) {
|
||||||
|
return {
|
||||||
|
version: map.version,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function OMapping(source, line, column, name) {
|
||||||
|
return { source, line, column, name };
|
||||||
|
}
|
||||||
|
function GMapping(line, column) {
|
||||||
|
return { line, column };
|
||||||
|
}
|
||||||
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
|
if (found) {
|
||||||
|
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||||
|
}
|
||||||
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
|
index++;
|
||||||
|
if (index === -1 || index === segments.length)
|
||||||
|
return -1;
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||||
|
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
||||||
|
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||||
|
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||||
|
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||||
|
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||||
|
// match LEAST_UPPER_BOUND.
|
||||||
|
if (!found && bias === LEAST_UPPER_BOUND)
|
||||||
|
min++;
|
||||||
|
if (min === -1 || min === segments.length)
|
||||||
|
return [];
|
||||||
|
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||||
|
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||||
|
// to our desired column.
|
||||||
|
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||||
|
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||||
|
if (!found)
|
||||||
|
min = lowerBound(segments, matchedColumn, min);
|
||||||
|
const max = upperBound(segments, matchedColumn, min);
|
||||||
|
const result = [];
|
||||||
|
for (; min <= max; min++) {
|
||||||
|
const segment = segments[min];
|
||||||
|
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function generatedPosition(map, source, line, column, bias, all) {
|
||||||
|
var _a;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let sourceIndex = sources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
sourceIndex = resolvedSources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||||
|
const segments = generated[sourceIndex][line];
|
||||||
|
if (segments == null)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||||
|
if (all)
|
||||||
|
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||||
|
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||||
|
if (index === -1)
|
||||||
|
return GMapping(null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.AnyMap = AnyMap;
|
||||||
|
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
|
||||||
|
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
|
||||||
|
exports.TraceMap = TraceMap;
|
||||||
|
exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
|
||||||
|
exports.decodedMap = decodedMap;
|
||||||
|
exports.decodedMappings = decodedMappings;
|
||||||
|
exports.eachMapping = eachMapping;
|
||||||
|
exports.encodedMap = encodedMap;
|
||||||
|
exports.encodedMappings = encodedMappings;
|
||||||
|
exports.generatedPositionFor = generatedPositionFor;
|
||||||
|
exports.isIgnored = isIgnored;
|
||||||
|
exports.originalPositionFor = originalPositionFor;
|
||||||
|
exports.presortedDecodedMap = presortedDecodedMap;
|
||||||
|
exports.sourceContentFor = sourceContentFor;
|
||||||
|
exports.traceSegment = traceSegment;
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=trace-mapping.umd.js.map
|
||||||
1
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
1
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
8
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
import { TraceMap } from './trace-mapping';
|
||||||
|
import type { SectionedSourceMapInput } from './types';
|
||||||
|
type AnyMap = {
|
||||||
|
new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
|
||||||
|
(map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
|
||||||
|
};
|
||||||
|
export declare const AnyMap: AnyMap;
|
||||||
|
export {};
|
||||||
32
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
32
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
|
||||||
|
export type MemoState = {
|
||||||
|
lastKey: number;
|
||||||
|
lastNeedle: number;
|
||||||
|
lastIndex: number;
|
||||||
|
};
|
||||||
|
export declare let found: boolean;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
|
||||||
|
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||||
|
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||||
|
export declare function memoizedState(): MemoState;
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
|
||||||
7
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
7
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { MemoState } from './binary-search';
|
||||||
|
export type Source = {
|
||||||
|
__proto__: null;
|
||||||
|
[line: number]: Exclude<ReverseSegment, [number]>[];
|
||||||
|
};
|
||||||
|
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
|
||||||
1
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
1
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export default function resolve(input: string, base: string | undefined): string;
|
||||||
2
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
2
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
|
||||||
16
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
16
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
type GeneratedColumn = number;
|
||||||
|
type SourcesIndex = number;
|
||||||
|
type SourceLine = number;
|
||||||
|
type SourceColumn = number;
|
||||||
|
type NamesIndex = number;
|
||||||
|
type GeneratedLine = number;
|
||||||
|
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
||||||
|
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
|
||||||
|
export declare const COLUMN = 0;
|
||||||
|
export declare const SOURCES_INDEX = 1;
|
||||||
|
export declare const SOURCE_LINE = 2;
|
||||||
|
export declare const SOURCE_COLUMN = 3;
|
||||||
|
export declare const NAMES_INDEX = 4;
|
||||||
|
export declare const REV_GENERATED_LINE = 1;
|
||||||
|
export declare const REV_GENERATED_COLUMN = 2;
|
||||||
|
export {};
|
||||||
4
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
4
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
export default function stripFilename(path: string | undefined | null): string;
|
||||||
79
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
79
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
|
||||||
|
export type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
|
||||||
|
export declare const LEAST_UPPER_BOUND = -1;
|
||||||
|
export declare const GREATEST_LOWER_BOUND = 1;
|
||||||
|
export { AnyMap } from './any-map';
|
||||||
|
export declare class TraceMap implements SourceMap {
|
||||||
|
version: SourceMapV3['version'];
|
||||||
|
file: SourceMapV3['file'];
|
||||||
|
names: SourceMapV3['names'];
|
||||||
|
sourceRoot: SourceMapV3['sourceRoot'];
|
||||||
|
sources: SourceMapV3['sources'];
|
||||||
|
sourcesContent: SourceMapV3['sourcesContent'];
|
||||||
|
ignoreList: SourceMapV3['ignoreList'];
|
||||||
|
resolvedSources: string[];
|
||||||
|
private _encoded;
|
||||||
|
private _decoded;
|
||||||
|
private _decodedMemo;
|
||||||
|
private _bySources;
|
||||||
|
private _bySourceMemos;
|
||||||
|
constructor(map: SourceMapInput, mapUrl?: string | null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
export declare function sourceContentFor(map: TraceMap, source: string): string | null;
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
export declare function isIgnored(map: TraceMap, source: string): boolean;
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
|
||||||
|
mappings: readonly SourceMapSegment[][];
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare function encodedMap(map: TraceMap): EncodedSourceMap;
|
||||||
99
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
99
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
|
||||||
|
export interface SourceMapV3 {
|
||||||
|
file?: string | null;
|
||||||
|
names: string[];
|
||||||
|
sourceRoot?: string;
|
||||||
|
sources: (string | null)[];
|
||||||
|
sourcesContent?: (string | null)[];
|
||||||
|
version: 3;
|
||||||
|
ignoreList?: number[];
|
||||||
|
}
|
||||||
|
export interface EncodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: string;
|
||||||
|
}
|
||||||
|
export interface DecodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: SourceMapSegment[][];
|
||||||
|
}
|
||||||
|
export interface Section {
|
||||||
|
offset: {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
};
|
||||||
|
map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
|
||||||
|
}
|
||||||
|
export interface SectionedSourceMap {
|
||||||
|
file?: string | null;
|
||||||
|
sections: Section[];
|
||||||
|
version: 3;
|
||||||
|
}
|
||||||
|
export type OriginalMapping = {
|
||||||
|
source: string | null;
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
name: string | null;
|
||||||
|
};
|
||||||
|
export type InvalidOriginalMapping = {
|
||||||
|
source: null;
|
||||||
|
line: null;
|
||||||
|
column: null;
|
||||||
|
name: null;
|
||||||
|
};
|
||||||
|
export type GeneratedMapping = {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
};
|
||||||
|
export type InvalidGeneratedMapping = {
|
||||||
|
line: null;
|
||||||
|
column: null;
|
||||||
|
};
|
||||||
|
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
|
||||||
|
export type XInput = {
|
||||||
|
x_google_ignoreList?: SourceMapV3['ignoreList'];
|
||||||
|
};
|
||||||
|
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
|
||||||
|
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
|
||||||
|
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
|
||||||
|
sections: SectionXInput[];
|
||||||
|
};
|
||||||
|
export type SectionXInput = Omit<Section, 'map'> & {
|
||||||
|
map: SectionedSourceMapInput;
|
||||||
|
};
|
||||||
|
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
|
||||||
|
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
|
||||||
|
export type Needle = {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
bias?: Bias;
|
||||||
|
};
|
||||||
|
export type SourceNeedle = {
|
||||||
|
source: string;
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
bias?: Bias;
|
||||||
|
};
|
||||||
|
export type EachMapping = {
|
||||||
|
generatedLine: number;
|
||||||
|
generatedColumn: number;
|
||||||
|
source: null;
|
||||||
|
originalLine: null;
|
||||||
|
originalColumn: null;
|
||||||
|
name: null;
|
||||||
|
} | {
|
||||||
|
generatedLine: number;
|
||||||
|
generatedColumn: number;
|
||||||
|
source: string | null;
|
||||||
|
originalLine: number;
|
||||||
|
originalColumn: number;
|
||||||
|
name: string | null;
|
||||||
|
};
|
||||||
|
export declare abstract class SourceMap {
|
||||||
|
version: SourceMapV3['version'];
|
||||||
|
file: SourceMapV3['file'];
|
||||||
|
names: SourceMapV3['names'];
|
||||||
|
sourceRoot: SourceMapV3['sourceRoot'];
|
||||||
|
sources: SourceMapV3['sources'];
|
||||||
|
sourcesContent: SourceMapV3['sourcesContent'];
|
||||||
|
resolvedSources: SourceMapV3['sources'];
|
||||||
|
ignoreList: SourceMapV3['ignoreList'];
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user