mirror of
https://github.com/LukeHagar/arbiter.git
synced 2025-12-06 04:19:14 +00:00
Add publishing, update README, update CLI, add SQLite
This commit is contained in:
67
.github/workflows/publish.yml
vendored
Normal file
67
.github/workflows/publish.yml
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
name: Publish to npm

on:
  push:
    branches:
      - main
  workflow_dispatch: {}

concurrency:
  group: publish-npm
  cancel-in-progress: false

jobs:
  publish:
    name: Publish
    runs-on: ubuntu-latest
    permissions:
      contents: read
      # Required for npm provenance attestation (npm publish --provenance).
      id-token: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20
          registry-url: 'https://registry.npmjs.org'

      - name: Install dependencies
        run: npm ci

      - name: Run tests
        run: npm test

      - name: Build
        run: npm run build

      - name: Determine publish necessity
        id: check
        shell: bash
        run: |
          set -euo pipefail
          PKG_NAME=$(node -p "require('./package.json').name")
          PKG_VERSION=$(node -p "require('./package.json').version")
          echo "Package: $PKG_NAME@$PKG_VERSION"
          # Ask the registry whether this exact version already exists.
          # Comparing only against the "latest" dist-tag (the previous
          # approach) would wrongly attempt a re-publish whenever the local
          # version merely differs from latest — e.g. an older, already
          # published release.
          if npm view "$PKG_NAME@$PKG_VERSION" version >/dev/null 2>&1; then
            echo "Version $PKG_VERSION is already published."
            echo "should_publish=false" >> "$GITHUB_OUTPUT"
          else
            echo "should_publish=true" >> "$GITHUB_OUTPUT"
          fi

      - name: Publish to npm
        if: steps.check.outputs.should_publish == 'true'
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
        run: |
          # Enable provenance for npm (requires id-token permission)
          npm publish --provenance --access public

      - name: Skip publish (version unchanged)
        if: steps.check.outputs.should_publish != 'true'
        run: echo "Skipping publish because version has not changed."
|
||||
|
||||
|
||||
10
Dockerfile
10
Dockerfile
@@ -1,12 +1,17 @@
|
||||
FROM node:20-slim
|
||||
|
||||
# Install build tools needed for better-sqlite3
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python3 make g++ \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy package files
|
||||
COPY package*.json ./
|
||||
|
||||
# Install dependencies
|
||||
RUN npm install
|
||||
RUN npm install --omit=dev
|
||||
|
||||
# Copy source code
|
||||
COPY . .
|
||||
@@ -17,5 +22,8 @@ RUN npm run build
|
||||
# Expose ports for proxy and docs servers
|
||||
EXPOSE 8080 9000
|
||||
|
||||
# Persistent data directory
|
||||
VOLUME ["/data"]
|
||||
|
||||
# Set default command
|
||||
CMD ["node", "dist/src/cli.js"]
|
||||
47
README.md
47
README.md
@@ -19,6 +19,34 @@ Arbiter is a powerful API proxy and documentation generator that automatically c
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Installation
|
||||
|
||||
```bash
|
||||
npm install -g arbiter
|
||||
```
|
||||
|
||||
### Basic Usage
|
||||
|
||||
Start Arbiter by pointing it to your target API:
|
||||
|
||||
```bash
|
||||
arbiter --target https://api.example.com
|
||||
# with persistence
|
||||
arbiter --target https://api.example.com --db-path ./arbiter.db
|
||||
```
|
||||
|
||||
Then send requests through the proxy:
|
||||
|
||||
```bash
|
||||
curl http://localhost:8080/users
|
||||
```
|
||||
|
||||
And view the automatically generated documentation:
|
||||
|
||||
```bash
|
||||
open http://localhost:9000/docs
|
||||
```
|
||||
|
||||
### Docker Usage
|
||||
|
||||
You can run Arbiter using Docker:
|
||||
@@ -27,8 +55,13 @@ You can run Arbiter using Docker:
|
||||
# Build the Docker image
|
||||
docker build -t arbiter .
|
||||
|
||||
# Run the container
|
||||
# Run the container (ephemeral)
|
||||
docker run -p 8080:8080 -p 9000:9000 arbiter --target https://api.example.com
|
||||
|
||||
# Run the container with persistent storage
|
||||
docker run -p 8080:8080 -p 9000:9000 \
|
||||
-v $(pwd)/data:/data \
|
||||
arbiter --target https://api.example.com --db-path /data/arbiter.db
|
||||
```
|
||||
|
||||
The container exposes:
|
||||
@@ -42,6 +75,7 @@ docker run -p 3000:3000 -p 3001:3001 arbiter \
|
||||
--target https://api.example.com \
|
||||
--port 3000 \
|
||||
--docs-port 3001 \
|
||||
--db-path /data/arbiter.db \
|
||||
--verbose
|
||||
```
|
||||
|
||||
@@ -52,6 +86,7 @@ docker run -p 3000:3000 -p 3001:3001 arbiter \
|
||||
| `-t, --target <url>` | Target API URL to proxy to | (required) |
|
||||
| `-p, --port <number>` | Port to run the proxy server on | 8080 |
|
||||
| `-d, --docs-port <number>` | Port to run the documentation server on | 9000 |
|
||||
| `--db-path <path>` | Path to SQLite database file for persistence | (disabled) |
|
||||
| `--docs-only` | Run only the documentation server | false |
|
||||
| `--proxy-only` | Run only the proxy server | false |
|
||||
| `-v, --verbose` | Enable verbose logging | false |
|
||||
@@ -149,6 +184,16 @@ app.listen(3000);
|
||||
|
||||
Contributions are welcome! Please feel free to submit a Pull Request.
|
||||
|
||||
## Publishing (maintainers)
|
||||
|
||||
This repo auto-publishes to npm on push to `main` if the version in `package.json` is not the version currently published on npm.
|
||||
|
||||
Setup (one-time):
|
||||
- Add a repository secret `NPM_TOKEN` with publish rights for the package.
|
||||
|
||||
Manual run:
|
||||
- You can also trigger the workflow manually from the Actions tab (workflow_dispatch).
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the MIT License - see the LICENSE file for details.
|
||||
|
||||
@@ -49,7 +49,8 @@
|
||||
"http-proxy-middleware": "^3.0.3",
|
||||
"serve-static": "^1.16.2",
|
||||
"swagger-ui-express": "^5.0.1",
|
||||
"yaml": "^2.7.0"
|
||||
"yaml": "^2.7.0",
|
||||
"better-sqlite3": "^9.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/eslintrc": "^3.3.0",
|
||||
|
||||
@@ -16,6 +16,7 @@ program
|
||||
.requiredOption('-t, --target <url>', 'target API URL to proxy to')
|
||||
.option('-p, --port <number>', 'port to run the proxy server on', '8080')
|
||||
.option('-d, --docs-port <number>', 'port to run the documentation server on', '9000')
|
||||
.option('--db-path <path>', 'path to SQLite database file for persistence')
|
||||
.option('--docs-only', 'run only the documentation server')
|
||||
.option('--proxy-only', 'run only the proxy server')
|
||||
.option('-v, --verbose', 'enable verbose logging')
|
||||
@@ -29,6 +30,7 @@ startServers({
|
||||
proxyPort: parseInt(options.port as string, 10),
|
||||
docsPort: parseInt(options.docsPort as string, 10),
|
||||
verbose: options.verbose as boolean,
|
||||
dbPath: options.dbPath as string | undefined,
|
||||
}).catch((error: Error) => {
|
||||
console.error(chalk.red('Failed to start servers:'), error.message);
|
||||
process.exit(1);
|
||||
|
||||
126
src/server.ts
126
src/server.ts
@@ -3,7 +3,8 @@ import { createProxyMiddleware } from 'http-proxy-middleware';
|
||||
import { createServer } from 'http';
|
||||
import cors from 'cors';
|
||||
import zlib from 'zlib';
|
||||
import { openApiStore } from './store/openApiStore.js';
|
||||
import { openApiStore, OpenAPIStore } from './store/openApiStore.js';
|
||||
import { initStorage, storage } from './storage/index.js';
|
||||
import chalk from 'chalk';
|
||||
import { IncomingMessage, ServerResponse } from 'http';
|
||||
import type { SecurityInfo } from './store/openApiStore.js';
|
||||
@@ -129,6 +130,7 @@ export interface ServerOptions {
|
||||
proxyPort: number;
|
||||
docsPort: number;
|
||||
verbose?: boolean;
|
||||
dbPath?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -139,10 +141,38 @@ export async function startServers({
|
||||
proxyPort,
|
||||
docsPort,
|
||||
verbose = false,
|
||||
dbPath,
|
||||
}: ServerOptions): Promise<{
|
||||
proxyServer: ReturnType<typeof createServer>;
|
||||
docsServer: ReturnType<typeof createServer>;
|
||||
}> {
|
||||
// Initialize persistent storage if dbPath provided
|
||||
if (dbPath) {
|
||||
try {
|
||||
await initStorage(dbPath);
|
||||
if (verbose) console.log(`Initialized SQLite storage at ${dbPath}`);
|
||||
// Hydrate OpenAPI store with persisted endpoints (minimal info)
|
||||
const persisted = await storage().getAllEndpoints();
|
||||
for (const ep of persisted) {
|
||||
try {
|
||||
openApiStore.recordEndpoint(
|
||||
ep.path,
|
||||
ep.method.toLowerCase(),
|
||||
ep.data.request,
|
||||
{
|
||||
status: ep.data.response?.status || 200,
|
||||
headers: ep.data.response?.headers || {},
|
||||
contentType: ep.data.response?.contentType || 'application/json',
|
||||
body: '[Raw data stored]',
|
||||
rawData: Buffer.alloc(0),
|
||||
}
|
||||
);
|
||||
} catch {}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Failed to initialize storage:', e);
|
||||
}
|
||||
}
|
||||
// Set the target URL in the OpenAPI store
|
||||
openApiStore.setTargetUrl(target);
|
||||
|
||||
@@ -368,8 +398,19 @@ export async function startServers({
|
||||
_rawResponseBuffer: buffer, // Store for later processing if needed
|
||||
};
|
||||
|
||||
// Add the HAR entry to the store
|
||||
// Add the HAR entry to the store and persist if enabled
|
||||
harStore.addEntry(harEntry);
|
||||
if (dbPath) {
|
||||
storage().saveHarEntry({
|
||||
startedDateTime: harEntry.startedDateTime,
|
||||
time: harEntry.time,
|
||||
request: harEntry.request,
|
||||
response: {
|
||||
...harEntry.response,
|
||||
// Do not persist raw buffer reference
|
||||
},
|
||||
}).catch(() => {});
|
||||
}
|
||||
|
||||
// Extract security schemes from headers - minimal work
|
||||
const securitySchemes: SecurityInfo[] = [];
|
||||
@@ -415,6 +456,26 @@ export async function startServers({
|
||||
}
|
||||
);
|
||||
|
||||
// Persist endpoint minimal info for reconstruction
|
||||
if (dbPath) {
|
||||
storage().upsertEndpoint(path, method.toLowerCase(), {
|
||||
path,
|
||||
method: method.toLowerCase(),
|
||||
request: {
|
||||
query: queryParams,
|
||||
headers: requestHeaders,
|
||||
contentType: requestHeaders['content-type'] || 'application/json',
|
||||
body: requestBody,
|
||||
security: securitySchemes,
|
||||
},
|
||||
response: {
|
||||
status: proxyRes.statusCode || 500,
|
||||
headers: responseHeaders,
|
||||
contentType: responseHeaders['content-type'] || 'application/json',
|
||||
},
|
||||
}).catch(() => {});
|
||||
}
|
||||
|
||||
if (verbose) {
|
||||
console.log(`${method} ${path} -> ${proxyRes.statusCode}`);
|
||||
}
|
||||
@@ -432,18 +493,73 @@ export async function startServers({
|
||||
docsApp.use(cors());
|
||||
|
||||
// Create documentation endpoints
|
||||
docsApp.get('/har', (req, res) => {
|
||||
docsApp.get('/har', async (req, res) => {
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
if (dbPath) {
|
||||
try {
|
||||
const log = await storage().getHarLog();
|
||||
res.send(JSON.stringify(log));
|
||||
return;
|
||||
} catch {}
|
||||
}
|
||||
res.send(JSON.stringify(harStore.getHAR()));
|
||||
});
|
||||
|
||||
docsApp.get('/openapi.json', (req, res) => {
|
||||
docsApp.get('/openapi.json', async (req, res) => {
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
if (dbPath) {
|
||||
try {
|
||||
const persisted = await storage().getAllEndpoints();
|
||||
const tempStore = new OpenAPIStore();
|
||||
tempStore.setTargetUrl(target);
|
||||
for (const ep of persisted) {
|
||||
try {
|
||||
tempStore.recordEndpoint(
|
||||
ep.path,
|
||||
ep.method.toLowerCase(),
|
||||
ep.data.request,
|
||||
{
|
||||
status: ep.data.response?.status || 200,
|
||||
headers: ep.data.response?.headers || {},
|
||||
contentType: ep.data.response?.contentType || 'application/json',
|
||||
body: '[Raw data stored]'
|
||||
}
|
||||
);
|
||||
} catch {}
|
||||
}
|
||||
res.send(JSON.stringify(tempStore.getOpenAPISpec()));
|
||||
return;
|
||||
} catch {}
|
||||
}
|
||||
res.send(JSON.stringify(openApiStore.getOpenAPISpec()));
|
||||
});
|
||||
|
||||
docsApp.get('/openapi.yaml', (req, res) => {
|
||||
docsApp.get('/openapi.yaml', async (req, res) => {
|
||||
res.setHeader('Content-Type', 'text/plain');
|
||||
if (dbPath) {
|
||||
try {
|
||||
const persisted = await storage().getAllEndpoints();
|
||||
const tempStore = new OpenAPIStore();
|
||||
tempStore.setTargetUrl(target);
|
||||
for (const ep of persisted) {
|
||||
try {
|
||||
tempStore.recordEndpoint(
|
||||
ep.path,
|
||||
ep.method.toLowerCase(),
|
||||
ep.data.request,
|
||||
{
|
||||
status: ep.data.response?.status || 200,
|
||||
headers: ep.data.response?.headers || {},
|
||||
contentType: ep.data.response?.contentType || 'application/json',
|
||||
body: '[Raw data stored]'
|
||||
}
|
||||
);
|
||||
} catch {}
|
||||
}
|
||||
res.send(tempStore.getOpenAPISpecAsYAML());
|
||||
return;
|
||||
} catch {}
|
||||
}
|
||||
res.send(openApiStore.getOpenAPISpecAsYAML());
|
||||
});
|
||||
|
||||
|
||||
15
src/storage/index.ts
Normal file
15
src/storage/index.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { sqliteStorage } from './sqlite.js';
import type { StorageAdapter } from './types.js';

// Module-level singleton. Defaults to the SQLite adapter; callers must run
// initStorage() before the adapter is usable (SQLiteStorage methods are
// no-ops until init() opens the database).
let storageInstance: StorageAdapter = sqliteStorage;

/**
 * Initialize the active storage adapter against the given database path.
 *
 * @param dbPath - Filesystem path of the SQLite database file.
 * @returns The initialized adapter (same object returned by storage()).
 */
export async function initStorage(dbPath: string): Promise<StorageAdapter> {
  await storageInstance.init(dbPath);
  return storageInstance;
}

/**
 * Access the active storage adapter.
 *
 * NOTE(review): this returns the adapter even if initStorage() was never
 * called; callers appear to guard on dbPath themselves — confirm before
 * relying on it elsewhere.
 */
export function storage(): StorageAdapter {
  return storageInstance;
}
|
||||
|
||||
|
||||
96
src/storage/sqlite.ts
Normal file
96
src/storage/sqlite.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import type { StorageAdapter } from './types.js';
|
||||
|
||||
export class SQLiteStorage implements StorageAdapter {
|
||||
private db: Database.Database | null = null;
|
||||
|
||||
async init(dbPath: string): Promise<void> {
|
||||
this.db = new Database(dbPath);
|
||||
this.db.pragma('journal_mode = WAL');
|
||||
this.db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS har_entries (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
startedDateTime TEXT NOT NULL,
|
||||
time INTEGER NOT NULL,
|
||||
request TEXT NOT NULL,
|
||||
response TEXT NOT NULL
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_har_started ON har_entries(startedDateTime);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS endpoints (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
path TEXT NOT NULL,
|
||||
method TEXT NOT NULL,
|
||||
data TEXT NOT NULL,
|
||||
UNIQUE(path, method)
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_endpoints_path_method ON endpoints(path, method);
|
||||
`);
|
||||
}
|
||||
|
||||
isReady(): boolean {
|
||||
return this.db !== null;
|
||||
}
|
||||
|
||||
async close(): Promise<void> {
|
||||
if (this.db) {
|
||||
this.db.close();
|
||||
this.db = null;
|
||||
}
|
||||
}
|
||||
|
||||
async saveHarEntry(entry: any): Promise<void> {
|
||||
if (!this.db) return;
|
||||
const stmt = this.db.prepare(
|
||||
'INSERT INTO har_entries (startedDateTime, time, request, response) VALUES (?, ?, ?, ?)'
|
||||
);
|
||||
stmt.run(
|
||||
entry.startedDateTime,
|
||||
entry.time,
|
||||
JSON.stringify(entry.request),
|
||||
JSON.stringify(entry.response)
|
||||
);
|
||||
}
|
||||
|
||||
async getHarLog(): Promise<any> {
|
||||
if (!this.db) return { log: { version: '1.2', creator: { name: 'Arbiter', version: '1.0.0' }, entries: [] } };
|
||||
const rows = this.db.prepare('SELECT startedDateTime, time, request, response FROM har_entries ORDER BY id ASC').all();
|
||||
const entries = rows.map((r) => ({
|
||||
startedDateTime: r.startedDateTime,
|
||||
time: r.time,
|
||||
request: JSON.parse(r.request),
|
||||
response: JSON.parse(r.response),
|
||||
}));
|
||||
return {
|
||||
log: {
|
||||
version: '1.2',
|
||||
creator: { name: 'Arbiter', version: '1.0.0' },
|
||||
entries,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async clearHar(): Promise<void> {
|
||||
if (!this.db) return;
|
||||
this.db.prepare('DELETE FROM har_entries').run();
|
||||
}
|
||||
|
||||
async upsertEndpoint(path: string, method: string, data: any): Promise<void> {
|
||||
if (!this.db) return;
|
||||
const stmt = this.db.prepare(
|
||||
'INSERT INTO endpoints (path, method, data) VALUES (?, ?, ?)
|
||||
ON CONFLICT(path, method) DO UPDATE SET data=excluded.data'
|
||||
);
|
||||
stmt.run(path, method.toLowerCase(), JSON.stringify(data));
|
||||
}
|
||||
|
||||
async getAllEndpoints(): Promise<Array<{ path: string; method: string; data: any }>> {
|
||||
if (!this.db) return [];
|
||||
const rows = this.db.prepare('SELECT path, method, data FROM endpoints').all();
|
||||
return rows.map((r) => ({ path: r.path, method: r.method, data: JSON.parse(r.data) }));
|
||||
}
|
||||
}
|
||||
|
||||
export const sqliteStorage = new SQLiteStorage();
|
||||
|
||||
|
||||
16
src/storage/types.ts
Normal file
16
src/storage/types.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
/**
 * Contract for persistence backends (currently implemented by SQLiteStorage).
 *
 * Entry/record payloads are typed `any` — presumably HAR-shaped objects and
 * endpoint snapshots produced by the proxy; TODO(review): tighten these to
 * concrete types once the payload shapes are confirmed against the callers.
 */
export interface StorageAdapter {
  /** Open/prepare the backing store at the given database path. */
  init(dbPath: string): Promise<void>;
  /** Whether init() has completed and the store is usable. */
  isReady(): boolean;
  /** Release the backing store; implementations should tolerate repeat calls. */
  close(): Promise<void>;

  // HAR persistence
  saveHarEntry(entry: any): Promise<void>;
  getHarLog(): Promise<any>;
  clearHar(): Promise<void>;

  // Endpoint persistence
  upsertEndpoint(path: string, method: string, data: any): Promise<void>;
  getAllEndpoints(): Promise<Array<{ path: string; method: string; data: any }>>;
}
|
||||
|
||||
|
||||
@@ -144,6 +144,17 @@ export class OpenAPIStore {
|
||||
this.rawDataCache.clear();
|
||||
}
|
||||
|
||||
// Persist/restore helpers
|
||||
/** Return the recorded info for a path/HTTP-method pair, if present. */
public getEndpoint(path: string, method: string): EndpointInfo | undefined {
  // Entries are keyed as "<lowercased method> <path>".
  return this.endpoints.get(`${method.toLowerCase()} ${path}`);
}
|
||||
|
||||
public importEndpoint(path: string, method: string, data: EndpointInfo): void {
|
||||
const key = `${method.toLowerCase()} ${path}`;
|
||||
this.endpoints.set(key, data);
|
||||
}
|
||||
|
||||
private deepMergeSchemas(schemas: OpenAPIV3_1.SchemaObject[]): OpenAPIV3_1.SchemaObject {
|
||||
if (schemas.length === 0) return { type: 'object' };
|
||||
if (schemas.length === 1) return schemas[0];
|
||||
|
||||
Reference in New Issue
Block a user