From a42b708dd0619a7b92a21b445c36c1be16137bf3 Mon Sep 17 00:00:00 2001 From: Luke Hagar Date: Wed, 13 Aug 2025 11:12:32 -0500 Subject: [PATCH] adding publishing, updating readme, updated CLI, added Sqlite --- .github/workflows/publish.yml | 67 ++++++++++++++++++ Dockerfile | 10 ++- README.md | 47 ++++++++++++- package.json | 3 +- src/cli.ts | 2 + src/server.ts | 126 ++++++++++++++++++++++++++++++++-- src/storage/index.ts | 15 ++++ src/storage/sqlite.ts | 96 ++++++++++++++++++++++++++ src/storage/types.ts | 16 +++++ src/store/openApiStore.ts | 11 +++ 10 files changed, 385 insertions(+), 8 deletions(-) create mode 100644 .github/workflows/publish.yml create mode 100644 src/storage/index.ts create mode 100644 src/storage/sqlite.ts create mode 100644 src/storage/types.ts diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..30f5888d --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,67 @@ +name: Publish to npm + +on: + push: + branches: + - main + workflow_dispatch: {} + +concurrency: + group: publish-npm + cancel-in-progress: false + +jobs: + publish: + name: Publish + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Use Node.js 20 + uses: actions/setup-node@v4 + with: + node-version: 20 + registry-url: 'https://registry.npmjs.org' + + - name: Install dependencies + run: npm ci + + - name: Run tests + run: npm test + + - name: Build + run: npm run build + + - name: Determine publish necessity + id: check + shell: bash + run: | + set -euo pipefail + PKG_NAME=$(node -p "require('./package.json').name") + PKG_VERSION=$(node -p "require('./package.json').version") + echo "Package: $PKG_NAME@$PKG_VERSION" + PUBLISHED_VERSION=$(npm view "$PKG_NAME" version || echo "0.0.0") + echo "Published: $PUBLISHED_VERSION" + if [ "$PKG_VERSION" = "$PUBLISHED_VERSION" ]; then + echo "should_publish=false" >> $GITHUB_OUTPUT + else 
+          echo "should_publish=true" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Publish to npm
+        if: steps.check.outputs.should_publish == 'true'
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+        run: |
+          # Enable provenance for npm (requires id-token permission)
+          npm publish --provenance --access public
+
+      - name: Skip publish (version unchanged)
+        if: steps.check.outputs.should_publish != 'true'
+        run: echo "Skipping publish because version has not changed."
+
+
diff --git a/Dockerfile b/Dockerfile
index 1ff17973..7a3afee4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,12 +1,17 @@
 FROM node:20-slim
 
+# Install build tools needed for better-sqlite3
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    python3 make g++ \
+    && rm -rf /var/lib/apt/lists/*
+
 WORKDIR /app
 
 # Copy package files
 COPY package*.json ./
 
 # Install dependencies (dev deps included: `npm run build` below needs the TypeScript toolchain)
-RUN npm install
+RUN npm ci
 
 # Copy source code
 COPY . .
@@ -17,5 +22,8 @@ RUN npm run build
 # Expose ports for proxy and docs servers
 EXPOSE 8080 9000
 
+# Persistent data directory
+VOLUME ["/data"]
+
 # Set default command
 CMD ["node", "dist/src/cli.js"]
\ No newline at end of file
diff --git a/README.md b/README.md
index fdcf7e5f..c4710617 100644
--- a/README.md
+++ b/README.md
@@ -19,6 +19,34 @@ Arbiter is a powerful API proxy and documentation generator that automatically c
 
 ## Getting Started
 
+### Installation
+
+```bash
+npm install -g arbiter
+```
+
+### Basic Usage
+
+Start Arbiter by pointing it to your target API:
+
+```bash
+arbiter --target https://api.example.com
+# with persistence
+arbiter --target https://api.example.com --db-path ./arbiter.db
+```
+
+Then send requests through the proxy:
+
+```bash
+curl http://localhost:8080/users
+```
+
+And view the automatically generated documentation:
+
+```bash
+open http://localhost:9000/docs
+```
+
 ### Docker Usage
 
 You can run Arbiter using Docker:
 
 ```bash
 # Build the Docker image
 docker build -t arbiter .
-# Run the container
+# Run the container (ephemeral)
 docker run -p 8080:8080 -p 9000:9000 arbiter --target https://api.example.com
+
+# Run the container with persistent storage
+docker run -p 8080:8080 -p 9000:9000 \
+  -v $(pwd)/data:/data \
+  arbiter --target https://api.example.com --db-path /data/arbiter.db
 ```
 
 The container exposes:
@@ -42,6 +75,7 @@ docker run -p 3000:3000 -p 3001:3001 arbiter \
   --target https://api.example.com \
   --port 3000 \
   --docs-port 3001 \
+  --db-path /data/arbiter.db \
   --verbose
 ```
 
@@ -52,6 +86,7 @@
 | `-t, --target <url>` | Target API URL to proxy to | (required) |
 | `-p, --port <number>` | Port to run the proxy server on | 8080 |
 | `-d, --docs-port <number>` | Port to run the documentation server on | 9000 |
+| `--db-path <path>` | Path to SQLite database file for persistence | (disabled) |
 | `--docs-only` | Run only the documentation server | false |
 | `--proxy-only` | Run only the proxy server | false |
 | `-v, --verbose` | Enable verbose logging | false |
@@ -149,6 +184,16 @@ app.listen(3000);
 Contributions are welcome! Please feel free to submit a Pull Request.
 
+## Publishing (maintainers)
+
+This repo auto-publishes to npm on push to `main` if the version in `package.json` is newer than the version on npm.
+
+Setup (one-time):
+- Add a repository secret `NPM_TOKEN` with publish rights for the package.
+
+Manual run:
+- You can also trigger the workflow manually from the Actions tab (workflow_dispatch).
+
 ## License
 
 This project is licensed under the MIT License - see the LICENSE file for details.
diff --git a/package.json b/package.json
index e448d8a1..d32044fe 100644
--- a/package.json
+++ b/package.json
@@ -49,7 +49,8 @@
     "http-proxy-middleware": "^3.0.3",
     "serve-static": "^1.16.2",
     "swagger-ui-express": "^5.0.1",
-    "yaml": "^2.7.0"
+    "yaml": "^2.7.0",
+    "better-sqlite3": "^9.6.0"
   },
   "devDependencies": {
     "@eslint/eslintrc": "^3.3.0",
diff --git a/src/cli.ts b/src/cli.ts
index 2c1fd560..f1eb4771 100644
--- a/src/cli.ts
+++ b/src/cli.ts
@@ -16,6 +16,7 @@ program
   .requiredOption('-t, --target <url>', 'target API URL to proxy to')
   .option('-p, --port <number>', 'port to run the proxy server on', '8080')
   .option('-d, --docs-port <number>', 'port to run the documentation server on', '9000')
+  .option('--db-path <path>', 'path to SQLite database file for persistence')
   .option('--docs-only', 'run only the documentation server')
   .option('--proxy-only', 'run only the proxy server')
   .option('-v, --verbose', 'enable verbose logging')
@@ -29,6 +30,7 @@ startServers({
   proxyPort: parseInt(options.port as string, 10),
   docsPort: parseInt(options.docsPort as string, 10),
   verbose: options.verbose as boolean,
+  dbPath: options.dbPath as string | undefined,
 }).catch((error: Error) => {
   console.error(chalk.red('Failed to start servers:'), error.message);
   process.exit(1);
diff --git a/src/server.ts b/src/server.ts
index f6974928..479037ee 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -3,7 +3,8 @@ import { createProxyMiddleware } from 'http-proxy-middleware';
 import { createServer } from 'http';
 import cors from 'cors';
 import zlib from 'zlib';
-import { openApiStore } from './store/openApiStore.js';
+import { openApiStore, OpenAPIStore } from './store/openApiStore.js';
+import { initStorage, storage } from './storage/index.js';
 import chalk from 'chalk';
 import { IncomingMessage, ServerResponse } from 'http';
 import type { SecurityInfo } from './store/openApiStore.js';
@@ -129,6 +130,7 @@ export interface ServerOptions {
   proxyPort: number;
   docsPort: number;
   verbose?: boolean;
+  dbPath?: string;
 }
 
 /**
@@ -139,10 +141,38 @@ export async function startServers({ proxyPort, docsPort, verbose = false, + dbPath, }: ServerOptions): Promise<{ proxyServer: ReturnType; docsServer: ReturnType; }> { + // Initialize persistent storage if dbPath provided + if (dbPath) { + try { + await initStorage(dbPath); + if (verbose) console.log(`Initialized SQLite storage at ${dbPath}`); + // Hydrate OpenAPI store with persisted endpoints (minimal info) + const persisted = await storage().getAllEndpoints(); + for (const ep of persisted) { + try { + openApiStore.recordEndpoint( + ep.path, + ep.method.toLowerCase(), + ep.data.request, + { + status: ep.data.response?.status || 200, + headers: ep.data.response?.headers || {}, + contentType: ep.data.response?.contentType || 'application/json', + body: '[Raw data stored]', + rawData: Buffer.alloc(0), + } + ); + } catch {} + } + } catch (e) { + console.error('Failed to initialize storage:', e); + } + } // Set the target URL in the OpenAPI store openApiStore.setTargetUrl(target); @@ -368,8 +398,19 @@ export async function startServers({ _rawResponseBuffer: buffer, // Store for later processing if needed }; - // Add the HAR entry to the store + // Add the HAR entry to the store and persist if enabled harStore.addEntry(harEntry); + if (dbPath) { + storage().saveHarEntry({ + startedDateTime: harEntry.startedDateTime, + time: harEntry.time, + request: harEntry.request, + response: { + ...harEntry.response, + // Do not persist raw buffer reference + }, + }).catch(() => {}); + } // Extract security schemes from headers - minimal work const securitySchemes: SecurityInfo[] = []; @@ -415,6 +456,26 @@ export async function startServers({ } ); + // Persist endpoint minimal info for reconstruction + if (dbPath) { + storage().upsertEndpoint(path, method.toLowerCase(), { + path, + method: method.toLowerCase(), + request: { + query: queryParams, + headers: requestHeaders, + contentType: requestHeaders['content-type'] || 'application/json', + body: requestBody, 
+ security: securitySchemes, + }, + response: { + status: proxyRes.statusCode || 500, + headers: responseHeaders, + contentType: responseHeaders['content-type'] || 'application/json', + }, + }).catch(() => {}); + } + if (verbose) { console.log(`${method} ${path} -> ${proxyRes.statusCode}`); } @@ -432,18 +493,73 @@ export async function startServers({ docsApp.use(cors()); // Create documentation endpoints - docsApp.get('/har', (req, res) => { + docsApp.get('/har', async (req, res) => { res.setHeader('Content-Type', 'application/json'); + if (dbPath) { + try { + const log = await storage().getHarLog(); + res.send(JSON.stringify(log)); + return; + } catch {} + } res.send(JSON.stringify(harStore.getHAR())); }); - docsApp.get('/openapi.json', (req, res) => { + docsApp.get('/openapi.json', async (req, res) => { res.setHeader('Content-Type', 'application/json'); + if (dbPath) { + try { + const persisted = await storage().getAllEndpoints(); + const tempStore = new OpenAPIStore(); + tempStore.setTargetUrl(target); + for (const ep of persisted) { + try { + tempStore.recordEndpoint( + ep.path, + ep.method.toLowerCase(), + ep.data.request, + { + status: ep.data.response?.status || 200, + headers: ep.data.response?.headers || {}, + contentType: ep.data.response?.contentType || 'application/json', + body: '[Raw data stored]' + } + ); + } catch {} + } + res.send(JSON.stringify(tempStore.getOpenAPISpec())); + return; + } catch {} + } res.send(JSON.stringify(openApiStore.getOpenAPISpec())); }); - docsApp.get('/openapi.yaml', (req, res) => { + docsApp.get('/openapi.yaml', async (req, res) => { res.setHeader('Content-Type', 'text/plain'); + if (dbPath) { + try { + const persisted = await storage().getAllEndpoints(); + const tempStore = new OpenAPIStore(); + tempStore.setTargetUrl(target); + for (const ep of persisted) { + try { + tempStore.recordEndpoint( + ep.path, + ep.method.toLowerCase(), + ep.data.request, + { + status: ep.data.response?.status || 200, + headers: 
ep.data.response?.headers || {},
+                  contentType: ep.data.response?.contentType || 'application/json',
+                  body: '[Raw data stored]'
+                }
+              );
+            } catch {}
+          }
+          res.send(tempStore.getOpenAPISpecAsYAML());
+          return;
+        } catch {}
+      }
     res.send(openApiStore.getOpenAPISpecAsYAML());
   });
 
diff --git a/src/storage/index.ts b/src/storage/index.ts
new file mode 100644
index 00000000..79a354ff
--- /dev/null
+++ b/src/storage/index.ts
@@ -0,0 +1,15 @@
+import { sqliteStorage } from './sqlite.js';
+import type { StorageAdapter } from './types.js';
+
+let storageInstance: StorageAdapter = sqliteStorage;
+
+export async function initStorage(dbPath: string): Promise<StorageAdapter> {
+  await storageInstance.init(dbPath);
+  return storageInstance;
+}
+
+export function storage(): StorageAdapter {
+  return storageInstance;
+}
+
+
diff --git a/src/storage/sqlite.ts b/src/storage/sqlite.ts
new file mode 100644
index 00000000..28e830b4
--- /dev/null
+++ b/src/storage/sqlite.ts
@@ -0,0 +1,96 @@
+import Database from 'better-sqlite3';
+import type { StorageAdapter } from './types.js';
+
+export class SQLiteStorage implements StorageAdapter {
+  private db: Database.Database | null = null;
+
+  async init(dbPath: string): Promise<void> {
+    this.db = new Database(dbPath);
+    this.db.pragma('journal_mode = WAL');
+    this.db.exec(`
+      CREATE TABLE IF NOT EXISTS har_entries (
+        id INTEGER PRIMARY KEY AUTOINCREMENT,
+        startedDateTime TEXT NOT NULL,
+        time INTEGER NOT NULL,
+        request TEXT NOT NULL,
+        response TEXT NOT NULL
+      );
+      CREATE INDEX IF NOT EXISTS idx_har_started ON har_entries(startedDateTime);
+
+      CREATE TABLE IF NOT EXISTS endpoints (
+        id INTEGER PRIMARY KEY AUTOINCREMENT,
+        path TEXT NOT NULL,
+        method TEXT NOT NULL,
+        data TEXT NOT NULL,
+        UNIQUE(path, method)
+      );
+      CREATE INDEX IF NOT EXISTS idx_endpoints_path_method ON endpoints(path, method);
+    `);
+  }
+
+  isReady(): boolean {
+    return this.db !== null;
+  }
+
+  async close(): Promise<void> {
+    if (this.db) {
+      this.db.close();
+      this.db = null;
+    }
+  }
+
+  async 
saveHarEntry(entry: any): Promise<void> {
+    if (!this.db) return;
+    const stmt = this.db.prepare(
+      'INSERT INTO har_entries (startedDateTime, time, request, response) VALUES (?, ?, ?, ?)'
+    );
+    stmt.run(
+      entry.startedDateTime,
+      entry.time,
+      JSON.stringify(entry.request),
+      JSON.stringify(entry.response)
+    );
+  }
+
+  async getHarLog(): Promise<any> {
+    if (!this.db) return { log: { version: '1.2', creator: { name: 'Arbiter', version: '1.0.0' }, entries: [] } };
+    const rows = this.db.prepare('SELECT startedDateTime, time, request, response FROM har_entries ORDER BY id ASC').all();
+    const entries = rows.map((r: any) => ({
+      startedDateTime: r.startedDateTime,
+      time: r.time,
+      request: JSON.parse(r.request),
+      response: JSON.parse(r.response),
+    }));
+    return {
+      log: {
+        version: '1.2',
+        creator: { name: 'Arbiter', version: '1.0.0' },
+        entries,
+      },
+    };
+  }
+
+  async clearHar(): Promise<void> {
+    if (!this.db) return;
+    this.db.prepare('DELETE FROM har_entries').run();
+  }
+
+  async upsertEndpoint(path: string, method: string, data: any): Promise<void> {
+    if (!this.db) return;
+    const stmt = this.db.prepare(
+      `INSERT INTO endpoints (path, method, data) VALUES (?, ?, ?)
+       ON CONFLICT(path, method) DO UPDATE SET data=excluded.data`
+    );
+    stmt.run(path, method.toLowerCase(), JSON.stringify(data));
+  }
+
+  async getAllEndpoints(): Promise<Array<{ path: string; method: string; data: any }>> {
+    if (!this.db) return [];
+    const rows = this.db.prepare('SELECT path, method, data FROM endpoints').all();
+    return rows.map((r: any) => ({ path: r.path, method: r.method, data: JSON.parse(r.data) }));
+  }
+}
+
+export const sqliteStorage = new SQLiteStorage();
+
+
diff --git a/src/storage/types.ts b/src/storage/types.ts
new file mode 100644
index 00000000..c95bc99d
--- /dev/null
+++ b/src/storage/types.ts
@@ -0,0 +1,16 @@
+export interface StorageAdapter {
+  init(dbPath: string): Promise<void>;
+  isReady(): boolean;
+  close(): Promise<void>;
+
+  // HAR persistence
+  saveHarEntry(entry: any): Promise<void>;
+  getHarLog(): Promise<any>;
+  clearHar(): Promise<void>;
+
+  // Endpoint persistence
+  upsertEndpoint(path: string, method: string, data: any): Promise<void>;
+  getAllEndpoints(): Promise<Array<{ path: string; method: string; data: any }>>;
+}
+
+
diff --git a/src/store/openApiStore.ts b/src/store/openApiStore.ts
index 16344ffa..25c71871 100644
--- a/src/store/openApiStore.ts
+++ b/src/store/openApiStore.ts
@@ -144,6 +144,17 @@ export class OpenAPIStore {
     this.rawDataCache.clear();
   }
 
+  // Persist/restore helpers
+  public getEndpoint(path: string, method: string): EndpointInfo | undefined {
+    const key = `${method.toLowerCase()} ${path}`;
+    return this.endpoints.get(key);
+  }
+
+  public importEndpoint(path: string, method: string, data: EndpointInfo): void {
+    const key = `${method.toLowerCase()} ${path}`;
+    this.endpoints.set(key, data);
+  }
+
   private deepMergeSchemas(schemas: OpenAPIV3_1.SchemaObject[]): OpenAPIV3_1.SchemaObject {
     if (schemas.length === 0) return { type: 'object' };
     if (schemas.length === 1) return schemas[0];