mirror of
https://github.com/LukeHagar/redocly-cli.git
synced 2025-12-06 04:21:09 +00:00
chore: initial full rewrite commit
This commit is contained in:
11
.babelrc
11
.babelrc
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"presets": [
|
||||
["@babel/preset-env",
|
||||
{
|
||||
"targets": {
|
||||
"node": "current"
|
||||
}
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
__tests__
|
||||
@@ -1,24 +0,0 @@
|
||||
env:
|
||||
browser: true
|
||||
es6: true
|
||||
node: true
|
||||
jest: true
|
||||
extends:
|
||||
- airbnb-base
|
||||
globals:
|
||||
Atomics: readonly
|
||||
SharedArrayBuffer: readonly
|
||||
parserOptions:
|
||||
ecmaVersion: 2018
|
||||
sourceType: module
|
||||
rules:
|
||||
class-methods-use-this: off
|
||||
max-len: [1, 120]
|
||||
global-require: off
|
||||
import/no-dynamic-require: off
|
||||
no-plusplus: off
|
||||
no-restricted-syntax: off
|
||||
no-console: off
|
||||
no-use-before-define: off
|
||||
no-await-in-loop: off
|
||||
|
||||
19
.github/workflows/unit-tests.yaml
vendored
Normal file
19
.github/workflows/unit-tests.yaml
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
name: Unit Tests
|
||||
|
||||
on: [push]
|
||||
|
||||
jobs:
|
||||
build-and-unit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: cache node modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
|
||||
key: npm-${{ hashFiles('package-lock.json') }}
|
||||
restore-keys: |
|
||||
npm-${{ hashFiles('package-lock.json') }}
|
||||
npm-
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
8
.gitignore
vendored
8
.gitignore
vendored
@@ -1,10 +1,10 @@
|
||||
.DS_Store
|
||||
|
||||
node_modules/
|
||||
|
||||
coverage/
|
||||
.vscode/
|
||||
.eslintcache
|
||||
test/specs/openapi/rebilly-full.yaml
|
||||
test/specs/openapi/rebilly-full (1).yaml
|
||||
|
||||
yarn.lock
|
||||
dist/
|
||||
test/
|
||||
lib/
|
||||
16
.npmignore
16
.npmignore
@@ -1,11 +1,5 @@
|
||||
node_modules/
|
||||
src/
|
||||
media/
|
||||
coverage/
|
||||
test/
|
||||
dist/__tests__
|
||||
dist/error/__tests__
|
||||
RULES.md
|
||||
README.md
|
||||
definitions/
|
||||
*/__tests__/*
|
||||
*
|
||||
!lib/*
|
||||
!package.json
|
||||
!README.md
|
||||
!LICENSE
|
||||
10
.travis.yml
10
.travis.yml
@@ -1,10 +0,0 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- "12"
|
||||
cache:
|
||||
directories:
|
||||
- "node_modules"
|
||||
jobs:
|
||||
include:
|
||||
- stage: test
|
||||
script: npm run test
|
||||
171
RULES.md
171
RULES.md
@@ -2,173 +2,4 @@
|
||||
|
||||
All supported rules are listed below. To change your settings for any given rule, just add or modify a corresponding item in the `rules` section of the `.redocly.yaml` in your working directory.
|
||||
|
||||
### api-servers
|
||||
OpenAPI servers must be present and be a non-empty array.
|
||||
|
||||
### camel-case-names
|
||||
Schemas and parameters names should be in camelCase. This rule does a lot of string comparison and matching operations, so it may increase time of validation significantly.
|
||||
|
||||
### path-param-exists
|
||||
Each path parameter in the `parameters` section must be present in the path string.
|
||||
|
||||
### license-url
|
||||
License, if provided within the `info` section, must provide the `url` field.
|
||||
|
||||
### no-unused-schemas
|
||||
Unused schemas defined in `components` may indicate a mistake. This rule checks for that scenario.
|
||||
|
||||
### operation-2xx-response
|
||||
When designing an API it's usually expected to do something successfully, although it might fail. So, this rule validates, that there is at least one response in the operation with a 2xx status code.
|
||||
|
||||
### operation-description
|
||||
This rule enforces that a `description` field is included in `operation`s definitions.
|
||||
|
||||
### operation-operationId
|
||||
Enforce presence of the `operationId` field in each `operation`. This is a highly recommended practice.
|
||||
|
||||
### operation-operationId-unique
|
||||
The `operationId`s are expected to be unique to really identify operations. This rule checks this principle.
|
||||
|
||||
### operation-tags
|
||||
The `tags` field must be present and be a non-empty array in each `operation`.
|
||||
|
||||
### parameter-description
|
||||
The "parameter" object should contain "description" field.
|
||||
|
||||
### path-declarations-must-exist
|
||||
Define path parameters within the `operation` path definition. Each declaration of the parameter name within path must be a non-empty string. For example, `/api/user/{userId}/profie` is a valid definition with the `userId` parameter, but `/api/user/{}/profile` is not.
|
||||
|
||||
### path-keys-no-trailing-slash
|
||||
Endpoints are less confusing without trailing slashes in the path. Also, tooling may treat `example.com/api/users` and `example.com/api/users/` in the same way, so we suggest you be consistent in your API definition.
|
||||
|
||||
### provide-contact
|
||||
Info object must contain the `contact` field.
|
||||
|
||||
APIs are not perfect, and the contact field lets users know who can help.
|
||||
|
||||
### server-not-example
|
||||
The "server" object should not point to "example.com" domain.
|
||||
|
||||
### servers-no-trailing-slash
|
||||
The server URL must not have a trailing slash.
|
||||
|
||||
Tooling may treat `example.com` and `example.com/` in the same way. In the worst case, the latter option when joined with the operations paths might result in `example.com//api/users`.
|
||||
|
||||
### model-description
|
||||
The "model" object should contain "description" field.
|
||||
|
||||
### unique-parameter-names
|
||||
Parameters in `operation` objects must be `unique` definition wide.
|
||||
|
||||
### operations-tags-alpabetical
|
||||
Items in `tags` object of `operation`s should be sorted alphabetically.
|
||||
|
||||
### operation-tags-defined
|
||||
Items in `tags` object of `operation`s should be defined in the top level `tags` object.
|
||||
|
||||
### oas3-schema
|
||||
This rule enforces the structural validation of the OpenAPI definitions according to the OpenAPI Specification 3.0.2. It can be fine-tuned to disable or change the message level for each specific type of OpenAPI Objects (we call those sub-rules). For example, if you have a custom structure of the `servers` object, you prevent related error messages by updating your `.redocly.yaml` to the following pattern:
|
||||
|
||||
```yaml
|
||||
lint:
|
||||
codeframes: off
|
||||
rules:
|
||||
...other rules
|
||||
oas3-schema:
|
||||
servers:
|
||||
level: warning
|
||||
```
|
||||
Or even totally disabled:
|
||||
```yaml
|
||||
lint:
|
||||
codeframes: off
|
||||
rules:
|
||||
...other rules
|
||||
oas3-schema:
|
||||
servers: off
|
||||
```
|
||||
|
||||
Below, you can find the table of available sub-rules you can update:
|
||||
|
||||
| Sub-rule name | OpenAPI Object it corresponds to|
|
||||
|---|---|
|
||||
| root | [OpenAPI Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#oasObject) |
|
||||
| info | [OpenAPI Info Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#infoObject) |
|
||||
| contact | [OpenAPI Contact Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#contactObject) |
|
||||
| discriminator | [OpenAPI Discriminator Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminatorObject) |
|
||||
| encoding | [OpenAPI Encoding Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#encodingObject) |
|
||||
| example | [OpenAPI Example Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#exampleObject) |
|
||||
| external-docs | [OpenAPI External Documentation Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#externalDocumentationObject) |
|
||||
| header | [OpenAPI Header Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#headerObject) |
|
||||
| license | [OpenAPI License Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#licenseObject) |
|
||||
| link | [OpenAPI Link Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#linkObject) |
|
||||
| media-object | [OpenAPI Media Type Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#mediaTypeObject) |
|
||||
| operation | [OpenAPI Operation Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#operationObject) |
|
||||
| parameter | [OpenAPI Parameter Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#parameterObject) |
|
||||
| path | [OpenAPI Path Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#pathItemObject) |
|
||||
| request-body | [OpenAPI Request Body Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#requestBodyObject) |
|
||||
| response | [OpenAPI Response Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#responseObject) |
|
||||
| schema | [OpenAPI Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#schemaObject) |
|
||||
| secuirty-schema | [OpenAPI Security Scheme Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#securitySchemeObject)|
|
||||
| auth-code-flow | [OpenAPI Flow Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#oauthFlowObject)|
|
||||
| client-creds-flow | [OpenAPI Flow Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#oauthFlowObject)|
|
||||
| implicit-flow | [OpenAPI Flow Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#oauthFlowObject)|
|
||||
| password-flow | [OpenAPI Flow Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#oauthFlowObject)|
|
||||
| server | [OpenAPI Server Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#serverObject) |
|
||||
| server-variable | [OpenAPI Server Variable Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#serverVariableObject) |
|
||||
| tag | [OpenAPI Tag Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#tagObject) |
|
||||
| xml | [OpenAPI XML Obejct](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#xmlObject) |
|
||||
|
||||
#### no-extra-fields
|
||||
By default, custom fields, not defined within OpenAPI specification can be included only using `x-` prefix. This rule enforces such policy.
|
||||
|
||||
### string-matcher
|
||||
Allows you to create custom Regexp based rules.
|
||||
|
||||
Each sub-rule should define OpenAPI type and its property on which it should be triggered and can also have an error message and a level (same as for generic rules).
|
||||
|
||||
Also, each entry of `rules` must have one of following fields:
|
||||
- startsWith
|
||||
- endsWith
|
||||
- regexp
|
||||
|
||||
If `regexp` is used, the rule will match the value of the `on` type's property against the regular expression provided and if it doesn't matches throw an error.
|
||||
|
||||
In case of `startsWith` and `endsWith` options, property's value must start or end with given values.
|
||||
|
||||
Also, you can provide a `not: true` to invert the rule. For example, it'll mean that regexp SHOULD NOT match the value, or string SHOULD NOT start with given parameter.
|
||||
|
||||
Usage example:
|
||||
```
|
||||
lint:
|
||||
rules:
|
||||
string-matcher:
|
||||
level: warning
|
||||
rules:
|
||||
UrlsNotExample:
|
||||
on: OpenAPIServer.url
|
||||
not: true
|
||||
startsWith: https://api-sandbox
|
||||
level: error
|
||||
message: 'Example servers should not be in api sandbox. God knows why.'
|
||||
ParameterNameStartCapital:
|
||||
on: OpenAPIParameter.name
|
||||
not: true
|
||||
regexp: 'internal'
|
||||
message: 'Parameter names not contain word "internal".'
|
||||
ExternalDocsHelpdesk:
|
||||
level: 'error'
|
||||
on: OpenAPIExternalDocumentation.url
|
||||
startsWith: docs.redoc.ly
|
||||
message: 'External docs must be only on corporate helpdesk.'
|
||||
OnlyOpensourceLicense:
|
||||
on: OpenAPILicense.name
|
||||
regexp: '^Rebilly$'
|
||||
message: 'Only one license can be used.'
|
||||
```
|
||||
|
||||
## Linting rules
|
||||
### suggest-possible-refs
|
||||
It is not uncommon to have a bad `$ref` in your definition. For example, instead of `#components/schemas/User` one might type `#components/schemas/Use`.
|
||||
|
||||
With this rule enabled, @redocly/openapi-cli will try to find the closest possible valid `$ref` address in the definition.
|
||||
TBD
|
||||
File diff suppressed because it is too large
Load Diff
23
benchmark/benches/recommended-oas3.bench.ts
Normal file
23
benchmark/benches/recommended-oas3.bench.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { join as pathJoin, resolve as pathResolve } from 'path';
|
||||
|
||||
import { validateDocument } from '../../src/validate';
|
||||
import { parseYamlToDocument } from '../../src/__tests__/utils';
|
||||
|
||||
import { LintConfig } from '../../src/config/config';
|
||||
|
||||
export const name = 'Validate with recommended rules';
|
||||
export const count = 10;
|
||||
|
||||
const rebillyDefinitionRef = pathResolve(pathJoin(__dirname, 'rebilly.yaml'));
|
||||
const rebillyDocument = parseYamlToDocument(
|
||||
readFileSync(rebillyDefinitionRef, 'utf-8'),
|
||||
rebillyDefinitionRef,
|
||||
);
|
||||
|
||||
export function measureAsync() {
|
||||
return validateDocument({
|
||||
document: rebillyDocument,
|
||||
config: new LintConfig({}),
|
||||
});
|
||||
}
|
||||
24
benchmark/benches/resolve-with-no-external.bench.ts
Normal file
24
benchmark/benches/resolve-with-no-external.bench.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import * as path from 'path';
|
||||
import { readFileSync } from 'fs';
|
||||
|
||||
import { resolveDocument, BaseResolver } from '../../src/resolve';
|
||||
import { parseYamlToDocument } from '../../src/__tests__/utils';
|
||||
import { DefinitionRootType } from '../../src/types';
|
||||
|
||||
export const name = 'Resolve with no external refs';
|
||||
export const count = 10;
|
||||
|
||||
const rebillyDefinitionRef = path.resolve(path.join(__dirname, 'rebilly.yaml'));
|
||||
const rebillyDocument = parseYamlToDocument(
|
||||
readFileSync(rebillyDefinitionRef, 'utf-8'),
|
||||
rebillyDefinitionRef,
|
||||
);
|
||||
const externalRefResolver = new BaseResolver();
|
||||
|
||||
export function measureAsync() {
|
||||
return resolveDocument({
|
||||
rootDocument: rebillyDocument,
|
||||
externalRefResolver,
|
||||
rootType: DefinitionRootType,
|
||||
});
|
||||
}
|
||||
35
benchmark/benches/validate-with-many-rules.bench.ts
Normal file
35
benchmark/benches/validate-with-many-rules.bench.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { join as pathJoin, resolve as pathResolve } from 'path';
|
||||
|
||||
import { validateDocument } from '../../src/validate';
|
||||
import { parseYamlToDocument, makeConfigForRuleset } from '../../src/__tests__/utils';
|
||||
|
||||
export const name = 'Validate with 50 top-level rules';
|
||||
export const count = 10;
|
||||
|
||||
const rebillyDefinitionRef = pathResolve(pathJoin(__dirname, 'rebilly.yaml'));
|
||||
const rebillyDocument = parseYamlToDocument(
|
||||
readFileSync(rebillyDefinitionRef, 'utf-8'),
|
||||
rebillyDefinitionRef,
|
||||
);
|
||||
|
||||
const ruleset: any = {};
|
||||
for (let i = 0; i < 50; i++) {
|
||||
ruleset['rule-' + i] = () => {
|
||||
let count = 0;
|
||||
return {
|
||||
Schema() {
|
||||
count++;
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
const config = makeConfigForRuleset(ruleset);
|
||||
|
||||
export function measureAsync() {
|
||||
return validateDocument({
|
||||
document: rebillyDocument,
|
||||
config,
|
||||
});
|
||||
}
|
||||
41
benchmark/benches/validate-with-nested-rule.bench.ts
Normal file
41
benchmark/benches/validate-with-nested-rule.bench.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { join as pathJoin, resolve as pathResolve } from 'path';
|
||||
|
||||
import { validateDocument } from '../../src/validate';
|
||||
import { parseYamlToDocument, makeConfigForRuleset } from '../../src/__tests__/utils';
|
||||
|
||||
export const name = 'Validate with single nested rule';
|
||||
export const count = 10;
|
||||
|
||||
const rebillyDefinitionRef = pathResolve(pathJoin(__dirname, 'rebilly.yaml'));
|
||||
const rebillyDocument = parseYamlToDocument(
|
||||
readFileSync(rebillyDefinitionRef, 'utf-8'),
|
||||
rebillyDefinitionRef,
|
||||
);
|
||||
|
||||
const visitor = {
|
||||
test: () => {
|
||||
let count = 0;
|
||||
return {
|
||||
PathItem: {
|
||||
Parameter() {
|
||||
count++;
|
||||
},
|
||||
Operation: {
|
||||
Parameter() {
|
||||
count++;
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
const config = makeConfigForRuleset(visitor);
|
||||
|
||||
export function measureAsync() {
|
||||
return validateDocument({
|
||||
document: rebillyDocument,
|
||||
config,
|
||||
});
|
||||
}
|
||||
23
benchmark/benches/validate-with-no-rules.bench.ts
Normal file
23
benchmark/benches/validate-with-no-rules.bench.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { join as pathJoin, resolve as pathResolve } from 'path';
|
||||
|
||||
import { validateDocument } from '../../src/validate';
|
||||
import { parseYamlToDocument, makeConfigForRuleset } from '../../src/__tests__/utils';
|
||||
|
||||
export const name = 'Validate with no rules';
|
||||
export const count = 10;
|
||||
|
||||
const rebillyDefinitionRef = pathResolve(pathJoin(__dirname, 'rebilly.yaml'));
|
||||
const rebillyDocument = parseYamlToDocument(
|
||||
readFileSync(rebillyDefinitionRef, 'utf-8'),
|
||||
rebillyDefinitionRef,
|
||||
);
|
||||
|
||||
const config = makeConfigForRuleset({});
|
||||
|
||||
export function measureAsync() {
|
||||
return validateDocument({
|
||||
document: rebillyDocument,
|
||||
config,
|
||||
});
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { join as pathJoin, resolve as pathResolve } from 'path';
|
||||
|
||||
import { validateDocument } from '../../src/validate';
|
||||
import { parseYamlToDocument, makeConfigForRuleset } from '../../src/__tests__/utils';
|
||||
|
||||
export const name = 'Validate with single top-level rule and report';
|
||||
export const count = 10;
|
||||
|
||||
const rebillyDefinitionRef = pathResolve(pathJoin(__dirname, 'rebilly.yaml'));
|
||||
const rebillyDocument = parseYamlToDocument(
|
||||
readFileSync(rebillyDefinitionRef, 'utf-8'),
|
||||
rebillyDefinitionRef,
|
||||
);
|
||||
|
||||
const config = makeConfigForRuleset({
|
||||
test: () => {
|
||||
return {
|
||||
Schema(schema, ctx) {
|
||||
if (schema.type === 'number') {
|
||||
ctx.report({
|
||||
message: 'type number is not allowed',
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
export function measureAsync() {
|
||||
return validateDocument({
|
||||
document: rebillyDocument,
|
||||
config,
|
||||
});
|
||||
}
|
||||
32
benchmark/benches/validate-with-top-level-rule.bench.ts
Normal file
32
benchmark/benches/validate-with-top-level-rule.bench.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { join as pathJoin, resolve as pathResolve } from 'path';
|
||||
|
||||
import { validateDocument } from '../../src/validate';
|
||||
import { parseYamlToDocument, makeConfigForRuleset } from '../../src/__tests__/utils';
|
||||
|
||||
export const name = 'Validate with single top-level rule';
|
||||
export const count = 10;
|
||||
|
||||
const rebillyDefinitionRef = pathResolve(pathJoin(__dirname, 'rebilly.yaml'));
|
||||
const rebillyDocument = parseYamlToDocument(
|
||||
readFileSync(rebillyDefinitionRef, 'utf-8'),
|
||||
rebillyDefinitionRef,
|
||||
);
|
||||
|
||||
const config = makeConfigForRuleset({
|
||||
test: () => {
|
||||
let count = 0;
|
||||
return {
|
||||
Schema() {
|
||||
count++;
|
||||
},
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
export function measureAsync() {
|
||||
return validateDocument({
|
||||
document: rebillyDocument,
|
||||
config,
|
||||
});
|
||||
}
|
||||
313
benchmark/benchmark.js
Normal file
313
benchmark/benchmark.js
Normal file
@@ -0,0 +1,313 @@
|
||||
'use strict';
|
||||
|
||||
const os = require('os');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const assert = require('assert');
|
||||
const { execSync } = require('child_process');
|
||||
|
||||
const { red, green, yellow, cyan, grey } = require('./colors');
|
||||
const { sampleModule } = require('./fork');
|
||||
|
||||
const NS_PER_SEC = 1e9;
|
||||
const LOCAL = 'local';
|
||||
|
||||
// The maximum time in seconds a benchmark is allowed to run before finishing.
|
||||
const maxTime = 10;
|
||||
// The minimum sample size required to perform statistical analysis.
|
||||
const minSamples = 5;
|
||||
|
||||
function LOCAL_DIR(...paths) {
|
||||
return path.join(__dirname, '..', ...paths);
|
||||
}
|
||||
|
||||
// Build a benchmark-friendly environment for the given revision
|
||||
// and returns path to its 'dist' directory.
|
||||
function prepareRevision(revision) {
|
||||
console.log(`🍳 Preparing ${revision}...`);
|
||||
|
||||
if (revision === LOCAL) {
|
||||
return tscBuild(LOCAL_DIR());
|
||||
}
|
||||
|
||||
// Returns the complete git hash for a given git revision reference.
|
||||
const hash = exec(`git rev-parse "${revision}"`);
|
||||
|
||||
const dir = path.join(os.tmpdir(), 'openapi-cli-benchmark', hash);
|
||||
fs.rmdirSync(dir, { recursive: true });
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
|
||||
exec(`git archive "${hash}" | tar -xC "${dir}"`);
|
||||
exec('npm ci', { cwd: dir });
|
||||
|
||||
const to = path.join(dir, 'benchmark/benches');
|
||||
exec(`rm -rf ${to}`);
|
||||
exec(`cp -R ${LOCAL_DIR('benchmark/benches')} ${to}`);
|
||||
|
||||
return tscBuild(dir);
|
||||
}
|
||||
|
||||
function tscBuild(dir) {
|
||||
const oldCwd = process.cwd();
|
||||
process.chdir(dir);
|
||||
|
||||
execSync('rm -rf dist && npx tsc', { stdio: 'inherit' });
|
||||
exec(
|
||||
`cp ${path.join(dir, 'benchmark/benches/*.yaml')} ${path.join(dir, 'dist/benchmark/benches/')}`,
|
||||
);
|
||||
|
||||
process.chdir(oldCwd);
|
||||
return path.join(dir, 'dist/benchmark/benches');
|
||||
}
|
||||
|
||||
async function collectSamples(modulePath) {
|
||||
const samples = [];
|
||||
|
||||
// If time permits, increase sample size to reduce the margin of error.
|
||||
const start = Date.now();
|
||||
while (samples.length < minSamples || (Date.now() - start) / 1e3 < maxTime) {
|
||||
const { clocked, memUsed } = await sampleModule(modulePath);
|
||||
assert(clocked > 0);
|
||||
assert(memUsed > 0);
|
||||
samples.push({ clocked, memUsed });
|
||||
}
|
||||
return samples;
|
||||
}
|
||||
|
||||
// T-Distribution two-tailed critical values for 95% confidence.
|
||||
// See http://www.itl.nist.gov/div898/handbook/eda/section3/eda3672.htm.
|
||||
const tTable = /* prettier-ignore */ {
|
||||
'1': 12.706, '2': 4.303, '3': 3.182, '4': 2.776, '5': 2.571, '6': 2.447,
|
||||
'7': 2.365, '8': 2.306, '9': 2.262, '10': 2.228, '11': 2.201, '12': 2.179,
|
||||
'13': 2.16, '14': 2.145, '15': 2.131, '16': 2.12, '17': 2.11, '18': 2.101,
|
||||
'19': 2.093, '20': 2.086, '21': 2.08, '22': 2.074, '23': 2.069, '24': 2.064,
|
||||
'25': 2.06, '26': 2.056, '27': 2.052, '28': 2.048, '29': 2.045, '30': 2.042,
|
||||
infinity: 1.96,
|
||||
};
|
||||
|
||||
// Computes stats on benchmark results.
|
||||
function computeStats(samples) {
|
||||
assert(samples.length > 1);
|
||||
|
||||
// Compute the sample mean (estimate of the population mean).
|
||||
let mean = 0;
|
||||
let meanMemUsed = 0;
|
||||
for (const { clocked, memUsed } of samples) {
|
||||
mean += clocked;
|
||||
meanMemUsed += memUsed;
|
||||
}
|
||||
mean /= samples.length;
|
||||
meanMemUsed /= samples.length;
|
||||
|
||||
// Compute the sample variance (estimate of the population variance).
|
||||
let variance = 0;
|
||||
for (const { clocked } of samples) {
|
||||
variance += (clocked - mean) ** 2;
|
||||
}
|
||||
variance /= samples.length - 1;
|
||||
|
||||
// Compute the sample standard deviation (estimate of the population standard deviation).
|
||||
const sd = Math.sqrt(variance);
|
||||
|
||||
// Compute the standard error of the mean (a.k.a. the standard deviation of the sampling distribution of the sample mean).
|
||||
const sem = sd / Math.sqrt(samples.length);
|
||||
|
||||
// Compute the degrees of freedom.
|
||||
const df = samples.length - 1;
|
||||
|
||||
// Compute the critical value.
|
||||
const critical = tTable[df] || tTable.infinity;
|
||||
|
||||
// Compute the margin of error.
|
||||
const moe = sem * critical;
|
||||
|
||||
// The relative margin of error (expressed as a percentage of the mean).
|
||||
const rme = (moe / mean) * 100 || 0;
|
||||
|
||||
return {
|
||||
memPerOp: Math.floor(meanMemUsed),
|
||||
ops: NS_PER_SEC / mean,
|
||||
deviation: rme,
|
||||
numSamples: samples.length,
|
||||
};
|
||||
}
|
||||
|
||||
function beautifyBenchmark(results) {
|
||||
const nameMaxLen = maxBy(results, ({ name }) => name.length);
|
||||
const opsTop = maxBy(results, ({ ops }) => ops);
|
||||
const opsMaxLen = maxBy(results, ({ ops }) => beautifyNumber(ops).length);
|
||||
const memPerOpMaxLen = maxBy(results, ({ memPerOp }) => beautifyBytes(memPerOp).length);
|
||||
|
||||
for (const result of results) {
|
||||
printBench(result);
|
||||
}
|
||||
|
||||
function printBench(bench) {
|
||||
const { name, memPerOp, ops, deviation, numSamples } = bench;
|
||||
console.log(
|
||||
' ' +
|
||||
nameStr() +
|
||||
grey(' x ') +
|
||||
opsStr() +
|
||||
' ops/sec ' +
|
||||
grey('\xb1') +
|
||||
deviationStr() +
|
||||
cyan('%') +
|
||||
grey(' x ') +
|
||||
memPerOpStr() +
|
||||
'/op' +
|
||||
grey(' (' + numSamples + ' runs sampled)'),
|
||||
);
|
||||
|
||||
function nameStr() {
|
||||
const nameFmt = name.padEnd(nameMaxLen);
|
||||
return ops === opsTop ? green(nameFmt) : nameFmt;
|
||||
}
|
||||
|
||||
function opsStr() {
|
||||
const percent = ops / opsTop;
|
||||
const colorFn = percent > 0.95 ? green : percent > 0.8 ? yellow : red;
|
||||
return colorFn(beautifyNumber(ops).padStart(opsMaxLen));
|
||||
}
|
||||
|
||||
function deviationStr() {
|
||||
const colorFn = deviation > 5 ? red : deviation > 2 ? yellow : green;
|
||||
return colorFn(deviation.toFixed(2));
|
||||
}
|
||||
|
||||
function memPerOpStr() {
|
||||
return beautifyBytes(memPerOp).padStart(memPerOpMaxLen);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function beautifyBytes(bytes) {
|
||||
const sizes = ['Bytes', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log2(bytes) / 10);
|
||||
return beautifyNumber(bytes / 2 ** (i * 10)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
function beautifyNumber(num) {
|
||||
return Number(num.toFixed(num > 100 ? 0 : 2)).toLocaleString();
|
||||
}
|
||||
|
||||
function maxBy(array, fn) {
|
||||
return Math.max(...array.map(fn));
|
||||
}
|
||||
|
||||
// Prepare all revisions and run benchmarks matching a pattern against them.
|
||||
async function prepareAndRunBenchmarks(benchmarkPatterns, revisions) {
|
||||
const environments = revisions.map((revision) => ({
|
||||
revision,
|
||||
distPath: prepareRevision(revision),
|
||||
}));
|
||||
|
||||
console.log(`🚀 Starting benchmarks...`);
|
||||
|
||||
for (const benchmark of matchBenchmarks(benchmarkPatterns)) {
|
||||
const results = [];
|
||||
for (let i = 0; i < environments.length; ++i) {
|
||||
const environment = environments[i];
|
||||
const modulePath = path.join(environment.distPath, benchmark);
|
||||
|
||||
if (i === 0) {
|
||||
const { name } = await sampleModule(modulePath);
|
||||
console.log('⏱️ ' + name);
|
||||
}
|
||||
|
||||
try {
|
||||
const samples = await collectSamples(modulePath);
|
||||
|
||||
results.push({
|
||||
name: environment.revision,
|
||||
samples,
|
||||
...computeStats(samples),
|
||||
});
|
||||
process.stdout.write(' ' + cyan(i + 1) + ' tests completed.\u000D');
|
||||
} catch (error) {
|
||||
console.log(' ' + environment.revision + ': ' + red(String(error)));
|
||||
}
|
||||
}
|
||||
console.log('\n');
|
||||
|
||||
beautifyBenchmark(results);
|
||||
console.log('');
|
||||
}
|
||||
}
|
||||
|
||||
function findFiles(cwd, pattern) {
|
||||
const out = exec(`find . -path '${pattern}'`, { cwd });
|
||||
return out.split('\n').filter(Boolean);
|
||||
}
|
||||
|
||||
function removeTrailingNewLine(str) {
|
||||
if (str == null) {
|
||||
return str;
|
||||
}
|
||||
return str.split('\n').slice(0, -1).join('\n');
|
||||
}
|
||||
|
||||
function exec(command, options) {
|
||||
const output = execSync(command, {
|
||||
maxBuffer: 10 * 1024 * 1024, // 10MB
|
||||
encoding: 'utf-8',
|
||||
...options,
|
||||
});
|
||||
return removeTrailingNewLine(output);
|
||||
}
|
||||
|
||||
// Find all benchmark tests to be run.
|
||||
function matchBenchmarks(patterns) {
|
||||
let benchmarks = findFiles(LOCAL_DIR('dist/benchmark/benches'), '*.bench.js');
|
||||
if (patterns.length > 0) {
|
||||
benchmarks = benchmarks.filter((benchmark) =>
|
||||
patterns.some((pattern) => path.join('benchmark/benches', benchmark).includes(pattern)),
|
||||
);
|
||||
}
|
||||
|
||||
if (benchmarks.length === 0) {
|
||||
console.warn('No benchmarks matching: ' + patterns.map(bold).join(''));
|
||||
}
|
||||
|
||||
return benchmarks;
|
||||
}
|
||||
|
||||
function getArguments(argv) {
|
||||
const revsIdx = argv.indexOf('--revs');
|
||||
const revsArgs = revsIdx === -1 ? [] : argv.slice(revsIdx + 1);
|
||||
const benchmarkPatterns = revsIdx === -1 ? argv : argv.slice(0, revsIdx);
|
||||
let assumeArgs;
|
||||
let revisions;
|
||||
switch (revsArgs.length) {
|
||||
case 0:
|
||||
assumeArgs = [...benchmarkPatterns, '--revs', 'local', 'HEAD'];
|
||||
revisions = [LOCAL, 'HEAD'];
|
||||
break;
|
||||
case 1:
|
||||
if (revsArgs[0] === LOCAL) {
|
||||
assumeArgs = [...benchmarkPatterns, '--revs', revsArgs[0]];
|
||||
revisions = [revsArgs[0]];
|
||||
} else {
|
||||
assumeArgs = [...benchmarkPatterns, '--revs', 'local', revsArgs[0]];
|
||||
revisions = [LOCAL, revsArgs[0]];
|
||||
}
|
||||
break;
|
||||
default:
|
||||
revisions = revsArgs;
|
||||
break;
|
||||
}
|
||||
if (assumeArgs) {
|
||||
console.warn('Assuming you meant: ' + bold('benchmark ' + assumeArgs.join(' ')));
|
||||
}
|
||||
return { benchmarkPatterns, revisions };
|
||||
}
|
||||
|
||||
function bold(str) {
|
||||
return '\u001b[1m' + str + '\u001b[0m';
|
||||
}
|
||||
|
||||
// Get the revisions and make things happen!
|
||||
if (require.main === module) {
|
||||
const { benchmarkPatterns, revisions } = getArguments(process.argv.slice(2));
|
||||
prepareAndRunBenchmarks(benchmarkPatterns, revisions);
|
||||
}
|
||||
29
benchmark/colors.js
Normal file
29
benchmark/colors.js
Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict';
|
||||
|
||||
function red(str) {
|
||||
return '\u001b[31m' + str + '\u001b[0m';
|
||||
}
|
||||
|
||||
function green(str) {
|
||||
return '\u001b[32m' + str + '\u001b[0m';
|
||||
}
|
||||
|
||||
function yellow(str) {
|
||||
return '\u001b[33m' + str + '\u001b[0m';
|
||||
}
|
||||
|
||||
function cyan(str) {
|
||||
return '\u001b[36m' + str + '\u001b[0m';
|
||||
}
|
||||
|
||||
function grey(str) {
|
||||
return '\u001b[90m' + str + '\u001b[0m';
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
red,
|
||||
green,
|
||||
yellow,
|
||||
cyan,
|
||||
grey,
|
||||
};
|
||||
83
benchmark/fork.js
Normal file
83
benchmark/fork.js
Normal file
@@ -0,0 +1,83 @@
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const cp = require('child_process');
|
||||
|
||||
// Clocks the time taken to execute a test per cycle (secs).
|
||||
/**
 * Runs `fn` synchronously `count` times and returns the total elapsed
 * wall-clock time in nanoseconds (as a regular number).
 */
function clock(count, fn) {
  const begin = process.hrtime.bigint();
  let remaining = count;
  while (remaining > 0) {
    fn();
    remaining -= 1;
  }
  return Number(process.hrtime.bigint() - begin);
}
|
||||
|
||||
/**
 * Awaits `fn` sequentially `count` times and resolves with the total
 * elapsed wall-clock time in nanoseconds (as a regular number).
 */
async function clockAsync(count, fn) {
  const begin = process.hrtime.bigint();
  let remaining = count;
  while (remaining > 0) {
    await fn();
    remaining -= 1;
  }
  return Number(process.hrtime.bigint() - begin);
}
|
||||
|
||||
// Worker entry point: when this file is fork()ed (see sampleModule in this
// file), it loads the benchmark module named by BENCHMARK_MODULE_PATH,
// measures it, and reports { name, clocked, memUsed } to the parent via IPC.
if (require.main === module) {
  const modulePath = process.env.BENCHMARK_MODULE_PATH;
  assert(typeof modulePath === 'string');
  // process.send only exists when the process was forked with an IPC channel.
  assert(process.send);
  // NOTE: this `module` shadows the CommonJS `module` used in the guard above.
  const module = require(modulePath);

  if (module.measureAsync) {
    async function run() {
      await clockAsync(7, module.measureAsync); // warm up
      // assumes the process was started with --expose-gc — TODO confirm
      global.gc();
      process.nextTick(async () => {
        const memBaseline = process.memoryUsage().heapUsed;
        const clocked = await clockAsync(module.count, module.measureAsync);
        // Report per-iteration averages to the parent process.
        process.send({
          name: module.name,
          clocked: clocked / module.count,
          memUsed: (process.memoryUsage().heapUsed - memBaseline) / module.count,
        });
      });
    }
    // NOTE(review): floating promise — a rejection inside run() surfaces as an
    // unhandled rejection instead of an IPC error message.
    run();
  } else {
    clock(7, module.measure); // warm up
    // assumes the process was started with --expose-gc — TODO confirm
    global.gc();
    process.nextTick(() => {
      const memBaseline = process.memoryUsage().heapUsed;
      const clocked = clock(module.count, module.measure);
      // Report per-iteration averages to the parent process.
      process.send({
        name: module.name,
        clocked: clocked / module.count,
        memUsed: (process.memoryUsage().heapUsed - memBaseline) / module.count,
      });
    });
  }
}
|
||||
|
||||
/**
 * Forks this file as a benchmark worker for the module at `modulePath` and
 * resolves with the { name, clocked, memUsed } message the worker reports.
 * Rejects if the child closes without having sent a result.
 */
function sampleModule(modulePath) {
  const sample = new Promise((resolve, reject) => {
    const child = cp.fork(__filename, {
      env: {
        NODE_ENV: 'production',
        BENCHMARK_MODULE_PATH: modulePath,
      },
    });

    let result;
    let failure;
    child.on('message', (msg) => {
      result = msg;
    });
    child.on('error', (err) => {
      failure = err;
    });
    child.on('close', () => {
      if (result) {
        resolve(result);
      } else {
        reject(failure || new Error('Forked process closed without error'));
      }
    });
  });

  // Collect the parent's garbage between samples so successive measurements
  // start from a comparable heap.
  return sample.then((result) => {
    global.gc();
    return result;
  });
}

module.exports = { sampleModule };
|
||||
@@ -1,45 +0,0 @@
|
||||
apiDefinitions:
|
||||
sample: ./syntetic/to_bundle/bundle.yaml
|
||||
simpleDefinition: ./syntetic/syntetic.yaml
|
||||
rebilly: ./openapi-directory/rebilly-full.yaml
|
||||
|
||||
resolve:
|
||||
http:
|
||||
headers:
|
||||
- name: Client-Header
|
||||
value: sometestvalue
|
||||
matches: api.redoc.ly/.*
|
||||
- name: Custom
|
||||
envVariable: TEST_VAR
|
||||
matches: api.redoc.ly/.*
|
||||
|
||||
lint:
|
||||
codeframes: on
|
||||
rules:
|
||||
bundler: off
|
||||
debug-info: off
|
||||
|
||||
parameterPartial: warning
|
||||
parameterWithAllOf: warning
|
||||
|
||||
oas3-schema/parameter: off
|
||||
oas3-schema/external-docs:
|
||||
url: off
|
||||
|
||||
string-matcher: off
|
||||
|
||||
path-param-exists: on
|
||||
operation-2xx-response: on
|
||||
unique-parameter-names: on
|
||||
no-unused-schemas: on
|
||||
operation-operationId-unique: on
|
||||
path-declarations-must-exist: on
|
||||
|
||||
api-servers: on
|
||||
license-url: on
|
||||
no-extra-fields: on
|
||||
operation-description: off
|
||||
operation-operationId: off
|
||||
operation-tags: off
|
||||
provide-contact: off
|
||||
servers-no-trailing-slash: off
|
||||
@@ -1,18 +0,0 @@
|
||||
Contibutor:
|
||||
description: Contributor Link
|
||||
properties:
|
||||
publicationId:
|
||||
description: >-
|
||||
An ID for the publication. This can be lifted from response of
|
||||
publications above
|
||||
type: string
|
||||
role:
|
||||
description: >-
|
||||
Role of the user identified by userId in the publication identified by
|
||||
`publicationId`. *editor* or *writer*
|
||||
type: string
|
||||
userId:
|
||||
description: A user ID of the contributor.
|
||||
type: string
|
||||
title: Contributor
|
||||
type: object
|
||||
@@ -1,3 +0,0 @@
|
||||
license:
|
||||
name: MIT
|
||||
url: google.com
|
||||
@@ -1,445 +0,0 @@
|
||||
swagger: '2.0'
|
||||
schemes:
|
||||
- https
|
||||
host: api.medium.com
|
||||
basePath: /v1
|
||||
info:
|
||||
contact:
|
||||
name: Hossain Khan
|
||||
url: 'https://github.com/amardeshbd/medium-api-specification'
|
||||
description: |
|
||||
Medium’s unofficial API documentation using OpenAPI specification.
|
||||
|
||||
# Official API
|
||||
Official API document can also be viewed for most up to date API spec at [https://github.com/Medium/medium-api-docs](https://github.com/Medium/medium-api-docs).
|
||||
|
||||
Developer Blog - [Welcome to the Medium API](https://medium.com/blog/welcome-to-the-medium-api-3418f956552)
|
||||
termsOfService: 'https://medium.com/@feerst/2b405a832a2f'
|
||||
license:
|
||||
$ref: ./linked-tags.yaml#/license
|
||||
title: Medium.com
|
||||
version: 1.0.0
|
||||
x-apisguru-categories:
|
||||
- media
|
||||
- social
|
||||
x-logo:
|
||||
url: 'https://raw.githubusercontent.com/Medium/medium-logos/master/monogram/Monogram.png'
|
||||
x-origin:
|
||||
- format: swagger
|
||||
url: 'https://raw.githubusercontent.com/amardeshbd/medium-api-specification/master/medium-api-specification.yaml'
|
||||
version: '2.0'
|
||||
x-providerName: medium.com
|
||||
x-tags:
|
||||
- blog
|
||||
- social journalism
|
||||
- publishing platform
|
||||
x-unofficialSpec: true
|
||||
externalDocs:
|
||||
url: 'https://github.com/Medium/medium-api-docs'
|
||||
produces:
|
||||
- application/json
|
||||
securityDefinitions:
|
||||
BearerToken:
|
||||
description: |
|
||||
Self-issued access tokens (described in user-facing copy as integration tokens) are explicitly designed for desktop integrations where implementing browser-based authentication is non-trivial, or software like plugins where it is impossible to secure a client secret. You should not request that a user give you an integration token if you don’t meet these criteria. Users will be cautioned within Medium to treat integration tokens like passwords, and dissuaded from making them generally available.
|
||||
|
||||
Users can generate an access token from the [Settings page](https://medium.com/me/settings) of their Medium account.
|
||||
|
||||
You should instruct your user to visit this URL and generate an integration token from the Integration Tokens section. You should suggest a description for this token - typically the name of your product or feature - and use it consistently for all users.
|
||||
|
||||
Self-issued access tokens currently grant the `basicProfile` and `publishPost` scope. A future iteration of the API will require a user to select the scope they wish to grant access to.
|
||||
|
||||
Self-issued access tokens do not expire, though they may be revoked by the user at any time.
|
||||
in: header
|
||||
name: Authorization
|
||||
type: apiKey
|
||||
OauthSecurity:
|
||||
authorizationUrl: 'https://medium.com/m/oauth/authorize'
|
||||
description: First you must register an application on Medium. Then we will supply you a clientId and a clientSecret with which you may access Medium’s API. Each integration should have its own clientId and clientSecret. The clientSecret should be treated like a password and stored securely.
|
||||
flow: accessCode
|
||||
scopes:
|
||||
basicProfile: Grants basic access to a user’s profile (not including their email).
|
||||
listPublications: Grants the ability to list publications related to the user.
|
||||
publishPost: Grants the ability to publish a post to the user’s profile.
|
||||
uploadImage: |
|
||||
Grants the ability to upload an image for use within a Medium post.
|
||||
|
||||
NOTE - This is an **extended permission**.
|
||||
|
||||
Integrations are not permitted to request extended scope from users without explicit prior permission from Medium. Attempting to request these permissions through the standard user authentication flow will result in an error if extended scope has not been authorized for an integration.
|
||||
tokenUrl: 'https://medium.com/v1/tokens'
|
||||
type: oauth2
|
||||
paths:
|
||||
/me:
|
||||
get:
|
||||
description: Returns details of the user who has granted permission to the application.
|
||||
responses:
|
||||
'200':
|
||||
description: OK
|
||||
schema:
|
||||
$ref: './linked-definition.yaml#/Contibutor'
|
||||
'401':
|
||||
description: The `accessToken` is invalid or has been revoked.
|
||||
security:
|
||||
- BearerToken: []
|
||||
- OauthSecurity:
|
||||
- basicProfile
|
||||
summary: User details
|
||||
tags:
|
||||
- Users
|
||||
'/publications/{publicationId}/contributors':
|
||||
get:
|
||||
description: 'This endpoint returns a list of contributors for a given publication. In other words, a list of Medium users who are allowed to publish under a publication, as well as a description of their exact role in the publication (for now, either an editor or a writer).'
|
||||
parameters:
|
||||
- description: A unique identifier for the publication.
|
||||
in: path
|
||||
name: publicationId
|
||||
required: true
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/ContibutorResponse'
|
||||
'401':
|
||||
description: The `accessToken` is invalid or has been revoked.
|
||||
security:
|
||||
- BearerToken: []
|
||||
- OauthSecurity:
|
||||
- listPublications
|
||||
summary: Contributors of Publication
|
||||
tags:
|
||||
- Publications
|
||||
- Users
|
||||
'/publications/{publicationId}/posts':
|
||||
post:
|
||||
consumes:
|
||||
- application/json
|
||||
description: |
|
||||
creating a post and associating it with a publication on Medium. The request also shows this association, considering posts a collection of resources under a publication
|
||||
|
||||
There are additional rules around publishing that each request to this API must respect:
|
||||
- If the authenticated user is an 'editor' for the publication, they can create posts with any publish status. Posts published as 'public' or 'unlisted' will appear in collection immediately, while posts created as 'draft' will remain in pending state under publication.
|
||||
- If the authenticated user is a 'writer' for the chosen publication, they can only create a post as a 'draft'. That post will remain in pending state under publication until an editor for the publication approves it.
|
||||
- If the authenticated user is neither a 'writer' nor an 'editor', they are not allowed to create any posts in a publication.
|
||||
parameters:
|
||||
- description: Here publicationId is the id of the publication the post is being created under. The publicationId can be acquired from the API for listing user’s publications.
|
||||
in: path
|
||||
name: publicationId
|
||||
required: true
|
||||
type: string
|
||||
- description: Creates a post for publication.
|
||||
in: body
|
||||
name: body
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/Post'
|
||||
responses:
|
||||
'200':
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/PostDetails'
|
||||
'400':
|
||||
description: 'Required fields were invalid, not specified.'
|
||||
'401':
|
||||
description: The `accessToken` is invalid or has been revoked.
|
||||
'403':
|
||||
description: The `publicationId` in request path doesn’t point to a publication that the user can publish into.
|
||||
security:
|
||||
- BearerToken: []
|
||||
- OauthSecurity:
|
||||
- publishPost
|
||||
summary: Create Publication Post
|
||||
tags:
|
||||
- Posts
|
||||
- Publications
|
||||
'/users/{authorId}/posts':
|
||||
post:
|
||||
consumes:
|
||||
- application/json
|
||||
description: Creates a post on the authenticated user’s profile.
|
||||
parameters:
|
||||
- description: authorId is the user id of the authenticated user.
|
||||
in: path
|
||||
name: authorId
|
||||
required: true
|
||||
type: string
|
||||
- description: Creates a post for user.
|
||||
in: body
|
||||
name: body
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/Post'
|
||||
responses:
|
||||
'200':
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/PostDetails'
|
||||
'400':
|
||||
description: 'Required fields were invalid, not specified.'
|
||||
'401':
|
||||
description: The `accessToken` is invalid or has been revoked.
|
||||
'403':
|
||||
description: 'The user does not have permission to publish, or the authorId in the request path points to wrong/non-existent user.'
|
||||
security:
|
||||
- BearerToken: []
|
||||
- OauthSecurity:
|
||||
- publishPost
|
||||
summary: Create User Post
|
||||
tags:
|
||||
- Users
|
||||
- Posts
|
||||
'/users/{userId}/publications':
|
||||
get:
|
||||
description: 'Returns a full list of publications that the user is related to in some way. This includes all publications the user is subscribed to, writes to, or edits.'
|
||||
parameters:
|
||||
- description: A unique identifier for the user.
|
||||
in: path
|
||||
name: userId
|
||||
required: true
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/PublicationResponse'
|
||||
'401':
|
||||
description: 'The `accessToken` is invalid, lacks the `listPublications` scope or has been revoked.'
|
||||
'403':
|
||||
description: The request attempts to list publications for another user.
|
||||
security:
|
||||
- BearerToken: []
|
||||
- OauthSecurity:
|
||||
- listPublications
|
||||
summary: User's publications
|
||||
tags:
|
||||
- Publications
|
||||
definitions:
|
||||
ContibutorResponse:
|
||||
description: list of contributors for a given publication
|
||||
example:
|
||||
data:
|
||||
- publicationId: b45573563f5a
|
||||
role: editor
|
||||
userId: 13a06af8f81849c64dafbce822cbafbfab7ed7cecf82135bca946807ea351290d
|
||||
- publicationId: b45573563f5a
|
||||
role: editor
|
||||
userId: 1c9c63b15b874d3e354340b7d7458d55e1dda0f6470074df1cc99608a372866ac
|
||||
- publicationId: b45573563f5a
|
||||
role: editor
|
||||
userId: 1cc07499453463518b77d31650c0b53609dc973ad8ebd33690c7be9236e9384ad
|
||||
- publicationId: b45573563f5a
|
||||
role: writer
|
||||
userId: 196f70942410555f4b3030debc4f199a0d5a0309a7b9df96c57b8ec6e4b5f11d7
|
||||
- publicationId: b45573563f5a
|
||||
role: writer
|
||||
userId: 14d4a581f21ff537d245461b8ff2ae9b271b57d9554e25d863e3df6ef03ddd480
|
||||
properties:
|
||||
data:
|
||||
items:
|
||||
$ref: '#/definitions/Contibutor'
|
||||
type: array
|
||||
title: Contributors list for a publication
|
||||
Post:
|
||||
example:
|
||||
canonicalUrl: 'http://jamietalbot.com/posts/liverpool-fc'
|
||||
content: <h1>Liverpool FC</h1><p>You’ll never walk alone.</p>
|
||||
contentFormat: html
|
||||
publishStatus: public
|
||||
tags:
|
||||
- football
|
||||
- sport
|
||||
- Liverpool
|
||||
title: Liverpool FC
|
||||
properties:
|
||||
canonicalUrl:
|
||||
description: 'The original home of this content, if it was originally published elsewhere.'
|
||||
type: string
|
||||
content:
|
||||
description: 'The body of the post, in a valid, semantic, HTML fragment, or Markdown. Further markups may be supported in the future. For a full list of accepted HTML tags, see here. If you want your title to appear on the post page, you must also include it as part of the post content.'
|
||||
type: string
|
||||
contentFormat:
|
||||
description: 'The format of the "content" field. There are two valid values, "html", and "markdown"'
|
||||
type: string
|
||||
license:
|
||||
default: all-rights-reserved
|
||||
description: 'The license of the post. Valid values are `all-rights-reserved`, `cc-40-by`, `cc-40-by-sa`, `cc-40-by-nd`, `cc-40-by-nc`, `cc-40-by-nc-nd`, `cc-40-by-nc-sa`, `cc-40-zero`, `public-domain`. The default is `all-rights-reserved`.'
|
||||
enum:
|
||||
- all-rights-reserved
|
||||
- cc-40-by
|
||||
- cc-40-by-sa
|
||||
- cc-40-by-nd
|
||||
- cc-40-by-nc
|
||||
- cc-40-by-nc-nd
|
||||
- cc-40-by-nc-sa
|
||||
- cc-40-zero
|
||||
- public-domain
|
||||
type: string
|
||||
publishStatus:
|
||||
default: public
|
||||
description: 'The status of the post. Valid values are `public`, `draft`, or `unlisted`. The default is `public`.'
|
||||
enum:
|
||||
- public
|
||||
- draft
|
||||
- unlisted
|
||||
type: string
|
||||
tags:
|
||||
description: Tags to classify the post. Only the first three will be used. Tags longer than 25 characters will be ignored.
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
title:
|
||||
description: 'The title of the post. Note that this title is used for SEO and when rendering the post as a listing, but will not appear in the actual post—for that, the title must be specified in the content field as well. Titles longer than 100 characters will be ignored. In that case, a title will be synthesized from the first content in the post when it is published.'
|
||||
type: string
|
||||
required:
|
||||
- title
|
||||
- contentFormat
|
||||
- content
|
||||
title: Post
|
||||
type: object
|
||||
PostDetails:
|
||||
description: Details of Post
|
||||
example:
|
||||
data:
|
||||
authorId: 5303d74c64f66366f00cb9b2a94f3251bf5
|
||||
canonicalUrl: 'http://jamietalbot.com/posts/liverpool-fc'
|
||||
id: e6f36a
|
||||
license: all-rights-reserved
|
||||
licenseUrl: 'https://medium.com/policy/9db0094a1e0f'
|
||||
publishStatus: public
|
||||
publishedAt: 1442286338435
|
||||
tags:
|
||||
- football
|
||||
- sport
|
||||
- Liverpool
|
||||
title: Liverpool FC
|
||||
url: 'https://medium.com/@majelbstoat/liverpool-fc-e6f36a'
|
||||
properties:
|
||||
authorId:
|
||||
description: The userId of the post’s author
|
||||
type: string
|
||||
canonicalUrl:
|
||||
description: 'The canonical URL of the post. If canonicalUrl was not specified in the creation of the post, this field will not be present.'
|
||||
type: string
|
||||
id:
|
||||
description: A unique identifier for the post.
|
||||
type: string
|
||||
license:
|
||||
description: The license of the post.
|
||||
enum:
|
||||
- all-rights-reserved
|
||||
- cc-40-by
|
||||
- cc-40-by-sa
|
||||
- cc-40-by-nd
|
||||
- cc-40-by-nc
|
||||
- cc-40-by-nc-nd
|
||||
- cc-40-by-nc-sa
|
||||
- cc-40-zero
|
||||
- public-domain
|
||||
type: string
|
||||
licenseUrl:
|
||||
description: The URL to the license of the post.
|
||||
type: string
|
||||
publishStatus:
|
||||
description: The publish status of the post.
|
||||
type: string
|
||||
publishedAt:
|
||||
description: 'The post’s published date. If created as a draft, this field will not be present.'
|
||||
format: date
|
||||
type: string
|
||||
tags:
|
||||
description: The post’s tags
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
title:
|
||||
description: The post’s title
|
||||
type: string
|
||||
url:
|
||||
description: The URL of the post on Medium
|
||||
type: string
|
||||
title: Post Details
|
||||
Publication:
|
||||
description: 'Publications provide a way for authors to work collaboratively within a common narrative framework, brand or point of view.'
|
||||
example:
|
||||
description: What is this thing and how does it work?
|
||||
id: b969ac62a46b
|
||||
imageUrl: 'https://cdn-images-1.medium.com/fit/c/200/200/0*ae1jbP_od0W6EulE.jpeg'
|
||||
name: About Medium
|
||||
url: 'https://medium.com/about'
|
||||
properties:
|
||||
description:
|
||||
description: Short description of the publication
|
||||
type: string
|
||||
id:
|
||||
description: A unique identifier for the publication.
|
||||
type: string
|
||||
imageUrl:
|
||||
description: The URL to the publication’s image/logo
|
||||
type: string
|
||||
name:
|
||||
description: The publication’s name on Medium.
|
||||
type: string
|
||||
url:
|
||||
description: The URL to the publication’s homepage
|
||||
type: string
|
||||
title: Publication Info
|
||||
type: object
|
||||
PublicationResponse:
|
||||
description: Container object for publication list.
|
||||
example:
|
||||
data:
|
||||
- description: What is this thing and how does it work?
|
||||
id: b969ac62a46b
|
||||
imageUrl: 'https://cdn-images-1.medium.com/fit/c/200/200/0*ae1jbP_od0W6EulE.jpeg'
|
||||
name: About Medium
|
||||
url: 'https://medium.com/about'
|
||||
- description: Medium’s Developer resources
|
||||
id: b45573563f5a
|
||||
imageUrl: 'https://cdn-images-1.medium.com/fit/c/200/200/1*ccokMT4VXmDDO1EoQQHkzg@2x.png'
|
||||
name: Developers
|
||||
url: 'https://medium.com/developers'
|
||||
properties:
|
||||
data:
|
||||
items:
|
||||
$ref: '#/definitions/Publication'
|
||||
type: array
|
||||
title: Publication List Container
|
||||
User:
|
||||
example:
|
||||
id: 5303d74c64f66366f00cb9b2a94f3251bf5
|
||||
imageUrl: 'https://images.medium.com/0*fkfQiTzT7TlUGGyI.png'
|
||||
name: Jamie Talbot
|
||||
url: 'https://medium.com/@majelbstoat'
|
||||
username: majelbstoat
|
||||
properties:
|
||||
id:
|
||||
description: A unique identifier for the user.
|
||||
type: string
|
||||
imageUrl:
|
||||
description: The URL to the user’s avatar on Medium
|
||||
type: string
|
||||
name:
|
||||
description: The user’s name on Medium.
|
||||
type: string
|
||||
url:
|
||||
description: The URL to the user’s profile on Medium
|
||||
type: string
|
||||
username:
|
||||
description: The user’s username on Medium.
|
||||
type: string
|
||||
type: object
|
||||
UserResponse:
|
||||
description: Container object for user info
|
||||
example:
|
||||
data:
|
||||
id: 5303d74c64f66366f00cb9b2a94f3251bf5
|
||||
imageUrl: 'https://images.medium.com/0*fkfQiTzT7TlUGGyI.png'
|
||||
name: Jamie Talbot
|
||||
url: 'https://medium.com/@majelbstoat'
|
||||
username: majelbstoat
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/definitions/User'
|
||||
title: User Container
|
||||
type: object
|
||||
@@ -1,48 +0,0 @@
|
||||
/* eslint-disable max-classes-per-file */
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
/**
 * Visitor rule that applies "x-redocly-overlay" patches in place: when a
 * node carries the extension, the referenced JSON file (resolved relative to
 * the definition file's directory) is read and its top-level keys are copied
 * onto the node, after which the extension key itself is removed. Nodes
 * whose overlay file does not exist are left untouched.
 */
class OverlaysMerger {
  static get rule() {
    return 'writeCheck';
  }

  any() {
    return {
      onEnter: (node, type, ctx) => {
        const overlay = node['x-redocly-overlay'];
        if (!overlay) return;

        const baseDir = path.dirname(ctx.filePath);
        const patchFile = path.resolve(baseDir, overlay.path);
        if (!fs.existsSync(patchFile)) return;

        const patch = JSON.parse(fs.readFileSync(patchFile));
        for (const key of Object.keys(patch)) {
          node[key] = patch[key];
        }
        delete node['x-redocly-overlay'];
      },
    };
  }
}
|
||||
|
||||
/**
 * Debug visitor rule that logs every OpenAPIInfo node it enters to the
 * console, for manually inspecting merge results.
 */
class MergeChecker {
  static get rule() {
    return 'mergerCheck';
  }

  OpenAPIInfo() {
    return {
      onEnter: (node, type, ctx) => {
        console.log(node);
      },
    };
  }
}
|
||||
|
||||
// The plugin exposes its rules as an array of rule classes.
module.exports = [
  OverlaysMerger, MergeChecker,
];
|
||||
@@ -1,6 +0,0 @@
|
||||
rules:
|
||||
no-unused-schemas:
|
||||
level: warning
|
||||
excludePaths:
|
||||
- 'rebilly-full.yaml#/components/schemas/Unused'
|
||||
debug-info: on
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,9 +0,0 @@
|
||||
# Test markdown file
|
||||
|
||||
This is a test markdown file.
|
||||
|
||||
Include it in your OpenAPI definition description like this:
|
||||
|
||||
|
||||
description:
|
||||
$ref: path/to/file.md
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"title": "Example OpenAPI 3 definition. Patched.",
|
||||
"version": "2.1"
|
||||
}
|
||||
@@ -1,90 +0,0 @@
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
x-redocly-overlay:
|
||||
path: overlay-info.yaml
|
||||
title: Example OpenAPI 3 definition. Valid.
|
||||
version: 1.0
|
||||
contact:
|
||||
name: Ivan Goncharov
|
||||
email: ivan@redoc.ly
|
||||
license:
|
||||
name: example
|
||||
url: example.org
|
||||
|
||||
servers:
|
||||
- url: 'http://example.org'
|
||||
|
||||
paths:
|
||||
user:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/example'
|
||||
get:
|
||||
operationId: userGet
|
||||
description: Get user
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
project:
|
||||
get:
|
||||
operationId: projectGet
|
||||
description: Get project
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
'/user/{id}':
|
||||
get:
|
||||
parameters:
|
||||
- in: path
|
||||
name: test
|
||||
description: User id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
operationId: withPathParam
|
||||
description: Get user by id
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
externalDocs:
|
||||
description:
|
||||
$ref: inc/docs-description.md
|
||||
url: googlecom
|
||||
components:
|
||||
securitySchemes:
|
||||
JWT:
|
||||
description: >
|
||||
You can create a JSON Web Token (JWT) via our [JWT Session
|
||||
resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
|
||||
Usage format: `Bearer <JWT>`
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
parameters:
|
||||
example:
|
||||
allOf:
|
||||
- name: bla
|
||||
in: query
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
- description: blo
|
||||
- description: bla
|
||||
genericExample:
|
||||
name: example
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
@@ -1,88 +0,0 @@
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
x-redocly-overlay:
|
||||
path: overlay-info.yaml
|
||||
title: Example OpenAPI 3 definition. Valid.
|
||||
version: 1.0
|
||||
contact:
|
||||
name: Ivan Goncharov
|
||||
email: ivan@redoc.ly
|
||||
license:
|
||||
name: example
|
||||
url: example.org
|
||||
|
||||
servers:
|
||||
- url: 'http://example.org'
|
||||
|
||||
paths:
|
||||
user:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/example'
|
||||
get:
|
||||
operationId: userGet
|
||||
description: Get user
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
project:
|
||||
get:
|
||||
operationId: projectGet
|
||||
description: Get project
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
'/user/{id}':
|
||||
get:
|
||||
parameters:
|
||||
- in: path
|
||||
name: test
|
||||
description: User id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
operationId: withPathParam
|
||||
description: Get user by id
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
externalDocs:
|
||||
description:
|
||||
$ref: inc/docs-description.md
|
||||
url: googlecom
|
||||
components:
|
||||
securitySchemes:
|
||||
JWT:
|
||||
description: >
|
||||
You can create a JSON Web Token (JWT) via our [JWT Session
|
||||
resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
|
||||
Usage format: `Bearer <JWT>`
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
parameters:
|
||||
example:
|
||||
name: bla
|
||||
in: query
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
description: blo
|
||||
genericExample:
|
||||
name: example
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
@@ -1,88 +0,0 @@
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
x-redocly-overlay:
|
||||
path: overlay-info.yaml
|
||||
title: Example OpenAPI 3 definition. Valid.
|
||||
version: 1.0
|
||||
contact:
|
||||
name: Ivan Goncharov
|
||||
email: ivan@redoc.ly
|
||||
license:
|
||||
$ref: https://api.redoc.online/registry/oleses-testing/API%20petstore/github1/bundle/Default-branch/openapi.yaml#/info/license
|
||||
|
||||
servers:
|
||||
- url: 'http://example.org'
|
||||
|
||||
paths:
|
||||
user:
|
||||
some: test
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/example'
|
||||
get:
|
||||
operationId: userGet
|
||||
description: Get user
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
project:
|
||||
get:
|
||||
operationId: projectGet
|
||||
description: Get project
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
'/user/{id}/{test}':
|
||||
get:
|
||||
parameters:
|
||||
- in: path
|
||||
name: test
|
||||
description: User id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
operationId: withPathParam
|
||||
description: Get user by id
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
externalDocs:
|
||||
description:
|
||||
$ref: inc/docs-description.md
|
||||
url: googlecom
|
||||
components:
|
||||
securitySchemes:
|
||||
JWT:
|
||||
description: >
|
||||
You can create a JSON Web Token (JWT) via our [JWT Session
|
||||
resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
|
||||
Usage format: `Bearer <JWT>`
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
parameters:
|
||||
example:
|
||||
name: bla
|
||||
in: querya
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
description: blo
|
||||
genericExample:
|
||||
name: example
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
@@ -1,30 +0,0 @@
|
||||
typeExtension: ../typeExtension.js
|
||||
customRules: ../customRules.js
|
||||
codeframes: on
|
||||
rules:
|
||||
bundler: off
|
||||
debug-info: off
|
||||
|
||||
parameterPartial: warning
|
||||
parameterWithAllOf: warning
|
||||
|
||||
oas3-schema/parameter: off
|
||||
oas3-schema/external-docs:
|
||||
url: off
|
||||
|
||||
|
||||
path-param-exists: on
|
||||
operation-2xx-response: on
|
||||
unique-parameter-names: on
|
||||
no-unused-schemas: on
|
||||
operation-operationId-unique: on
|
||||
path-declarations-must-exist: on
|
||||
|
||||
api-servers: on
|
||||
license-url: on
|
||||
no-extra-fields: on
|
||||
operation-description: on
|
||||
operation-operationId: on
|
||||
operation-tags: off
|
||||
provide-contact: on
|
||||
servers-no-trailing-slash: on
|
||||
@@ -1,48 +0,0 @@
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
title: nanana
|
||||
version: fafafa
|
||||
license:
|
||||
name: bla
|
||||
contact:
|
||||
name: Sergey
|
||||
email: knidarkness@gmail.com
|
||||
paths:
|
||||
/api:
|
||||
get:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/parameter.name.yml'
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
post:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/parameter.name.yml'
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
components:
|
||||
schemas:
|
||||
exist:
|
||||
$ref: '#/components/schemas/exist'
|
||||
parameters:
|
||||
parameter.name.yml:
|
||||
in: query
|
||||
required: true
|
||||
name: name
|
||||
description: A name parameter
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
noRef:
|
||||
$ref: 'bad.yaml#/does/not/exist'
|
||||
@@ -1,9 +0,0 @@
|
||||
in: query
|
||||
required: true
|
||||
name: anotherName
|
||||
description: A name parameter
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
@@ -1,9 +0,0 @@
|
||||
in: query
|
||||
required: true
|
||||
name: name
|
||||
description: A name paramteter
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
@@ -1,6 +0,0 @@
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
@@ -1,20 +0,0 @@
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
title: nanana
|
||||
version: fafafa
|
||||
license:
|
||||
name: bla
|
||||
contact:
|
||||
name: Sergey
|
||||
email: knidarkness@gmail.com
|
||||
paths:
|
||||
/api:
|
||||
get:
|
||||
$ref: ./operations/api/api-get.yaml
|
||||
post:
|
||||
$ref: ./operations/api/api-post.yaml
|
||||
/anotherApi:
|
||||
get:
|
||||
$ref: ./operations/anotherApi/api-get.yaml
|
||||
post:
|
||||
$ref: ./operations/anotherApi/api-post.yaml
|
||||
@@ -1,9 +0,0 @@
|
||||
parameters:
|
||||
- $ref: ../../components/anotherParameters/parameter.name.yml
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
@@ -1,9 +0,0 @@
|
||||
parameters:
|
||||
- $ref: ../../components/anotherParameters/parameter.name.yml
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
@@ -1,9 +0,0 @@
|
||||
parameters:
|
||||
- $ref: ../../components/parameters/parameter.name.yml
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
@@ -1,9 +0,0 @@
|
||||
parameters:
|
||||
- $ref: ../../components/parameters/parameter.name.yml
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
@@ -1,74 +0,0 @@
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
title: nanana
|
||||
version: fafafa
|
||||
license:
|
||||
name: bla
|
||||
contact:
|
||||
name: Sergey
|
||||
email: knidarkness@gmail.com
|
||||
paths:
|
||||
/api:
|
||||
get:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/parameter.name'
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
post:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/parameter.name'
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
/anotherApi:
|
||||
get:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/anotherParameters_parameter.name'
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
post:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/anotherParameters_parameter.name'
|
||||
responses:
|
||||
'200':
|
||||
description: blbbbb
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
components:
|
||||
parameters:
|
||||
parameter.name:
|
||||
in: query
|
||||
required: true
|
||||
name: name
|
||||
description: A name paramteter
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
anotherParameters_parameter.name:
|
||||
in: query
|
||||
required: true
|
||||
name: anotherName
|
||||
description: A name paramteter
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
@@ -1,18 +0,0 @@
|
||||
module.exports = (types) => ({
|
||||
...types,
|
||||
OpenAPIParameter: {
|
||||
...types.OpenAPIParameter,
|
||||
// enable dynamic type resolution for OpenAPIParameter and return either OpenAPIParameterWithAllOf or regular OpenAPIParameter
|
||||
resolveType: (node) => (node.allOf ? 'OpenAPIParameterWithAllOf' : 'OpenAPIParameter'),
|
||||
},
|
||||
// define OpenAPIParameterWithAllOf
|
||||
OpenAPIParameterWithAllOf: {
|
||||
properties: {
|
||||
allOf: 'OpenAPIParameterPartial',
|
||||
},
|
||||
},
|
||||
// define OpenAPIParameterPartial
|
||||
OpenAPIParameterPartial: {
|
||||
...types.OpenAPIParameter,
|
||||
},
|
||||
});
|
||||
@@ -1,174 +0,0 @@
|
||||
# Custom visitors
|
||||
|
||||
### Concepts
|
||||
|
||||
#### Type definitions
|
||||
|
||||
`openapi-cli` in its core has a type tree which defines the structure of the OpenAPI definition. `openapi-cli` then uses it to do type-aware traversal of OpenAPI Document.
|
||||
|
||||
Type tree is built from top level `Types` which can link to child types:
|
||||
It looks like below:
|
||||
|
||||
```js
|
||||
{
|
||||
// ...
|
||||
OpenAPIParameter: {
|
||||
isIdempotent: false, // we will remove this later, this is temporary hack. ...
|
||||
properties: {
|
||||
name: null, // null means it's a leaf
|
||||
in: null,
|
||||
description: null,
|
||||
required: null,
|
||||
deprecated: null,
|
||||
allowEmptyValue: null,
|
||||
style: null,
|
||||
explode: null,
|
||||
allowReserved: null,
|
||||
example: null,
|
||||
schema: 'OpenAPISchemaObject', // name of the type linked this field
|
||||
content: 'OpenAPIMediaTypeObject',
|
||||
examples: 'OpenAPIExampleMap',
|
||||
},
|
||||
resolveType: (node, definition, ctx) => 'SomeOtherParameterType', // optional function used to dynamically resolve the type of the node based on node/definition/ctx.
|
||||
},
|
||||
// ....
|
||||
}
|
||||
```
|
||||
|
||||
This tree can be modified.
|
||||
|
||||
#### Rules (visitors)
|
||||
|
||||
All the checks are built as visitors. Each rule is a class that registers one or more visitors for some types from type tree:
|
||||
|
||||
```js
|
||||
class OperationDescription {
|
||||
static get rule() {
|
||||
return 'operation-description'; // you can configure the rule by this name in config file
|
||||
}
|
||||
|
||||
|
||||
OpenAPIOperation() {
|
||||
return {
|
||||
onEnter: (node, typeDef, ctx) => {
|
||||
if (!node.description) {
|
||||
return [ctx.createError(`The field 'description' must be present on this level`), 'key')];
|
||||
}
|
||||
return [];
|
||||
},
|
||||
onExit: (node, typeDef, ctx) => {
|
||||
// ...
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Extending type definitions
|
||||
|
||||
Type tree can be extended using js file which is passed as `typeExtension` parameter in the config. It should follow such pattern (similar to reducers):
|
||||
|
||||
```js
|
||||
module.exports = (types) => ({
|
||||
...types,
|
||||
OpenAPIParameter: {
|
||||
...types.OpenAPIParameter,
|
||||
// enable dynamic type resolution for OpenAPIParameter and return either OpenAPIParameterWithAllOf or regular OpenAPIParameter
|
||||
resolveType: node => (node.allOf ? 'OpenAPIParameterWithAllOf' : 'OpenAPIParameter'),
|
||||
},
|
||||
// define OpenAPIParameterWithAllOf
|
||||
OpenAPIParameterWithAllOf: {
|
||||
properties: {
|
||||
allOf: 'OpenAPIParameterPartial',
|
||||
},
|
||||
},
|
||||
// define OpenAPIParameterPartial
|
||||
OpenAPIParameterPartial: {
|
||||
...types.OpenAPIParameter,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Adding custom rules
|
||||
|
||||
To add custom rules user should create another `js` file and set it as `customRules` param in the config. The `js` file should export array of custom rules:
|
||||
|
||||
```js
|
||||
class ValidateOpenAPIParameterPartial {
|
||||
static get rule() {
|
||||
return 'parameterPartial';
|
||||
}
|
||||
|
||||
// register visitor on a new type OpenAPIParameterPartial
|
||||
OpenAPIParameterPartial() {
|
||||
return {
|
||||
onEnter: (node, type, ctx) => {
|
||||
const result = [];
|
||||
let validators = {};
|
||||
if (Object.keys(node).length === 1 && node.description) {
|
||||
// reuse existing code for fields for structural rules code by name
|
||||
validators = {
|
||||
description: ctx.getRule('oas3-schema/parameter').validators.description,
|
||||
};
|
||||
} else {
|
||||
// reuse existing code for fields for structural rules code by name
|
||||
validators = {
|
||||
...ctx.getRule('oas3-schema/parameter').validators,
|
||||
};
|
||||
}
|
||||
|
||||
const fieldMessages = ctx.validateFieldsHelper(validators);
|
||||
result.push(...fieldMessages);
|
||||
|
||||
// example of some custom validations (just as an example)
|
||||
if (node.in && node.in !== 'header') {
|
||||
ctx.path.push('in');
|
||||
result.push(ctx.createError('Only header parameters can be extended with allOf', 'key'));
|
||||
ctx.path.pop();
|
||||
}
|
||||
return result;
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Example of another rule
|
||||
class ParameterWithAllOfRule {
|
||||
static get rule() {
|
||||
return 'parameterWithAllOf';
|
||||
}
|
||||
|
||||
constructor(config) { // config can be passed via rules.parameterWithAllOf in config file
|
||||
this.maxItems = config.maxItems || 2;
|
||||
}
|
||||
|
||||
OpenAPIParameterWithAllOf() {
|
||||
return {
|
||||
onEnter: (node, definition, ctx) => {
|
||||
const result = [];
|
||||
if (node.allOf.length > this.maxItems) {
|
||||
ctx.path.push('allOf');
|
||||
result.push(ctx.createError(`Do not use more that ${this.maxItems} items in allOf for OpenAPI Parameter`, 'key'));
|
||||
ctx.path.pop();
|
||||
}
|
||||
return result;
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
module.exports = [
|
||||
ValidateOpenAPIParameterPartial, ParameterWithAllOfRule,
|
||||
];
|
||||
|
||||
```
|
||||
|
||||
User can provide configuration file `.redocly.yaml` in current working dir.
|
||||
|
||||
```yaml
|
||||
lint:
|
||||
typeExtension: typeExtension.js # relative path to the extended types
|
||||
customRules: customRules.js # relative path to the custom visitors
|
||||
rules:
|
||||
parameterPartial: warning # allowed values are 'error', 'warning' and 'off'
|
||||
parameterWithAllOf: off # by default custom rules are 'on' and set to 'error' severity
|
||||
```
|
||||
@@ -10,37 +10,4 @@ You can do it with `transformers` visitors.
|
||||
|
||||
## Usage
|
||||
|
||||
To create a visitor which will run before the default ones you should define a parameter `transformers` in your `.redocly.yaml` config file and pass it a path to the JavaScript file similar to one you use for usual [custom visitors](CUSTOM_VISITORS.md).
|
||||
|
||||
To change the data in the transformer visitor you can modify the `node` object passed in as a parameter. For example, following example checks if there's a vendor extension `x-redocly-overlay` field inside node, it reads a file determined by the field's content and updates node with files contents.
|
||||
|
||||
```js
|
||||
class OverlaysMerger {
|
||||
static get rule() {
|
||||
return 'writeCheck';
|
||||
}
|
||||
|
||||
any() {
|
||||
return {
|
||||
onEnter: (node, _type, _ctx) => {
|
||||
if (node['x-redocly-overlay']) {
|
||||
const definitionDir = path.dirname(ctx.filePath);
|
||||
const overlayPath = path.resolve(definitionDir, node['x-redocly-overlay'].path);
|
||||
|
||||
if (fs.existsSync(overlayPath)) {
|
||||
const patch = JSON.parse(fs.readFileSync(overlayPath));
|
||||
|
||||
Object.keys(patch).forEach((k) => {
|
||||
node[k] = patch[k];
|
||||
});
|
||||
|
||||
delete node['x-redocly-overlay'];
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The only difference between `customRules` and `transformers` is the execution order. `customRules` are run after the default visitors and `transformers` are run before them.
|
||||
TBD
|
||||
47
docs/custom-visitors.md
Normal file
47
docs/custom-visitors.md
Normal file
@@ -0,0 +1,47 @@
|
||||
# Custom visitors
|
||||
|
||||
### Concepts
|
||||
|
||||
#### Type definitions
|
||||
|
||||
`openapi-cli` in its core has a type tree which defines the structure of the OpenAPI definition. `openapi-cli` then uses it to do type-aware traversal of OpenAPI Document.
|
||||
|
||||
Type tree is built from top level `Types` which can link to child types.
|
||||
|
||||
TBD
|
||||
|
||||
This tree can be modified.
|
||||
|
||||
#### Rules (visitors)
|
||||
|
||||
TBD
|
||||
|
||||
### Extending type definitions
|
||||
|
||||
Type tree can be extended using js file which is passed as `typeExtension` parameter in the config. It should follow such pattern (similar to reducers):
|
||||
|
||||
```js
|
||||
module.exports = (types) => ({
|
||||
...types,
|
||||
OpenAPIParameter: {
|
||||
...types.OpenAPIParameter,
|
||||
// enable dynamic type resolution for OpenAPIParameter and return either OpenAPIParameterWithAllOf or regular OpenAPIParameter
|
||||
resolveType: node => (node.allOf ? 'OpenAPIParameterWithAllOf' : 'OpenAPIParameter'),
|
||||
},
|
||||
// define OpenAPIParameterWithAllOf
|
||||
OpenAPIParameterWithAllOf: {
|
||||
properties: {
|
||||
allOf: 'OpenAPIParameterPartial',
|
||||
},
|
||||
},
|
||||
// define OpenAPIParameterPartial
|
||||
OpenAPIParameterPartial: {
|
||||
...types.OpenAPIParameter,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Adding custom rules
|
||||
|
||||
|
||||
TBD
|
||||
9424
package-lock.json
generated
9424
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
84
package.json
84
package.json
@@ -1,35 +1,24 @@
|
||||
{
|
||||
"name": "@redocly/openapi-cli",
|
||||
"version": "0.12.16",
|
||||
"version": "1.0.0-alpha.1",
|
||||
"description": "",
|
||||
"main": "./dist/index.js",
|
||||
"main": "lib/index.js",
|
||||
"scripts": {
|
||||
"build": "npm run babel && npm run copyFiles",
|
||||
"babel": "babel src --out-dir dist --source-maps inline",
|
||||
"copyFiles": "chmod +x ./dist/index.js && cp ./package.json ./dist/package.json && cp src/.redocly.yaml dist/.redocly.yaml && cp src/preview-docs/default.hbs dist/preview-docs",
|
||||
"lint": "eslint ./src",
|
||||
"prepublishOnly": "npm run build && cp src/.redocly.yaml dist/.redocly.yaml",
|
||||
"test": "jest --coverage"
|
||||
"test": "npm run typecheck && npm run unit",
|
||||
"unit": "jest --coverage --coverageReporters lcov text-summary",
|
||||
"cli": "ts-node src/cli.ts",
|
||||
"build": "rm -rf lib && tsc -p tsconfig.build.json",
|
||||
"benchmark": "node --expose-gc --noconcurrent_sweeping --predictable ./benchmark/benchmark.js",
|
||||
"prettier": "npx prettier --write \"**/*.{ts,js}\"",
|
||||
"typecheck": "tsc --noEmit --skipLibCheck"
|
||||
},
|
||||
"bin": {
|
||||
"openapi": "dist/index.js"
|
||||
"openapi": "lib/cli.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Redocly/openapi-cli.git"
|
||||
},
|
||||
"jest": {
|
||||
"testPathIgnorePatterns": [
|
||||
"/node_modules/",
|
||||
"/dist/"
|
||||
],
|
||||
"coveragePathIgnorePatterns": [
|
||||
"/node_modules/",
|
||||
"/dist/"
|
||||
]
|
||||
},
|
||||
"author": "Sergey Dubovyk <serhii@redoc.ly> (https://redoc.ly/)",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"linter",
|
||||
"OpenAPI",
|
||||
@@ -38,33 +27,38 @@
|
||||
"Swagger linter",
|
||||
"oas"
|
||||
],
|
||||
"contributors": [
|
||||
"Sergey Dubovyk <serhii@redoc.ly> (https://redoc.ly/)",
|
||||
"Roman Hotsiy <roman@redoc.ly> (https://redoc.ly/)"
|
||||
],
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.8.4",
|
||||
"@babel/core": "^7.9.0",
|
||||
"@babel/node": "^7.8.7",
|
||||
"@babel/preset-env": "^7.9.0",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"eslint": "^6.8.0",
|
||||
"eslint-config-airbnb-base": "^14.1.0",
|
||||
"eslint-plugin-babel": "^5.3.0",
|
||||
"eslint-plugin-import": "^2.20.1",
|
||||
"jest": "^25.1.0",
|
||||
"prettier": "^2.0.1",
|
||||
"webpack": "^4.42.0",
|
||||
"webpack-cli": "^3.3.11"
|
||||
"@types/jest": "^25.2.3",
|
||||
"@types/js-yaml": "^3.12.4",
|
||||
"@types/yargs": "^15.0.5",
|
||||
"jest": "^26.0.1",
|
||||
"outdent": "^0.7.1",
|
||||
"prettier": "^2.0.5",
|
||||
"ts-jest": "^26.0.0",
|
||||
"ts-node": "^8.10.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"chalk": "^3.0.0",
|
||||
"chokidar": "^3.3.1",
|
||||
"commander": "^5.0.0",
|
||||
"handlebars": "^4.7.3",
|
||||
"@types/node": "^14.0.4",
|
||||
"colorette": "^1.2.0",
|
||||
"js-yaml": "^3.13.1",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"merge-deep": "^3.0.2",
|
||||
"minimatch": "^3.0.4",
|
||||
"node-fetch": "^2.6.0",
|
||||
"portfinder": "^1.0.25",
|
||||
"simple-websocket": "^8.1.1",
|
||||
"yaml-ast-parser": "0.0.43"
|
||||
"typescript": "^3.9.2",
|
||||
"yaml-ast-parser": "0.0.43",
|
||||
"yargs": "^15.3.1"
|
||||
},
|
||||
"prettier": {
|
||||
"singleQuote": true,
|
||||
"trailingComma": "all",
|
||||
"printWidth": 100
|
||||
},
|
||||
"jest": {
|
||||
"preset": "ts-jest",
|
||||
"testMatch": [
|
||||
"**/__tests__/**/*.test.ts"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
10
resources/.redocly.yaml
Normal file
10
resources/.redocly.yaml
Normal file
@@ -0,0 +1,10 @@
|
||||
lint:
|
||||
plugins:
|
||||
- './local-plugin.js'
|
||||
|
||||
extends:
|
||||
- recommended
|
||||
- local/all
|
||||
rules:
|
||||
operation-2xx-response: warning
|
||||
operation-description: off
|
||||
44
resources/local-plugin.js
Normal file
44
resources/local-plugin.js
Normal file
@@ -0,0 +1,44 @@
|
||||
export const id = 'local';
|
||||
|
||||
export const rules = {
|
||||
oas3: {
|
||||
'paths-kebab-case': () => {
|
||||
return {
|
||||
PathItem(_path, { report, key }) {
|
||||
const segments = key.substr(1).split('/');
|
||||
if (
|
||||
!segments.every((segment) => /^{.+}$/.test(segment) && /[a-z0-9-_.]+/.test(segment))
|
||||
) {
|
||||
report({
|
||||
message: `${key} is not kebab-case`,
|
||||
location: { reportOnKey: true },
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
'boolean-parameter-prefixes': () => {
|
||||
return {
|
||||
Parameter: {
|
||||
Schema(schema, { report, parentLocations, location }, parents) {
|
||||
if (schema.type === 'boolean' && !/^(is|has)[A-Z]/.test(parents.Parameter.name)) {
|
||||
report({
|
||||
message: `Boolean parameter ${parents.Parameter.name} should have a \`is\` or \`has\` prefix`,
|
||||
location: parentLocations.Parameter.append(['name']),
|
||||
});
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const configs = {
|
||||
all: {
|
||||
rules: {
|
||||
'local/paths-kebab-case': 'error',
|
||||
'local/boolean-parameter-prefixes': 'error',
|
||||
},
|
||||
},
|
||||
};
|
||||
137
resources/petstore-with-errors.yaml
Normal file
137
resources/petstore-with-errors.yaml
Normal file
@@ -0,0 +1,137 @@
|
||||
|
||||
openapi: "3.0.0"
|
||||
info:
|
||||
version: 1.0.0
|
||||
title: Swagger Petstore
|
||||
license:
|
||||
name: MIT
|
||||
servers:
|
||||
- url: http://petstore.swagger.io/v1
|
||||
tags:
|
||||
- name: aaa
|
||||
- name: pets
|
||||
paths:
|
||||
/pets?id:
|
||||
get:
|
||||
summary: List all pets
|
||||
operationId: listPets
|
||||
requestBody:
|
||||
content:
|
||||
'application/json':
|
||||
examples:
|
||||
foo:
|
||||
summary: A foo example
|
||||
value: {"foo": "bar"}
|
||||
externalValue: 'http://example.org/foo.json'
|
||||
tags:
|
||||
- pets
|
||||
parameters:
|
||||
- name: limit
|
||||
in: query
|
||||
description: How many items to return at one time (max 100)
|
||||
required: false
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
responses:
|
||||
'200':
|
||||
description: A paged array of pets
|
||||
headers:
|
||||
x-next:
|
||||
description: A link to the next page of responses
|
||||
schema:
|
||||
type: string
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Pets"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
post:
|
||||
summary: Create a pet
|
||||
description: afasf
|
||||
operationId: createPets
|
||||
tags:
|
||||
- pets
|
||||
responses:
|
||||
'201':
|
||||
description: Null response
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/pets/{petId}/TesSt:
|
||||
get:
|
||||
summary: Info for a specific pet
|
||||
description: aaa
|
||||
operationId: showPetById
|
||||
tags:
|
||||
- pets
|
||||
parameters:
|
||||
- name: petId
|
||||
in: path
|
||||
required: true
|
||||
description: The id of the pet to retrieve
|
||||
schema:
|
||||
type: boolean
|
||||
responses:
|
||||
'200':
|
||||
description: Expected response to a valid request
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Pet"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
components:
|
||||
parameters:
|
||||
anotherParam:
|
||||
nname: anotherParam
|
||||
schema:
|
||||
type: intege
|
||||
in: query
|
||||
schemas:
|
||||
Some:
|
||||
type: integer
|
||||
enum:
|
||||
- 1
|
||||
- 'adsad'
|
||||
- 2
|
||||
Pet:
|
||||
type: object
|
||||
required:
|
||||
- id
|
||||
- name
|
||||
properties:
|
||||
id:
|
||||
type: integer
|
||||
format: int64
|
||||
name:
|
||||
type: string
|
||||
tag:
|
||||
type: string
|
||||
Pets:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Pet"
|
||||
Error:
|
||||
type: object
|
||||
required:
|
||||
- code
|
||||
- message
|
||||
properties:
|
||||
code:
|
||||
type: integer
|
||||
format: int32
|
||||
message:
|
||||
type: string
|
||||
@@ -1,36 +0,0 @@
|
||||
lint:
|
||||
codeframes: on
|
||||
rules:
|
||||
bundler:
|
||||
nameConflicts: warning
|
||||
debug-info: off
|
||||
|
||||
no-extra-fields: warning
|
||||
no-$ref-siblings: warning
|
||||
|
||||
suggest-possible-refs: on
|
||||
oas3-schema: on
|
||||
operations-tags-alpabetical: warning
|
||||
path-param-exists: on
|
||||
operation-2xx-response: warning
|
||||
unique-parameter-names: on
|
||||
no-unused-schemas: warning
|
||||
operation-operationId-unique: on
|
||||
path-declarations-must-exist: on
|
||||
string-matcher: off
|
||||
enum-match-type: warning
|
||||
|
||||
operation-tags-defined: warning
|
||||
provide-contact: warning
|
||||
|
||||
camel-case-names: off
|
||||
parameter-description: off
|
||||
server-not-example: off
|
||||
api-servers: off
|
||||
license-url: off
|
||||
license-required: off
|
||||
model-description: off
|
||||
operation-description: off
|
||||
operation-operationId: off
|
||||
operation-tags: off
|
||||
servers-no-trailing-slash: off
|
||||
55
src/__tests__/__snapshots__/bundle.test.ts.snap
Normal file
55
src/__tests__/__snapshots__/bundle.test.ts.snap
Normal file
@@ -0,0 +1,55 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`bundle should bundle external refs 1`] = `
|
||||
openapi: 3.0.0
|
||||
paths:
|
||||
/pet:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/path-param'
|
||||
put:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/param-a'
|
||||
- $ref: '#/components/parameters/param-b'
|
||||
get:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/param-a'
|
||||
- $ref: '#/components/parameters/param-c'
|
||||
components:
|
||||
parameters:
|
||||
param-a:
|
||||
name: param_a
|
||||
path-param:
|
||||
name: path_param
|
||||
param-b:
|
||||
name: param_b
|
||||
param-c:
|
||||
name: param_c
|
||||
|
||||
`;
|
||||
|
||||
exports[`bundle should bundle external refs and warn for conflicting names 1`] = `
|
||||
openapi: 3.0.0
|
||||
paths:
|
||||
/pet:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/path-param'
|
||||
put:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/param-b'
|
||||
- $ref: '#/components/parameters/param-b-2'
|
||||
get:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/param-a'
|
||||
- $ref: '#/components/parameters/param-c'
|
||||
components:
|
||||
parameters:
|
||||
param-b:
|
||||
name: param_b
|
||||
path-param:
|
||||
name: path_param
|
||||
param-b-2:
|
||||
name: param_b
|
||||
param-c:
|
||||
name: param_c
|
||||
|
||||
`;
|
||||
65
src/__tests__/bundle.test.ts
Normal file
65
src/__tests__/bundle.test.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import outdent from 'outdent';
|
||||
import * as path from 'path';
|
||||
|
||||
import { bundleDocument, bundle } from '../bundle';
|
||||
|
||||
import { parseYamlToDocument, yamlSerializer } from './utils';
|
||||
|
||||
describe('bundle', () => {
|
||||
expect.addSnapshotSerializer(yamlSerializer);
|
||||
|
||||
it('change nothing with only internal refs', async () => {
|
||||
const document = parseYamlToDocument(
|
||||
outdent`
|
||||
openapi: 3.0.0
|
||||
info:
|
||||
contact: {}
|
||||
license: {}
|
||||
paths:
|
||||
/pet:
|
||||
get:
|
||||
operationId: get
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/shared_a'
|
||||
- name: get_b
|
||||
post:
|
||||
operationId: post
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/shared_a'
|
||||
components:
|
||||
parameters:
|
||||
shared_a:
|
||||
name: shared-a
|
||||
`,
|
||||
'',
|
||||
);
|
||||
|
||||
const { bundle, messages } = await bundleDocument({
|
||||
document,
|
||||
});
|
||||
|
||||
const origCopy = JSON.parse(JSON.stringify(document.parsed));
|
||||
|
||||
expect(messages).toHaveLength(0);
|
||||
expect(bundle).toEqual(origCopy);
|
||||
});
|
||||
|
||||
it('should bundle external refs', async () => {
|
||||
const { bundle: res, messages } = await bundle({
|
||||
ref: path.join(__dirname, 'fixtures/refs/openapi-with-external-refs.yaml'),
|
||||
});
|
||||
expect(messages).toHaveLength(0);
|
||||
expect(res).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should bundle external refs and warn for conflicting names', async () => {
|
||||
const { bundle: res, messages } = await bundle({
|
||||
ref: path.join(__dirname, 'fixtures/refs/openapi-with-external-refs-conflicting-names.yaml'),
|
||||
});
|
||||
expect(messages).toHaveLength(1);
|
||||
expect(messages[0].message).toEqual(
|
||||
`Two schemas are referenced with the same name but different content. Renamed param-b to param-b-2`,
|
||||
);
|
||||
expect(res).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
494
src/__tests__/codeframes.test.ts
Normal file
494
src/__tests__/codeframes.test.ts
Normal file
@@ -0,0 +1,494 @@
|
||||
import { outdent } from 'outdent';
|
||||
|
||||
import { getLineColLocation, getCodeframe } from '../format/codeframes';
|
||||
import { LocationObject } from '../walk';
|
||||
import { Source } from '../resolve';
|
||||
|
||||
describe('Location', () => {
|
||||
it('should correctly calculate location for key', () => {
|
||||
const loc = {
|
||||
reportOnKey: true,
|
||||
pointer: '#/info/license',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
license:
|
||||
name: MIT
|
||||
url: https://google.com
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
const preciseLocation = getLineColLocation(loc);
|
||||
expect(preciseLocation.start).toEqual({ line: 3, col: 3 });
|
||||
expect(preciseLocation.end).toEqual({ line: 3, col: 10 });
|
||||
});
|
||||
|
||||
it('should correctly calculate location for value', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/license/name',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
license:
|
||||
name: MIT
|
||||
url: https://google.com
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
const preciseLocation = getLineColLocation(loc);
|
||||
expect(preciseLocation.start).toEqual({ line: 4, col: 11 });
|
||||
expect(preciseLocation.end).toEqual({ line: 4, col: 14 });
|
||||
});
|
||||
|
||||
it('should correctly fallback to the closest parent node if pointer is incorrect', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/missing',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
license:
|
||||
name: MIT
|
||||
url: https://google.com
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
const preciseLocation = getLineColLocation(loc);
|
||||
expect(preciseLocation.start).toEqual({ line: 3, col: 3 });
|
||||
expect(preciseLocation.end).toEqual({ line: 5, col: 28 });
|
||||
});
|
||||
|
||||
it('should return first line for empty doc', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/missing',
|
||||
source: new Source('foobar.yaml', ''),
|
||||
};
|
||||
|
||||
const preciseLocation = getLineColLocation(loc);
|
||||
expect(preciseLocation.start).toEqual({ line: 1, col: 1 });
|
||||
expect(preciseLocation.end).toEqual({ line: 1, col: 1 });
|
||||
});
|
||||
|
||||
it('should return full range for file with newlines', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/missing',
|
||||
source: new Source('foobar.yaml', '\n\n\n'),
|
||||
};
|
||||
|
||||
const preciseLocation = getLineColLocation(loc);
|
||||
expect(preciseLocation.start).toEqual({ line: 1, col: 1 });
|
||||
expect(preciseLocation.end).toEqual({ line: 1, col: 1 });
|
||||
});
|
||||
});
|
||||
|
||||
describe('codeframes', () => {
|
||||
function getColorCodeframe(loc: LocationObject) {
|
||||
return getCodeframe(getLineColLocation(loc), true)
|
||||
.replace(/\x1b\[90m(.*?)\x1b\[39m/g, '<g>$1</g>')
|
||||
.replace(/\x1b\[31m(.*?)\x1b\[39m/g, '<r>$1</r>');
|
||||
}
|
||||
|
||||
function getPlainCodeframe(loc: LocationObject) {
|
||||
return getCodeframe(getLineColLocation(loc), false);
|
||||
}
|
||||
|
||||
expect.addSnapshotSerializer({
|
||||
test: (val) => typeof val === 'string',
|
||||
print: (v) => v as string,
|
||||
});
|
||||
|
||||
it('should correctly generate simple codeframe', () => {
|
||||
const loc = {
|
||||
reportOnKey: true,
|
||||
pointer: '#/info/license',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
license:
|
||||
name: MIT
|
||||
url: https://google.com
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 | openapi: 3.0.2
|
||||
2 | info:
|
||||
3 | license:
|
||||
| ^^^^^^^
|
||||
4 | name: MIT
|
||||
5 | url: https://google.com
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>1 |</g> <g>openapi: 3.0.2</g>
|
||||
<g>2 |</g> <g>info:</g>
|
||||
<g>3 |</g> <r>license</r>:
|
||||
<g>4 |</g> <g>name: MIT</g>
|
||||
<g>5 |</g> <g>url: https://google.com</g>
|
||||
`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for single-line file on key', () => {
|
||||
const loc = {
|
||||
reportOnKey: true,
|
||||
pointer: '#/openapi',
|
||||
source: new Source('foobar.yaml', `openapi: 3.0.2`),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 | openapi: 3.0.2
|
||||
| ^^^^^^^
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`<g>1 |</g> <r>openapi</r>: 3.0.2`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for single-line file on value', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/openapi',
|
||||
source: new Source('foobar.yaml', `openapi: 3.0.2`),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 | openapi: 3.0.2
|
||||
| ^^^^^
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`<g>1 |</g> openapi: <r>3.0.2</r>`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for multiline-value', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/license',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
license:
|
||||
name: MIT
|
||||
url: https://google.com
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
2 | info:
|
||||
3 | license:
|
||||
4 | name: MIT
|
||||
| ^^^^^^^^^
|
||||
5 | url: https://google.com
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>2 |</g> <g>info:</g>
|
||||
<g>3 |</g> <g>license:</g>
|
||||
<g>4 |</g> <r>name: MIT</r>
|
||||
<g>5 |</g> <r>url: https://google.com</r>
|
||||
`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for multiline-value json-like', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/license',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
license: {
|
||||
name: MIT,
|
||||
url: https://google.com
|
||||
}
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 | openapi: 3.0.2
|
||||
2 | info:
|
||||
3 | license: {
|
||||
| ^
|
||||
4 | name: MIT,
|
||||
| ^^^^^^^^^^
|
||||
5 | url: https://google.com
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^
|
||||
6 | }
|
||||
| ^
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>1 |</g> <g>openapi: 3.0.2</g>
|
||||
<g>2 |</g> <g>info:</g>
|
||||
<g>3 |</g> license: <r>{</r>
|
||||
<g>4 |</g> <r>name: MIT,</r>
|
||||
<g>5 |</g> <r>url: https://google.com</r>
|
||||
<g>6 |</g> <r>}</r>
|
||||
`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for multiline-value and show only 5 lines', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/license',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
info:
|
||||
license:
|
||||
name: MIT
|
||||
field2: MIT
|
||||
field3: MIT
|
||||
field4: MIT
|
||||
field5: MIT
|
||||
url: https://google.com
|
||||
openapi: 3.0.2
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 | info:
|
||||
2 | license:
|
||||
3 | name: MIT
|
||||
| ^^^^^^^^^
|
||||
4 | field2: MIT
|
||||
| ^^^^^^^^^^^
|
||||
… | < 3 more lines >
|
||||
8 | url: https://google.com
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^
|
||||
9 | openapi: 3.0.2
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>1 |</g> <g>info:</g>
|
||||
<g>2 |</g> <g>license:</g>
|
||||
<g>3 |</g> <r>name: MIT</r>
|
||||
<g>4 |</g> <r>field2: MIT</r>
|
||||
<g>… |</g> <g>< 3 more lines ></g>
|
||||
<g>8 |</g> <r>url: https://google.com</r>
|
||||
<g>9 |</g> <g>openapi: 3.0.2</g>
|
||||
`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for multiline-value and show only 5 lines with emtpy lines', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/license',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
info:
|
||||
|
||||
license:
|
||||
name: MIT
|
||||
|
||||
field2: MIT
|
||||
field3: MIT
|
||||
field4: MIT
|
||||
field5: MIT
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
2 |
|
||||
3 | license:
|
||||
4 | name: MIT
|
||||
| ^^^^^^^^^
|
||||
5 |
|
||||
| ^
|
||||
… | < 3 more lines >
|
||||
9 | field5: MIT
|
||||
| ^^^^^^^^^^^
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>2 |</g>
|
||||
<g>3 |</g> <g>license:</g>
|
||||
<g>4 |</g> <r>name: MIT</r>
|
||||
<g>5 |</g>
|
||||
<g>… |</g> <g>< 3 more lines ></g>
|
||||
<g>9 |</g> <r>field5: MIT</r>
|
||||
`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame dedent', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/paths/~1pet/put/parameters/0/in',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
license:
|
||||
name: MIT
|
||||
url: https://google.com
|
||||
paths:
|
||||
'/pet':
|
||||
put:
|
||||
operationId: test
|
||||
parameters:
|
||||
- name: a
|
||||
in: wrong
|
||||
post:
|
||||
operationId: test2
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
10 | parameters:
|
||||
11 | - name: a
|
||||
12 | in: wrong
|
||||
| ^^^^^
|
||||
13 | post:
|
||||
14 | operationId: test2
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>10 |</g> <g>parameters:</g>
|
||||
<g>11 |</g> <g>- name: a</g>
|
||||
<g>12 |</g> in: <r>wrong</r>
|
||||
<g>13 |</g> <g>post:</g>
|
||||
<g>14 |</g> <g>operationId: test2</g>
|
||||
`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for too long line', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/description',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
info:
|
||||
description: Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea
|
||||
license:
|
||||
name: MIT
|
||||
url: https://google.com
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 | openapi: 3.0.2
|
||||
2 | info:
|
||||
3 | description: Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua...<77 chars>
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^...<77 chars>
|
||||
4 | license:
|
||||
5 | name: MIT
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>1 |</g> <g>openapi: 3.0.2</g>
|
||||
<g>2 |</g> <g>info:</g>
|
||||
<g>3 |</g> description: [31mLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolo<g>...<87 chars></g>
|
||||
<g>4 |</g> <g>license:</g>
|
||||
<g>5 |</g> <g>name: MIT</g>
|
||||
`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for emtpy file', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/info/description',
|
||||
source: new Source('foobar.yaml', `\n\n`),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 |
|
||||
| ^
|
||||
2 |
|
||||
3 |
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>1 |</g>
|
||||
<g>2 |</g>
|
||||
<g>3 |</g>
|
||||
`);
|
||||
});
|
||||
|
||||
it('correctly generate code-frame for whole file and corectly skip newlines', () => {
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: '#/',
|
||||
source: new Source(
|
||||
'foobar.yaml',
|
||||
outdent`
|
||||
openapi: 3.0.2
|
||||
|
||||
info:
|
||||
|
||||
description: Lorem ipsum
|
||||
license:
|
||||
name: MIT
|
||||
url: https://google.com
|
||||
`,
|
||||
),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 | openapi: 3.0.2
|
||||
| ^^^^^^^^^^^^^^
|
||||
2 |
|
||||
| ^
|
||||
… | < 5 more lines >
|
||||
8 | url: https://google.com
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>1 |</g> <r>openapi: 3.0.2</r>
|
||||
<g>2 |</g>
|
||||
<g>… |</g> <g>< 5 more lines ></g>
|
||||
<g>8 |</g> <r>url: https://google.com</r>
|
||||
`);
|
||||
});
|
||||
|
||||
it('should show line even if there are not so many file in the file', () => {
|
||||
// (yaml parser sometimes shows error on the next after last line
|
||||
const loc = {
|
||||
reportOnKey: false,
|
||||
pointer: undefined,
|
||||
start: {
|
||||
line: 2,
|
||||
col: 1,
|
||||
},
|
||||
source: new Source('foobar.yaml', outdent`openapi: 3.0.2`),
|
||||
};
|
||||
|
||||
expect(getPlainCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
1 | openapi: 3.0.2
|
||||
2 |
|
||||
| ^
|
||||
`);
|
||||
|
||||
expect(getColorCodeframe(loc)).toMatchInlineSnapshot(`
|
||||
<g>1 |</g> <g>openapi: 3.0.2</g>
|
||||
<g>2 |</g>
|
||||
`);
|
||||
});
|
||||
});
|
||||
@@ -1,13 +0,0 @@
|
||||
transitiveLocal:
|
||||
$ref: '#/components'
|
||||
|
||||
transitiveLocalBad:
|
||||
$ref: '#/oops'
|
||||
|
||||
recursive:
|
||||
b:
|
||||
$ref: '#/recursive/b'
|
||||
|
||||
components:
|
||||
a:
|
||||
type: string
|
||||
@@ -1,34 +0,0 @@
|
||||
local:
|
||||
test1:
|
||||
$ref: '#/transitiveLocal/a'
|
||||
test2:
|
||||
$ref: '#/transitiveLocal/b'
|
||||
test3:
|
||||
$ref: '#/transitiveLocalBad/b'
|
||||
test4:
|
||||
$ref: '#/recursive/b'
|
||||
external:
|
||||
test1:
|
||||
$ref: 'external.yaml#/transitiveLocal/a'
|
||||
test2:
|
||||
$ref: 'external.yaml#/transitiveLocal/b'
|
||||
test3:
|
||||
$ref: 'external.yaml#/transitiveLocalBad/b'
|
||||
test4:
|
||||
$ref: 'external.yaml#/recursive/b'
|
||||
|
||||
|
||||
|
||||
transitiveLocal:
|
||||
$ref: '#/components'
|
||||
|
||||
transitiveLocalBad:
|
||||
$ref: '#/oops'
|
||||
|
||||
recursive:
|
||||
b:
|
||||
$ref: '#/recursive/b'
|
||||
|
||||
components:
|
||||
a:
|
||||
type: string
|
||||
@@ -0,0 +1,19 @@
|
||||
openapi: 3.0.0
|
||||
paths:
|
||||
/pet:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/path-param'
|
||||
put:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/param-b'
|
||||
- $ref: ./param-b.yaml
|
||||
get:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/param-a'
|
||||
- $ref: ./param-c.yaml
|
||||
components:
|
||||
parameters:
|
||||
param-b:
|
||||
name: param_b
|
||||
path-param:
|
||||
name: path_param
|
||||
19
src/__tests__/fixtures/refs/openapi-with-external-refs.yaml
Normal file
19
src/__tests__/fixtures/refs/openapi-with-external-refs.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
openapi: 3.0.0
|
||||
paths:
|
||||
/pet:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/path-param'
|
||||
put:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/param-a'
|
||||
- $ref: ./param-b.yaml
|
||||
get:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/param-a'
|
||||
- $ref: ./param-c.yaml
|
||||
components:
|
||||
parameters:
|
||||
param-a:
|
||||
name: param_a
|
||||
path-param:
|
||||
name: path_param
|
||||
1
src/__tests__/fixtures/refs/param-b.yaml
Normal file
1
src/__tests__/fixtures/refs/param-b.yaml
Normal file
@@ -0,0 +1 @@
|
||||
name: param_b
|
||||
1
src/__tests__/fixtures/refs/param-c.yaml
Normal file
1
src/__tests__/fixtures/refs/param-c.yaml
Normal file
@@ -0,0 +1 @@
|
||||
name: param_c
|
||||
8
src/__tests__/fixtures/resolve/External.yaml
Normal file
8
src/__tests__/fixtures/resolve/External.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
type: object
|
||||
properties:
|
||||
string:
|
||||
type: string
|
||||
number:
|
||||
type: number
|
||||
external:
|
||||
$ref: ./External2.yaml
|
||||
4
src/__tests__/fixtures/resolve/External2.yaml
Normal file
4
src/__tests__/fixtures/resolve/External2.yaml
Normal file
@@ -0,0 +1,4 @@
|
||||
type: object
|
||||
properties:
|
||||
circularParent:
|
||||
$ref: ./External.yaml#/properties
|
||||
4
src/__tests__/fixtures/resolve/externalInfo.yaml
Normal file
4
src/__tests__/fixtures/resolve/externalInfo.yaml
Normal file
@@ -0,0 +1,4 @@
|
||||
info:
|
||||
contact: {}
|
||||
license:
|
||||
$ref: ./externalLicense.yaml
|
||||
1
src/__tests__/fixtures/resolve/externalLicense.yaml
Normal file
1
src/__tests__/fixtures/resolve/externalLicense.yaml
Normal file
@@ -0,0 +1 @@
|
||||
name: MIT
|
||||
28
src/__tests__/fixtures/resolve/openapi.yaml
Normal file
28
src/__tests__/fixtures/resolve/openapi.yaml
Normal file
@@ -0,0 +1,28 @@
|
||||
openapi: 3.0.0
|
||||
paths:
|
||||
/pet:
|
||||
put:
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
local:
|
||||
$ref: '#/components/schemas/Local'
|
||||
localTransitive:
|
||||
$ref: '#/components/schemas/Local/properties/string'
|
||||
externalWithPointer:
|
||||
$ref: ./External.yaml#/properties/string
|
||||
external:
|
||||
$ref: ./External.yaml
|
||||
components:
|
||||
schemas:
|
||||
Local:
|
||||
properties:
|
||||
number:
|
||||
type: number
|
||||
string:
|
||||
type: string
|
||||
localCircular:
|
||||
$ref: '#/components/schemas/Local'
|
||||
150
src/__tests__/normalizeVisitors.test.ts
Normal file
150
src/__tests__/normalizeVisitors.test.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import { normalizeVisitors, VisitorLevelContext } from '../visitors';
|
||||
import { OAS3RuleSet } from '../validate';
|
||||
import { OAS3Types } from '../types';
|
||||
|
||||
describe('Normalize visitors', () => {
|
||||
it('should work correctly for single rule', () => {
|
||||
const schemaEnter = () => undefined;
|
||||
|
||||
const ruleset: OAS3RuleSet[] = [
|
||||
{
|
||||
test: () => {
|
||||
return {
|
||||
Schema: schemaEnter,
|
||||
};
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const visitors = ruleset.flatMap((ruleset) =>
|
||||
Object.keys(ruleset).map((ruleId) => ({
|
||||
ruleId,
|
||||
severity: 'error' as 'error',
|
||||
visitor: ruleset[ruleId](),
|
||||
})),
|
||||
);
|
||||
|
||||
const normalized = normalizeVisitors(visitors, OAS3Types);
|
||||
expect(normalized).toBeDefined();
|
||||
expect(normalized.Schema.enter).toHaveLength(1);
|
||||
expect(normalized.Schema.enter[0].visit).toEqual(schemaEnter);
|
||||
expect(normalized.Schema.enter[0].context.parent).toEqual(null);
|
||||
const { type, ...contextWithoutType } = normalized.Schema.enter[0]
|
||||
.context as VisitorLevelContext;
|
||||
|
||||
expect(contextWithoutType).toEqual({
|
||||
activatedOn: null,
|
||||
parent: null,
|
||||
isSkippedLevel: false,
|
||||
});
|
||||
|
||||
expect(type.name).toEqual('Schema');
|
||||
});
|
||||
|
||||
it('should work for nested rule', () => {
|
||||
const infoEnter = () => undefined;
|
||||
const infoLeave = () => undefined;
|
||||
const contactEnter = () => undefined;
|
||||
|
||||
const ruleset: OAS3RuleSet[] = [
|
||||
{
|
||||
test: () => {
|
||||
return {
|
||||
Info: {
|
||||
enter: infoEnter,
|
||||
leave: infoLeave,
|
||||
Contact: contactEnter,
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const visitors = ruleset.flatMap((ruleset) =>
|
||||
Object.keys(ruleset).map((ruleId) => ({
|
||||
ruleId,
|
||||
severity: 'error' as 'error',
|
||||
visitor: ruleset[ruleId](),
|
||||
})),
|
||||
);
|
||||
|
||||
const normalized = normalizeVisitors(visitors, OAS3Types);
|
||||
expect(normalized).toBeDefined();
|
||||
expect(normalized.Info.enter).toHaveLength(1);
|
||||
|
||||
expect(normalized.Info.enter[0].context).toStrictEqual(normalized.Info.leave[0].context);
|
||||
|
||||
expect(normalized.Info.enter[0].visit).toEqual(infoEnter);
|
||||
expect(normalized.Contact.enter[0].visit).toEqual(contactEnter);
|
||||
|
||||
expect(normalized.Contact.enter[0].context.parent).toEqual(normalized.Info.enter[0].context);
|
||||
|
||||
expect(normalized.Info.leave).toHaveLength(1);
|
||||
expect(normalized.Info.leave[0].visit).toEqual(infoLeave);
|
||||
});
|
||||
|
||||
it('should normalize with weak interminient types', () => {
|
||||
const contactEnter = () => undefined;
|
||||
|
||||
const ruleset: OAS3RuleSet[] = [
|
||||
{
|
||||
test: () => {
|
||||
return {
|
||||
PathItem: {
|
||||
Parameter: contactEnter,
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const visitors = ruleset.flatMap((ruleset) =>
|
||||
Object.keys(ruleset).map((ruleId) => ({
|
||||
ruleId,
|
||||
severity: 'error' as 'error',
|
||||
visitor: ruleset[ruleId](),
|
||||
})),
|
||||
);
|
||||
|
||||
const normalized = normalizeVisitors(visitors, OAS3Types);
|
||||
expect(normalized).toBeDefined();
|
||||
expect(normalized.PathItem.enter).toHaveLength(1);
|
||||
expect(normalized.Operation.enter).toHaveLength(1);
|
||||
expect(normalized.Parameter.enter).toHaveLength(1);
|
||||
expect(normalized.Parameter_List.enter).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should order deeper visitors first', () => {
|
||||
const pathParam = () => undefined;
|
||||
const operationParam = () => undefined;
|
||||
|
||||
const ruleset: OAS3RuleSet[] = [
|
||||
{
|
||||
test: () => {
|
||||
return {
|
||||
PathItem: {
|
||||
Parameter: pathParam,
|
||||
Operation: {
|
||||
Parameter: operationParam,
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const visitors = ruleset.flatMap((ruleset) =>
|
||||
Object.keys(ruleset).map((ruleId) => ({
|
||||
ruleId,
|
||||
severity: 'error' as 'error',
|
||||
visitor: ruleset[ruleId](),
|
||||
})),
|
||||
);
|
||||
|
||||
const normalized = normalizeVisitors(visitors, OAS3Types);
|
||||
expect(normalized).toBeDefined();
|
||||
expect(normalized.Parameter.enter).toHaveLength(2);
|
||||
expect(normalized.Parameter.enter[0].visit).toStrictEqual(operationParam);
|
||||
expect(normalized.Parameter.enter[1].visit).toStrictEqual(pathParam);
|
||||
});
|
||||
});
|
||||
195
src/__tests__/resolve.test.ts
Normal file
195
src/__tests__/resolve.test.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import { outdent } from 'outdent';
|
||||
import * as path from 'path';
|
||||
|
||||
import { resolveDocument, BaseResolver, Document } from '../resolve';
|
||||
import { parseYamlToDocument } from './utils';
|
||||
import { DefinitionRootType } from '../types';
|
||||
|
||||
describe('collect refs', () => {
|
||||
it('should resolve local refs', async () => {
|
||||
const rootDocument = parseYamlToDocument(
|
||||
outdent`
|
||||
openapi: 3.0.0
|
||||
info:
|
||||
$ref: "#/defs/info"
|
||||
defs:
|
||||
info:
|
||||
contact: {}
|
||||
license: {}
|
||||
`,
|
||||
'foobar.yaml',
|
||||
);
|
||||
|
||||
const resolvedRefs = await resolveDocument({
|
||||
rootDocument,
|
||||
externalRefResolver: new BaseResolver(),
|
||||
rootType: DefinitionRootType,
|
||||
});
|
||||
|
||||
expect(resolvedRefs).toBeDefined();
|
||||
expect(resolvedRefs.size).toEqual(1);
|
||||
expect(Array.from(resolvedRefs.keys()).map((ref) => ref.$ref)).toMatchInlineSnapshot(
|
||||
[`#/defs/info`],
|
||||
`
|
||||
Object {
|
||||
"0": "#/defs/info",
|
||||
}
|
||||
`,
|
||||
);
|
||||
expect(Array.from(resolvedRefs.values()).map((info) => info.node)).toEqual([
|
||||
{ contact: {}, license: {} },
|
||||
]);
|
||||
});
|
||||
|
||||
// Or using async/await.
|
||||
it('should throw on self-circular refs', async () => {
|
||||
expect.assertions(1);
|
||||
|
||||
const rootDocument = parseYamlToDocument(
|
||||
outdent`
|
||||
openapi: 3.0.0
|
||||
info:
|
||||
$ref: "#/info"
|
||||
defs:
|
||||
info:
|
||||
contact: {}
|
||||
license: {}
|
||||
`,
|
||||
'',
|
||||
);
|
||||
|
||||
try {
|
||||
await resolveDocument({
|
||||
rootDocument,
|
||||
externalRefResolver: new BaseResolver(),
|
||||
rootType: DefinitionRootType,
|
||||
});
|
||||
} catch (e) {
|
||||
expect(e.message).toEqual('Self-referencing circular pointer');
|
||||
}
|
||||
});
|
||||
|
||||
it('should resolve local transitive refs', async () => {
|
||||
const rootDocument = parseYamlToDocument(
|
||||
outdent`
|
||||
openapi: 3.0.0
|
||||
info:
|
||||
$ref: "#/tmp/info"
|
||||
tmp:
|
||||
$ref: '#/defs'
|
||||
defs:
|
||||
info:
|
||||
contact: {}
|
||||
license: {}
|
||||
`,
|
||||
'foobar.yaml',
|
||||
);
|
||||
|
||||
const resolvedRefs = await resolveDocument({
|
||||
rootDocument,
|
||||
externalRefResolver: new BaseResolver(),
|
||||
rootType: DefinitionRootType,
|
||||
});
|
||||
|
||||
expect(resolvedRefs).toBeDefined();
|
||||
expect(resolvedRefs.size).toEqual(2);
|
||||
expect(Array.from(resolvedRefs.keys()).map((ref) => ref.$ref)).toEqual([
|
||||
'#/defs',
|
||||
'#/tmp/info',
|
||||
]);
|
||||
expect(Array.from(resolvedRefs.values()).map((info) => info.node)).toEqual([
|
||||
{ info: { contact: {}, license: {} } },
|
||||
{ contact: {}, license: {} },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should throw on ref loop', async () => {
|
||||
const rootDocument = parseYamlToDocument(
|
||||
outdent`
|
||||
openapi: 3.0.0
|
||||
info:
|
||||
$ref: "#/loop"
|
||||
loop:
|
||||
$ref: '#/loop2'
|
||||
loop2:
|
||||
$ref: '#/info'
|
||||
`,
|
||||
'foobar.yaml',
|
||||
);
|
||||
|
||||
try {
|
||||
await resolveDocument({
|
||||
rootDocument,
|
||||
externalRefResolver: new BaseResolver(),
|
||||
rootType: DefinitionRootType,
|
||||
});
|
||||
} catch (e) {
|
||||
expect(e.message).toEqual('Self-referencing circular pointer');
|
||||
}
|
||||
});
|
||||
|
||||
it('should resolve external ref', async () => {
|
||||
const cwd = path.join(__dirname, 'fixtures/resolve');
|
||||
const rootDocument = parseYamlToDocument(
|
||||
outdent`
|
||||
openapi: 3.0.0
|
||||
info:
|
||||
$ref: "./externalInfo.yaml#/info"
|
||||
`,
|
||||
path.join(cwd, 'foobar.yaml'),
|
||||
);
|
||||
|
||||
const resolvedRefs = await resolveDocument({
|
||||
rootDocument,
|
||||
externalRefResolver: new BaseResolver(),
|
||||
rootType: DefinitionRootType,
|
||||
});
|
||||
|
||||
expect(resolvedRefs).toBeDefined();
|
||||
// expect(resolvedRefs.size).toEqual(2);
|
||||
expect(Array.from(resolvedRefs.keys()).map((ref) => ref.$ref)).toEqual([
|
||||
'./externalInfo.yaml#/info',
|
||||
'./externalLicense.yaml',
|
||||
]);
|
||||
|
||||
expect(Array.from(resolvedRefs.values()).map((info) => info.node)).toEqual([
|
||||
{
|
||||
contact: {},
|
||||
license: {
|
||||
$ref: './externalLicense.yaml',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'MIT',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should resolve external refs with circular', async () => {
|
||||
const cwd = path.join(__dirname, 'fixtures/resolve');
|
||||
const externalRefResolver = new BaseResolver();
|
||||
const rootDocument = await externalRefResolver.resolveDocument(null, `${cwd}/openapi.yaml`);
|
||||
|
||||
expect(rootDocument).toBeDefined();
|
||||
|
||||
const resolvedRefs = await resolveDocument({
|
||||
rootDocument: rootDocument as Document,
|
||||
externalRefResolver: externalRefResolver,
|
||||
rootType: DefinitionRootType,
|
||||
});
|
||||
|
||||
expect(resolvedRefs).toBeDefined();
|
||||
// expect(resolvedRefs.size).toEqual(2);
|
||||
expect(Array.from(resolvedRefs.keys()).map((ref) => ref.$ref)).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"#/components/schemas/Local",
|
||||
"#/components/schemas/Local/properties/string",
|
||||
"#/components/schemas/Local",
|
||||
"./External.yaml#/properties/string",
|
||||
"./External.yaml",
|
||||
"./External2.yaml",
|
||||
"./External.yaml#/properties",
|
||||
]
|
||||
`);
|
||||
});
|
||||
});
|
||||
@@ -1,75 +0,0 @@
|
||||
import { join } from 'path';
|
||||
import { readFileSync } from 'fs';
|
||||
import yaml from 'js-yaml';
|
||||
|
||||
import resolveNode from '../resolver';
|
||||
import createContext from '../context';
|
||||
import { getLintConfig } from '../config';
|
||||
|
||||
tests('local', 'index.yaml');
|
||||
tests('external', 'external.yaml');
|
||||
|
||||
function tests(type, resolvedFileName) {
|
||||
describe(`Transitive $refs ${type} file`, () => {
|
||||
let ctx;
|
||||
let doc;
|
||||
|
||||
beforeEach(async () => {
|
||||
const file = join(__dirname, 'data', 'index.yaml');
|
||||
const source = readFileSync(file, 'utf-8');
|
||||
const document = yaml.safeLoad(source);
|
||||
|
||||
ctx = createContext(document, source, file, getLintConfig({}));
|
||||
ctx.path = [type];
|
||||
doc = document[type];
|
||||
});
|
||||
|
||||
test('should successfully resolve transitive $ref', async () => {
|
||||
ctx.path.push('test1');
|
||||
const res = await resolveNode(doc.test1, ctx);
|
||||
expect(res).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"node": Object {
|
||||
"type": "string",
|
||||
},
|
||||
"onStack": true,
|
||||
}
|
||||
`);
|
||||
expect(ctx.result).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('should fail to resolve incorrect transitive $ref with correct error at initial file', async () => {
|
||||
ctx.path.push('test2');
|
||||
const res = await resolveNode(doc.test2, ctx);
|
||||
expect(res.node).toEqual(doc.test2);
|
||||
expect(ctx.result).toHaveLength(1);
|
||||
expect(ctx.result[0].file).toMatch('index.yaml');
|
||||
expect(ctx.result[0].path).toEqual([type, 'test2', '$ref']);
|
||||
});
|
||||
|
||||
test('should fail to resolve incorrect transitive $ref with error at first unresolved $ref', async () => {
|
||||
ctx.path.push('test3');
|
||||
const res = await resolveNode(doc.test3, ctx);
|
||||
expect(res.node).toEqual(doc.test3);
|
||||
expect(ctx.result).toHaveLength(1);
|
||||
expect(ctx.result[0].file).toMatch(resolvedFileName);
|
||||
|
||||
// error at first ref that can't be resolved
|
||||
expect(ctx.result[0].path).toEqual(['transitiveLocalBad', '$ref']);
|
||||
// referenced from at starting $ref
|
||||
expect(ctx.result[0].referencedFrom.path).toEqual([type, 'test3']);
|
||||
});
|
||||
|
||||
test('should fail to resolve circular transitive $ref', async () => {
|
||||
ctx.path.push('test4');
|
||||
const res = await resolveNode(doc.test4, ctx);
|
||||
expect(res.node).toEqual(doc.test4);
|
||||
expect(ctx.result).toHaveLength(1);
|
||||
expect(ctx.result[0].file).toMatch(resolvedFileName);
|
||||
|
||||
expect(ctx.result[0].path).toEqual(['recursive', 'b', '$ref']);
|
||||
expect(ctx.result[0].referencedFrom.path).toEqual([type, 'test4']);
|
||||
expect(ctx.result[0].message).toMatch(/circular/i);
|
||||
});
|
||||
});
|
||||
}
|
||||
54
src/__tests__/utils.ts
Normal file
54
src/__tests__/utils.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as path from 'path';
|
||||
|
||||
import { Document, Source } from '../resolve';
|
||||
import { NormalizedReportMessage } from '../walk';
|
||||
import { RuleConfig, LintConfig } from '../config/config';
|
||||
import { OAS3RuleSet } from '../validate';
|
||||
|
||||
export function parseYamlToDocument(body: string, absoluteRef: string = ''): Document {
|
||||
return {
|
||||
source: new Source(absoluteRef, body),
|
||||
parsed: yaml.safeLoad(body, { filename: absoluteRef }),
|
||||
};
|
||||
}
|
||||
|
||||
export function replaceSourceWithRef(results: NormalizedReportMessage[], cwd?: string) {
|
||||
return results.map((r) => {
|
||||
return {
|
||||
...r,
|
||||
location: r.location.map((l) => ({
|
||||
...l,
|
||||
source: cwd ? path.relative(cwd, l.source.absoluteRef) : l.source.absoluteRef,
|
||||
})),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export const yamlSerializer = {
|
||||
test: () => {
|
||||
return true;
|
||||
},
|
||||
print: (val: any) => {
|
||||
return yaml.safeDump(val);
|
||||
},
|
||||
};
|
||||
|
||||
export function makeConfigForRuleset(rules: OAS3RuleSet) {
|
||||
const rulesConf: Record<string, RuleConfig> = {};
|
||||
const ruleId = 'test';
|
||||
Object.keys(rules).forEach((name) => {
|
||||
rulesConf[`${ruleId}/${name}`] = 'error';
|
||||
});
|
||||
|
||||
return new LintConfig({
|
||||
plugins: [
|
||||
{
|
||||
id: ruleId,
|
||||
rules: { oas3: rules },
|
||||
},
|
||||
],
|
||||
extends: [],
|
||||
rules: rulesConf,
|
||||
});
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
1161
src/__tests__/walk.test.ts
Normal file
1161
src/__tests__/walk.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,93 +0,0 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import yaml from 'js-yaml';
|
||||
|
||||
import { getLintConfig } from './config';
|
||||
import traverseNode from './traverse';
|
||||
import createContext from './context';
|
||||
|
||||
import { OpenAPIRoot } from './types/OAS3';
|
||||
import { OAS2Root } from './types/OAS2';
|
||||
|
||||
import { readYaml } from './utils';
|
||||
|
||||
function writeBundleToFile(bundleObject, outputFile) {
|
||||
const nameParts = outputFile.split('.');
|
||||
const ext = nameParts[nameParts.length - 1];
|
||||
|
||||
const outputPath = path.resolve(outputFile);
|
||||
|
||||
const outputDir = path.dirname(outputPath);
|
||||
fs.mkdirSync(outputDir, { recursive: true });
|
||||
|
||||
let fileData = null;
|
||||
|
||||
switch (ext) {
|
||||
case 'json':
|
||||
fileData = JSON.stringify(bundleObject, null, 2);
|
||||
break;
|
||||
case 'yaml':
|
||||
case 'yml':
|
||||
default:
|
||||
fileData = yaml.safeDump(bundleObject);
|
||||
break;
|
||||
}
|
||||
fs.writeFileSync(`${outputPath}`, fileData);
|
||||
}
|
||||
|
||||
export const bundleToFile = async (fName, outputFile, force) => {
|
||||
const resolvedFileName = fName; // path.resolve(fName);
|
||||
const { document, source } = readYaml(resolvedFileName);
|
||||
|
||||
if (!document.openapi && !document.swagger) { return []; }
|
||||
|
||||
const lintConfig = getLintConfig({});
|
||||
lintConfig.rules = {
|
||||
...lintConfig.rules,
|
||||
bundler: {
|
||||
...(lintConfig.rules && typeof lintConfig.rules.bundler === 'object' ? lintConfig.rules.bundler : null),
|
||||
output: outputFile,
|
||||
ignoreErrors: force,
|
||||
},
|
||||
};
|
||||
|
||||
const ctx = createContext(document, source, resolvedFileName, lintConfig);
|
||||
|
||||
const rootNode = ctx.openapiVersion === 3 ? OpenAPIRoot : OAS2Root;
|
||||
await traverseNode(document, rootNode, ctx);
|
||||
|
||||
if (outputFile) {
|
||||
writeBundleToFile(ctx.bundlingResult, outputFile);
|
||||
} else {
|
||||
process.stdout.write(yaml.safeDump(ctx.bundlingResult));
|
||||
process.stdout.write('\n');
|
||||
}
|
||||
|
||||
return ctx.result;
|
||||
};
|
||||
|
||||
export const bundle = async (fName, force, options) => {
|
||||
const resolvedFileName = fName; // path.resolve(fName);
|
||||
const { document, source } = readYaml(resolvedFileName);
|
||||
|
||||
if (!document.openapi && !document.swagger) { return []; }
|
||||
|
||||
const lintConfig = getLintConfig(options);
|
||||
lintConfig.rules = {
|
||||
...lintConfig.rules,
|
||||
bundler: {
|
||||
...(lintConfig.rules && typeof lintConfig.rules.bundler === 'object' ? lintConfig.rules.bundler : null),
|
||||
outputObject: true,
|
||||
ignoreErrors: force,
|
||||
},
|
||||
};
|
||||
|
||||
const ctx = createContext(document, source, resolvedFileName, lintConfig);
|
||||
|
||||
const rootNode = ctx.openapiVersion === 3 ? OpenAPIRoot : OAS2Root;
|
||||
await traverseNode(document, rootNode, ctx);
|
||||
|
||||
return { bundle: ctx.bundlingResult, result: ctx.result, fileDependencies: ctx.fileDependencies };
|
||||
};
|
||||
|
||||
export default bundleToFile;
|
||||
185
src/bundle.ts
Normal file
185
src/bundle.ts
Normal file
@@ -0,0 +1,185 @@
|
||||
import { BaseResolver, resolveDocument, Document, ResolvedRefMap } from './resolve';
|
||||
|
||||
import { OAS3Rule, normalizeVisitors, BaseVisitor, OAS3Visitor } from './visitors';
|
||||
import { TypeTreeNode, OAS3Types } from './types';
|
||||
import { WalkContext, walkDocument } from './walk';
|
||||
import { detectOpenAPI } from './validate';
|
||||
import { Location, pointerBaseName, refBaseName } from './ref';
|
||||
|
||||
export enum OASVersion {
|
||||
Version2,
|
||||
Version3_0_x,
|
||||
}
|
||||
|
||||
export type OAS3RuleSet = Record<string, OAS3Rule>;
|
||||
|
||||
// todo: fix visitors typing
|
||||
export async function bundle(opts: { ref: string; externalRefResolver?: BaseResolver }) {
|
||||
const { ref, externalRefResolver = new BaseResolver() } = opts;
|
||||
|
||||
let document: Document;
|
||||
try {
|
||||
document = (await externalRefResolver.resolveDocument(null, ref)) as Document;
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
return bundleDocument({
|
||||
document,
|
||||
...opts,
|
||||
});
|
||||
}
|
||||
|
||||
type BundleContext = WalkContext;
|
||||
|
||||
/**
 * Bundles an already-resolved root Document. Dispatches on the detected OAS
 * version; only OAS 3.0.x is implemented. Returns the bundled definition
 * (document.parsed, mutated in place by the bundle visitor) plus any messages
 * collected during the walk.
 */
export async function bundleDocument(opts: {
  document: Document;
  customTypes?: Record<string, TypeTreeNode>;
  externalRefResolver?: BaseResolver;
}) {
  const { document, customTypes, externalRefResolver = new BaseResolver() } = opts;
  // TODO: wrap safeLoad errors to our format
  switch (detectOpenAPI(document.parsed)) {
    case OASVersion.Version2:
      throw new Error('OAS2 is not implemented yet');
    case OASVersion.Version3_0_x: {
      // A caller-provided type tree takes precedence over the built-in OAS3 types.
      const types = customTypes ?? OAS3Types;

      // Resolve every $ref (local and external) up front; the walker consumes this map.
      const resolvedRefMap = await resolveDocument({
        rootDocument: document,
        rootType: types.DefinitionRoot,
        externalRefResolver,
      });

      // The bundler runs as a single "rule" so it plugs into the normal visitor pipeline.
      const normalizedVisitors = normalizeVisitors(
        [
          {
            severity: 'error',
            ruleId: 'bundler',
            visitor: makeBundleVisitor(OASVersion.Version3_0_x),
          },
        ],
        types,
      );

      // Messages reported while walking (e.g. component rename notices) accumulate here.
      const ctx: BundleContext = {
        messages: [],
        oasVersion: OASVersion.Version3_0_x,
      };

      walkDocument({
        document,
        rootType: types.DefinitionRoot as TypeTreeNode,
        normalizedVisitors,
        resolvedRefMap,
        ctx,
      });

      // The visitor rewrote $refs / inlined nodes directly on document.parsed.
      return { bundle: document.parsed, messages: ctx.messages };
    }
  }

  // Reached only for versions detectOpenAPI reports that are not handled above.
  throw new Error('Not implemented');
}
|
||||
|
||||
function mapOAS3TypeToComponent(typeName: string) {
|
||||
switch (typeName) {
|
||||
case 'Schema':
|
||||
return 'schemas';
|
||||
case 'Parameter':
|
||||
return 'parameters';
|
||||
case 'Response':
|
||||
return 'responses';
|
||||
case 'Example':
|
||||
return 'examples';
|
||||
case 'RequestBody':
|
||||
return 'requestBodies';
|
||||
case 'Header':
|
||||
return 'headers';
|
||||
case 'SecuritySchema':
|
||||
return 'securitySchemes';
|
||||
case 'Link':
|
||||
return 'links';
|
||||
case 'Callback':
|
||||
return 'callbacks';
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// function oas3Move
|
||||
|
||||
function makeBundleVisitor<T extends BaseVisitor>(version: OASVersion) {
|
||||
let components: Record<string, Record<string, any>>;
|
||||
|
||||
// @ts-ignore
|
||||
const visitor: T = {
|
||||
ref(node, ctx, resolved) {
|
||||
if (!resolved.location || !resolved.node) return; // error is reported by walker
|
||||
|
||||
// todo discriminator
|
||||
const componentType =
|
||||
version === OASVersion.Version3_0_x ? mapOAS3TypeToComponent(ctx.type.name) : null;
|
||||
if (!componentType) {
|
||||
delete node.$ref;
|
||||
Object.assign(node, resolved.node);
|
||||
} else {
|
||||
node.$ref = saveComponent(componentType, resolved);
|
||||
}
|
||||
|
||||
function saveComponent(componentType: string, target: { node: any; location: Location }) {
|
||||
components[componentType] = components[componentType] || {};
|
||||
const name = getComponentName(target, componentType);
|
||||
components[componentType][name] = target.node;
|
||||
if (version === OASVersion.Version3_0_x) {
|
||||
return `#/components/${componentType}/${name}`;
|
||||
} else {
|
||||
throw new Error('Not implemented');
|
||||
}
|
||||
}
|
||||
|
||||
function getComponentName(target: { node: any; location: Location }, componentType: string) {
|
||||
const [fileRef, pointer] = [target.location.source.absoluteRef, target.location.pointer];
|
||||
|
||||
const pointerBase = pointerBaseName(pointer);
|
||||
const refBase = refBaseName(fileRef);
|
||||
|
||||
let name = pointerBase || refBase;
|
||||
|
||||
const componentsGroup = components[componentType];
|
||||
if (!componentsGroup || !componentsGroup[name] || componentsGroup[name] === target.node)
|
||||
return name;
|
||||
|
||||
if (pointerBase) {
|
||||
name = `${refBase}/${pointerBase}`;
|
||||
if (!componentsGroup[name] || componentsGroup[name] === target.node) return name;
|
||||
}
|
||||
|
||||
const prevName = name;
|
||||
let serialId = 2;
|
||||
while (componentsGroup[name] && !componentsGroup[name] !== target.node) {
|
||||
name = `${name}-${serialId}`;
|
||||
serialId++;
|
||||
}
|
||||
|
||||
ctx.report({
|
||||
message: `Two schemas are referenced with the same name but different content. Renamed ${prevName} to ${name}`,
|
||||
location: { reportOnKey: true },
|
||||
});
|
||||
|
||||
return name;
|
||||
}
|
||||
},
|
||||
DefinitionRoot: {
|
||||
enter(root: any) {
|
||||
if (version === OASVersion.Version3_0_x) {
|
||||
components = root.components = root.components || {};
|
||||
} else if (version === OASVersion.Version2) {
|
||||
components = root;
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
return visitor;
|
||||
}
|
||||
176
src/cli.ts
Normal file
176
src/cli.ts
Normal file
@@ -0,0 +1,176 @@
|
||||
#!/usr/bin/env node
|
||||
import * as yargs from 'yargs';
|
||||
|
||||
import { validate } from './validate';
|
||||
|
||||
import { bundle } from './bundle';
|
||||
import { dumpYaml, saveYaml } from './utils';
|
||||
import { formatMessages } from './format/format';
|
||||
import { ResolveError, YamlParseError } from './resolve';
|
||||
import { loadConfig } from './config/config';
|
||||
import { NormalizedReportMessage } from './walk';
|
||||
import { red, green, yellow } from 'colorette';
|
||||
|
||||
yargs // eslint-disable-line
|
||||
.command(
|
||||
'lint [entrypoints...]',
|
||||
'Lint definition',
|
||||
(yargs) =>
|
||||
yargs
|
||||
.positional('entrypoints', {
|
||||
array: true,
|
||||
type: 'string',
|
||||
demandOption: true,
|
||||
})
|
||||
.option('short', {
|
||||
description: 'Reduce output to required minimun.',
|
||||
type: 'boolean',
|
||||
})
|
||||
.option('max-messages', {
|
||||
requiresArg: true,
|
||||
description: 'Reduce output to max N messages.',
|
||||
type: 'number',
|
||||
default: 100,
|
||||
})
|
||||
.option('config', {
|
||||
description: 'Specify custom config file',
|
||||
requiresArg: true,
|
||||
type: 'string',
|
||||
}),
|
||||
async (argv) => {
|
||||
const config = await loadConfig(argv.config);
|
||||
|
||||
for (const entryPoint of argv.entrypoints) {
|
||||
try {
|
||||
console.time(`${entryPoint} validation took`);
|
||||
const results = await validate({
|
||||
ref: entryPoint,
|
||||
config: config.lint,
|
||||
});
|
||||
console.timeEnd(`${entryPoint} validation took`);
|
||||
|
||||
console.time(`Formatting messages took`);
|
||||
formatMessages(results, {
|
||||
format: argv.short ? 'short' : 'full',
|
||||
maxMessages: argv['max-messages'],
|
||||
});
|
||||
|
||||
const totals = getTotals(results);
|
||||
printLintTotals(totals);
|
||||
|
||||
console.timeEnd(`Formatting messages took`);
|
||||
|
||||
process.exit(totals.errors > 0 ? 1 : 0);
|
||||
} catch (e) {
|
||||
handleError(e, entryPoint);
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.command(
|
||||
'bundle [entrypoints...]',
|
||||
'Bundle definition',
|
||||
(yargs) =>
|
||||
yargs
|
||||
.positional('entrypoints', {
|
||||
array: true,
|
||||
type: 'string',
|
||||
demandOption: true,
|
||||
})
|
||||
.option('config', {
|
||||
description: 'Specify custom config file',
|
||||
type: 'string',
|
||||
})
|
||||
.options({
|
||||
output: { type: 'string', alias: 'o' },
|
||||
}),
|
||||
async (argv) => {
|
||||
for (const entryPoint of argv.entrypoints) {
|
||||
try {
|
||||
console.time(`${entryPoint} bundle took`);
|
||||
|
||||
const { bundle: result, messages } = await bundle({
|
||||
ref: entryPoint,
|
||||
});
|
||||
|
||||
console.timeEnd(`${entryPoint} bundle took`);
|
||||
|
||||
if (result) {
|
||||
const output = dumpYaml(result);
|
||||
if (!argv.output) {
|
||||
process.stdout.write(output);
|
||||
} else {
|
||||
saveYaml(argv.output, result);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(messages.length ? 'Failed to bundle' : 'Bundled successfully');
|
||||
formatMessages(messages, {});
|
||||
} catch (e) {
|
||||
handleError(e, entryPoint);
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.demandCommand(1)
|
||||
.strict().argv;
|
||||
|
||||
function handleError(e: Error, ref: string) {
|
||||
if (e instanceof ResolveError) {
|
||||
process.stdout.write(
|
||||
`Failed to resolve entrypoint definition at ${ref}:\n\n - ${e.message}\n`,
|
||||
);
|
||||
} else if (e instanceof YamlParseError) {
|
||||
process.stdout.write(`Failed to parse entrypoint definition at ${ref}:\n\n - ${e.message}\n`);
|
||||
// TODO: codeframe
|
||||
} else {
|
||||
process.stdout.write(`Something went wrong when processing ${ref}:\n\n - ${e.message}\n`);
|
||||
throw e;
|
||||
}
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
function printLintTotals(totals: Totals) {
|
||||
if (totals.errors > 0) {
|
||||
process.stderr.write(
|
||||
red(
|
||||
`❌ Validation failed with ${pluralize('error', totals.errors)} and ${pluralize(
|
||||
'warning',
|
||||
totals.warnings,
|
||||
)}\n`,
|
||||
),
|
||||
);
|
||||
} else if (totals.warnings > 0) {
|
||||
process.stderr.write(green('Woohoo! Your OpenAPI definition is valid 🎉\n'));
|
||||
process.stderr.write(yellow(`You have ${pluralize('warning', totals.warnings)}\n`));
|
||||
} else {
|
||||
process.stderr.write(green('Woohoo! Your OpenAPI definition is valid 🎉\n'));
|
||||
}
|
||||
|
||||
console.log();
|
||||
}
|
||||
|
||||
type Totals = {
|
||||
errors: number;
|
||||
warnings: number;
|
||||
};
|
||||
|
||||
function getTotals(messages: NormalizedReportMessage[]): Totals {
|
||||
let errors = 0;
|
||||
let warnings = 0;
|
||||
|
||||
for (const m of messages) {
|
||||
if (m.severity === 'error') errors++;
|
||||
if (m.severity === 'warning') warnings++;
|
||||
}
|
||||
|
||||
return {
|
||||
errors,
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
function pluralize(label: string, num: number) {
|
||||
return num === 1 ? `1 ${label}` : `${num} ${label}s`;
|
||||
}
|
||||
@@ -1,62 +0,0 @@
|
||||
import isEqual from 'lodash.isequal';
|
||||
|
||||
// True when `error` matches `group`'s identity: same message, same joined
// path, same severity, and same exact source location fields.
const errorBelongsToGroup = (error, group) =>
  error.message === group.message
  && error.path.join('/') === group.path.join('/')
  && error.severity === group.severity
  && error.location.startIndex === group.location.startIndex
  && error.location.endIndex === group.location.endIndex
  && error.location.possibleAlternate === group.location.possibleAlternate;

// True when the error's referencedFrom place is already recorded on the group.
const errorAlreadyInGroup = (error, group) =>
  group.referencedFromPlaces.some((place) => isEqual(place, error.referencedFrom));

// Seeds a new group from a single error, carrying over its display fields.
const groupFromError = (error) => {
  const {
    message,
    location,
    path,
    codeFrame,
    value,
    file,
    severity,
    enableCodeframe,
    target,
    possibleAlternate,
    fromRule,
  } = error;
  return {
    message,
    location,
    path,
    codeFrame,
    value,
    file,
    severity,
    enableCodeframe,
    target,
    possibleAlternate,
    fromRule,
    referencedFromPlaces: error.referencedFrom ? [error.referencedFrom] : [],
  };
};

// Records the error's referencedFrom place on the group, deduplicated.
const addErrorToGroup = (error, group) => {
  if (error.referencedFrom && !errorAlreadyInGroup(error, group)) {
    group.referencedFromPlaces.push(error.referencedFrom);
  }
  return true;
};

// Collapses duplicate errors (same identity per errorBelongsToGroup) into
// groups, accumulating the distinct places each one is referenced from.
export const groupErrors = (errors) => {
  const groups = [];
  for (const error of errors) {
    const existing = groups.find((group) => errorBelongsToGroup(error, group));
    if (existing) {
      addErrorToGroup(error, existing);
    } else {
      groups.push(groupFromError(error));
    }
  }
  return groups;
};
|
||||
|
||||
// Buckets grouped messages by their source file path, preserving order.
export const groupByFiles = (result) => {
  const fileGroups = {};
  for (const row of result) {
    if (!fileGroups[row.file]) {
      fileGroups[row.file] = [];
    }
    fileGroups[row.file].push(row);
  }
  return fileGroups;
};
|
||||
282
src/cli/index.js
282
src/cli/index.js
@@ -1,282 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import chalk from 'chalk';
|
||||
import commander from 'commander';
|
||||
|
||||
import fs from 'fs';
|
||||
import {
|
||||
join, basename, dirname, extname,
|
||||
} from 'path';
|
||||
import * as chockidar from 'chokidar';
|
||||
|
||||
import RedoclyClient from '../redocly';
|
||||
import { promptUser } from './utils';
|
||||
import { validateFromFile, validateFromUrl } from '../validate';
|
||||
import { bundle, bundleToFile } from '../bundle';
|
||||
|
||||
import { isFullyQualifiedUrl, debounce } from '../utils';
|
||||
|
||||
import { outputMessages, printValidationHeader } from './outputMessages';
|
||||
import { getFallbackEntryPointsOrExit, getConfig } from '../config';
|
||||
|
||||
import startPreviewServer from '../preview-docs';
|
||||
|
||||
const program = new commander.Command();
|
||||
|
||||
// Validates one entrypoint (URL or local file), prints its per-file stats,
// and returns { errors, warnings } totals for aggregation by the caller.
const validateFile = async (filePath, options, cmdObj) => {
  // A path that does not exist on disk but is a fully-qualified URL is
  // validated via the URL resolver; everything else is read from disk.
  const useUrl = !fs.existsSync(filePath) && isFullyQualifiedUrl(filePath);
  if (useUrl) {
    process.stdout.write('Will validate from URL\n');
  }
  const result = useUrl
    ? await validateFromUrl(filePath, options)
    : await validateFromFile(filePath, options);

  const resultStats = outputMessages(result, cmdObj);
  const { totalErrors, totalWarnings } = resultStats;

  process.stdout.write(
    `${chalk.blueBright(filePath)} results. Errors: ${totalErrors}, warnings: ${totalWarnings}\n`,
  );

  return {
    errors: totalErrors,
    warnings: totalWarnings,
  };
};
|
||||
|
||||
// Entry point for the legacy commander-based CLI. Registers the
// registry:login/logout, bundle, validate and preview-docs commands, then
// parses process.argv (defaulting to -h when no arguments were given).
const cli = () => {
  // Version is read from the package manifest one directory up.
  const f = fs.readFileSync(`${__dirname}/../package.json`, 'utf-8');
  const { version } = JSON.parse(f);

  program
    .version(version, '-v, --version', 'Output current version of the OpenAPI CLI.');

  // registry:login — prompts for an access token and stores it via RedoclyClient.
  program
    .command('registry:login')
    .description('Login to the Redoc.ly API Registry with access token')
    .option('-p, --prompt', 'Ask for credentials instead of looking them in the .env or enviroment variables')
    .action(async () => {
      const clientToken = await promptUser(
        chalk.green(`\n 🔑 Copy your access token from ${chalk.blue(`https://app.${process.env.REDOCLY_DOMAIN || 'redoc.ly'}/profile`)} and paste it below`),
      );
      const client = new RedoclyClient();
      client.login(clientToken);
    });

  // registry:logout — clears stored credentials.
  program
    .command('registry:logout')
    .description('Clear stored credentials for Redoc.ly API Registry')
    .action(async () => {
      const client = new RedoclyClient();
      client.logout();
    });

  // bundle — bundles each entrypoint to a file (or stdout), honoring
  // --output/--ext/--force; exits non-zero on errors unless --force.
  program
    .command('bundle [entryPoints...]')
    .description('Create a bundle using <entryPoint> as a root document.')
    .option('-o, --output <outputName>', 'Filename or folder for the bundle.')
    .option('--short', 'Reduce output in case of bundling errors.')
    .option('--ext <ext>', 'Output extension: json, yaml or yml')
    .option('-f, --force', 'Produce bundle output file even if validation errors were encountered')
    .action(async (entryPoints, cmdObj) => {
      if (cmdObj.ext && ['yaml', 'yml', 'json'].indexOf(cmdObj.ext) === -1) {
        process.stdout.write(
          'Unsupported value for --ext option. Supported values are: yaml, yml or json',
        );
        process.exit(1);
      }

      const config = getConfig({});
      // eslint-disable-next-line no-param-reassign
      entryPoints = getFallbackEntryPointsOrExit(entryPoints, config);

      // --output without an extension is treated as a target directory.
      const isOutputDir = cmdObj.output && !extname(cmdObj.output);
      const ext = cmdObj.ext || extname(cmdObj.output || '').substring(1) || 'yaml';
      const dir = isOutputDir ? cmdObj.output : dirname(cmdObj.output || '');

      const results = {
        errors: 0,
        warnings: 0,
      };

      for (const entryPoint of entryPoints) {
        let output;
        if (cmdObj.output) {
          const fileName = isOutputDir
            ? basename(entryPoint, extname(entryPoint))
            : basename(cmdObj.output, `.${ext}`);
          output = join(dir, `${fileName}.${ext}`);
        }

        const bundlingStatus = await bundleToFile(entryPoint, output, cmdObj.force);
        const resultStats = outputMessages(bundlingStatus, cmdObj);

        if (resultStats.totalErrors === 0) {
          // we do not want to output anything to stdout if it's being piped.
          if (output) {
            process.stdout.write(`Created a bundle for ${entryPoint} at ${output}\n`);
          }
        } else {
          if (cmdObj.force) {
            process.stderr.write(
              `Created a bundle for ${entryPoint} at ${output}. Errors ignored because of --force\n`,
            );
          } else {
            process.stderr.write(
              `Errors encountered while bundling ${entryPoint}: bundle not created (use --force to ignore errors)\n`,
            );
          }
          results.errors += resultStats.totalErrors;
          results.warnings += resultStats.totalWarnings;
        }
      }
      process.exit(results.errors === 0 || cmdObj.force ? 0 : 1);
    });

  // validate — validates each entrypoint and exits non-zero if any errors.
  program
    .command('validate [entryPoints...]')
    .description('Validate given OpenAPI 3 definition file.')
    .option('--short', 'Reduce output to required minimun.')
    .option('--no-frame', 'Print no codeframes with errors.')
    .option('--config <path>', 'Specify custom yaml or json config')
    .action(async (entryPoints, cmdObj) => {
      const options = {};
      const results = {
        errors: 0,
        warnings: 0,
      };

      const config = getConfig({});
      // eslint-disable-next-line no-param-reassign
      entryPoints = getFallbackEntryPointsOrExit(entryPoints, config);

      options.codeframes = cmdObj.frame;
      if (cmdObj.config) options.configPath = cmdObj.config;

      for (let i = 0; i < entryPoints.length; i++) {
        printValidationHeader(entryPoints[i]);

        const msgs = await validateFile(entryPoints[i], options, cmdObj);
        results.errors += msgs.errors;
        results.warnings += msgs.warnings;
      }
      if (entryPoints.length > 1) {
        process.stdout.write(`Validation results:\nerrors: ${results.errors}\nwarnings: ${results.warnings}\n`);
      }
      process.exit(results.errors > 0 ? 1 : 0);
    });

  function myParseInt(value) {
    return parseInt(value, 10);
  }

  // preview-docs — serves live docs for one entrypoint, rebundling on change.
  program
    .command('preview-docs [entryPoint]')
    .description('Preview API Reference docs for the specified entrypoint OAS definition')
    .option('-p, --port <value>', 'Preview port', myParseInt, 8080)
    .option('--use-community-edition', 'Use Redoc CE for docs preview.')
    .action(async (entryPoint, cmdObj) => {
      const output = 'dist/openapi.yaml';

      let config = getConfig({});
      // eslint-disable-next-line no-param-reassign, prefer-destructuring
      entryPoint = getFallbackEntryPointsOrExit(entryPoint ? [entryPoint] : [], config)[0];

      // cachedBundle holds the (promise of the) latest bundle served to clients.
      let cachedBundle;
      const deps = new Set();

      async function getBundle() {
        return cachedBundle;
      }

      // Re-bundles the entrypoint and re-syncs the file watcher with the
      // bundle's current file dependencies.
      async function updateBundle() {
        process.stdout.write('\nBundling...\n\n');
        const { bundle: openapiBundle, result, fileDependencies } = await bundle(entryPoint, output, {
          lint: {
            codeframes: false,
          },
        });

        // Stop watching files that are no longer dependencies; watch new ones.
        const removed = [...deps].filter((x) => !fileDependencies.has(x));
        watcher.unwatch(removed);
        watcher.add([...fileDependencies]);
        deps.clear();
        fileDependencies.forEach(deps.add, deps);

        const resultStats = outputMessages(result, { short: true });

        // NOTE(review): the ternary's error branch is unreachable — this whole
        // write is guarded by totalErrors === 0; confirm intended behavior.
        if (resultStats.totalErrors === 0) {
          process.stdout.write(
            resultStats.totalErrors === 0
              ? `Created a bundle for ${entryPoint} ${resultStats.totalWarnings > 0 ? 'with warnings' : 'successfully'}\n`
              : chalk.yellow(`Created a bundle for ${entryPoint} with errors. Docs may be broken or not accurate\n`),
          );
        }

        return openapiBundle;
      }

      const redoclyClient = new RedoclyClient();
      const isAuthorizedWithRedocly = await redoclyClient.isAuthorizedWithRedocly();

      // Deferred so `watcher` (used inside updateBundle) exists by the time it runs.
      setImmediate(() => {
        cachedBundle = updateBundle();
      }); // initial cache

      const referenceDocs = config.referenceDocs || {};

      const redocOptions = {
        ...referenceDocs,
        useCommunityEdition: cmdObj.useCommunityEdition || referenceDocs.useCommunityEdition,
        licenseKey: process.env.REDOCLY_LICENSE_KEY || referenceDocs.licenseKey,
      };

      const hotClients = await startPreviewServer(cmdObj.port, {
        getBundle,
        getOptions: () => redocOptions,
        useRedocPro: (isAuthorizedWithRedocly || redocOptions.licenseKey) && !redocOptions.useCommunityEdition,
      });

      const watcher = chockidar.watch([entryPoint, config.configPath], {
        disableGlobbing: true,
        ignoreInitial: true,
      });

      // Rebundle at most once per 2s burst of file events, then notify clients.
      const debouncedUpdatedeBundle = debounce(async () => {
        cachedBundle = updateBundle();
        await cachedBundle;
        hotClients.broadcast('{"type": "reload", "bundle": true}');
      }, 2000);

      const changeHandler = async (type, file) => {
        process.stdout.write(`${chalk.green('watch')} ${type} ${chalk.blue(file)}\n`);
        // Config changes reload the config and the page, but skip rebundling.
        if (file === config.configPath) {
          config = getConfig({ configPath: file });
          hotClients.broadcast(JSON.stringify({ type: 'reload' }));
          return;
        }

        debouncedUpdatedeBundle();
      };

      watcher.on('change', changeHandler.bind(undefined, 'changed'));
      watcher.on('add', changeHandler.bind(undefined, 'added'));
      watcher.on('unlink', changeHandler.bind(undefined, 'removed'));

      watcher.on('ready', () => {
        process.stdout.write(`\n 👀 Watching ${chalk.blue(entryPoint)} and all related resources for changes\n`);
      });
    });

  program.on('command:*', () => {
    process.stderr.write(`\nUnknown command ${program.args.join(' ')}\n\n`);
    program.outputHelp();
  });

  // With no arguments at all, show help instead of doing nothing.
  if (process.argv.length === 2) process.argv.push('-h');

  program.parse(process.argv);
};
|
||||
|
||||
export default cli;
|
||||
@@ -1,133 +0,0 @@
|
||||
import path from 'path';
|
||||
import chalk from 'chalk';
|
||||
|
||||
import {
|
||||
outputLightBlue,
|
||||
outputBgRed,
|
||||
outputGrey,
|
||||
outputBgYellow,
|
||||
outputRed,
|
||||
outputBgLightBlue,
|
||||
outputYellow,
|
||||
outputUnderline,
|
||||
} from '../utils';
|
||||
|
||||
import { messageLevels } from '../error/default';
|
||||
|
||||
import { groupByFiles, groupErrors } from './groupMessages';
|
||||
|
||||
// Renders "<relative file>:<line>:<col>" padded and colorized by severity.
// NOTE(review): `longestPath` is undefined at one call site (prettyPrint),
// making padEnd(NaN) a no-op — confirm whether padding was intended there.
const colorizeMessageHeader = (msg, longestPath) => {
  const msgHeader = `${path.relative(process.cwd(), msg.file)}:${msg.location.startLine}:${msg.location.startCol}`;
  switch (msg.severity) {
    case messageLevels.ERROR:
      // NOTE(review): outputBgRed is applied twice here, while the other
      // branches pair a background color with a foreground color — possibly
      // meant outputBgRed(outputRed(...)); confirm intended styling.
      return outputBgRed(outputBgRed(msgHeader.padEnd(longestPath + 2 - 20)));
    case messageLevels.WARNING:
      return outputBgYellow(outputRed(msgHeader.padEnd(longestPath + 2 - 20)));
    case messageLevels.INFO:
      return outputBgLightBlue(outputRed(msgHeader.padEnd(longestPath + 2 - 20)));
    default:
      return msgHeader;
  }
};
|
||||
|
||||
// Colorizes a rule name by severity level; unknown levels pass through unchanged.
const colorizeRuleName = (error, severity) => {
  if (severity === messageLevels.ERROR) return outputRed(error);
  if (severity === messageLevels.WARNING) return outputYellow(error);
  if (severity === messageLevels.INFO) return outputBgLightBlue(error);
  return error;
};
|
||||
|
||||
|
||||
// Colorizes each path segment; JSON-pointer-ish segments (leading '/') get
// bracket-quoted highlighting, plain segments are greyed out.
const pathImproveReadability = (msgPath) => msgPath.map((el) => {
  if (el[0] === '/') {
    return outputGrey('[\'') + outputLightBlue(el) + outputGrey('\']');
  }
  return outputGrey(el);
});

// Formats one referencing place as "<file>:<line> #/<colorized path>".
const prettifyReferencedFrom = (row) => {
  const position = outputLightBlue(`${row.file}:${row.startLine}`);
  const pointer = outputGrey(`#/${pathImproveReadability(row.path).join(outputGrey('/'))}`);
  return `${position} ${pointer}`;
};

// Renders the numbered "referenced from" list, or '' when there is none.
const renderReferencedFrom = (referencedFromPlaces, severity) => {
  if (referencedFromPlaces.length === 0) return '';
  const list = referencedFromPlaces
    .map((row, id) => `${id + 1}) ${prettifyReferencedFrom(row)}`)
    .join('\n');
  return `This ${severity.toLowerCase()} is referenced from:\n${list}`;
};
|
||||
|
||||
// Human-readable severity label; anything that is not WARNING reads as 'Error'.
const getMsgSeverity = (msg) => (
  msg.severity === messageLevels.WARNING ? 'Warning' : 'Error'
);
|
||||
|
||||
// Full (non --short) rendering of one grouped message: header, message text,
// optional "did you mean" hint, optional codeframe, originating rule, and the
// list of referencing places.
// NOTE(review): colorizeMessageHeader is called without its `longestPath`
// argument, so its padEnd computes NaN and pads nothing — confirm intent.
export const prettyPrint = (i, error) => {
  const { possibleAlternate } = error;
  const message = `[${i}] ${colorizeMessageHeader(error)} ${outputGrey(`at #/${outputGrey(pathImproveReadability(error.path).join(outputGrey('/')))}`)}`
    + `\n${error.message}\n`
    + `${possibleAlternate && possibleAlternate.possibleAlternate ? `\nDid you mean: ${outputLightBlue(possibleAlternate.possibleAlternate)} ?\n` : ''}`
    + `${error.enableCodeframe ? `\n${error.codeFrame}\n\n` : ''}`
    + `${error.fromRule ? `${getMsgSeverity(error)} was generated by ${chalk.red(error.fromRule)} rule.\n\n` : ''}`
    + `${renderReferencedFrom(error.referencedFromPlaces, getMsgSeverity(error))}`
    + '\n\n';
  return message;
};
|
||||
|
||||
// Intentionally a no-op; kept so callers don't break if a header is reintroduced.
// nope for now [Roman]
// well, at least I've tried. Not a designer, for sure [Sergey]
export const printValidationHeader = (_filePath) => {};
|
||||
|
||||
// One-line (--short) rendering: padded "line:col", colorized rule name, message.
export const prettyPrintShort = (i, error, longestPath, longestRuleName) => {
  const position = `${error.location.startLine}:${error.location.startCol}`.padEnd(longestPath);
  const rule = colorizeRuleName(error.fromRule.padEnd(longestRuleName + 2), error.severity);
  return `${position} ${rule} ${error.message}\n`;
};
|
||||
|
||||
// Groups, sorts and prints all messages to stderr (short or full format) and
// returns { totalErrors, totalWarnings } for the caller's exit-code logic.
export const outputMessages = (result, cmdObj) => {
  const errorsGrouped = groupErrors(result);
  const groupedByFile = groupByFiles(errorsGrouped);

  const totalErrors = errorsGrouped.filter(
    (msg) => msg.severity === messageLevels.ERROR,
  ).length;
  const totalWarnings = errorsGrouped.filter(
    (msg) => msg.severity === messageLevels.WARNING,
  ).length;

  if (cmdObj.short && errorsGrouped.length !== 0) {
    // FIX: the original comparators returned booleans, which Array#sort treats
    // as 0/1 and never orders correctly; numeric comparators guarantee the
    // longest string actually ends up last before pop().
    const posLength = errorsGrouped
      .map((msg) => `${msg.location.startLine}:${msg.location.startCol}`)
      .sort((e, o) => e.length - o.length)
      .pop()
      .length;

    const longestRuleName = errorsGrouped
      .map((msg) => msg.fromRule)
      .sort((e, o) => e.length - o.length)
      .pop()
      .length;

    Object.keys(groupedByFile).forEach((fileName) => {
      process.stderr.write(`${outputUnderline(`${path.relative(process.cwd(), fileName)}:\n`)}`);
      groupedByFile[fileName]
        // FIX: numeric descending-severity comparator (the original boolean
        // comparator had the same descending intent but never swapped).
        .sort((a, b) => b.severity - a.severity)
        .forEach(
          (entry, id) => process.stderr.write(
            prettyPrintShort(id + 1, entry, posLength, longestRuleName),
          ),
        );
      process.stderr.write('\n');
    });
  } else {
    if (errorsGrouped.length > 0) process.stderr.write('\n\n');
    errorsGrouped
      .sort((a, b) => b.severity - a.severity) // FIX: numeric comparator
      .forEach((entry, id) => process.stderr.write(prettyPrint(id + 1, entry)));
  }

  return {
    totalErrors,
    totalWarnings,
  };
};
|
||||
@@ -1,17 +0,0 @@
|
||||
import * as readline from 'readline';
|
||||
|
||||
// Asks a single question on stdin/stdout and resolves with the typed answer.
export async function promptUser(query) {
  return new Promise((resolve) => {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });

    const onAnswer = (answer) => {
      rl.close();
      resolve(answer);
    };

    rl.question(`${query}:\n\n `, onAnswer);
  });
}
|
||||
|
||||
export default promptUser;
|
||||
116
src/config.js
116
src/config.js
@@ -1,116 +0,0 @@
|
||||
import fs from 'fs';
|
||||
import merge from 'merge-deep';
|
||||
import yaml from 'js-yaml';
|
||||
import * as path from 'path';
|
||||
|
||||
let warningShown = false;
|
||||
|
||||
// Loads and merges the effective config: built-in defaults, then the user's
// config file (auto-discovered when options.configPath is absent), then the
// passed-in options. Also resolves lint extension modules and HTTP headers.
export function getConfig(options) {
  let config = {};
  let { configPath } = options;
  if (!configPath) {
    configPath = `${process.cwd()}/.openapi-cli.yaml`;

    // Preferred names first; the legacy .openapi-cli.* names warn once.
    if (fs.existsSync('.redocly.yaml')) {
      configPath = path.resolve('.redocly.yaml');
    } else if (fs.existsSync('.redocly.yml')) {
      configPath = path.resolve('.redocly.yml');
    } else if (fs.existsSync('.openapi-cli.yaml')) {
      if (!warningShown) process.stderr.write('warning: .openapi-cli.yaml is deprecated, rename to .redocly.yaml\n');
      configPath = path.resolve('.openapi-cli.yaml');
    } else if (fs.existsSync('.openapi-cli.yml')) {
      if (!warningShown) process.stderr.write('warning: .openapi-cli.yml is deprecated, rename to .redocly.yml\n');
      configPath = path.resolve('.openapi-cli.yml');
    }
  }

  const defaultConfigRaw = fs.readFileSync(`${__dirname}/.redocly.yaml`, 'utf-8');
  const defaultConfig = yaml.safeLoad(defaultConfigRaw);

  if (fs.existsSync(configPath)) {
    const configRaw = fs.readFileSync(configPath, 'utf-8');
    config = yaml.safeLoad(configRaw);

    // Legacy layout had lint settings at the top level; wrap them under "lint".
    if (config.rules || config.transformers || config.typeExtension || config.customRules) {
      if (!warningShown) {
        process.stderr.write(
          'warning: top level "rules", "transformers", "typeExtension" and "customRules" '
          + 'are deprecated. Move them under the "lint" field.\n',
        );
      }

      warningShown = true;
      config = { lint: config };
    }
  }

  const resolvedConfig = merge(defaultConfig, config, options);
  resolvedConfig.configPath = configPath;

  // FIX: this guard used to sit AFTER resolvedConfig.lint was dereferenced
  // below, crashing when neither the default nor the user config defined a
  // "lint" section; it must run before the first use.
  if (!resolvedConfig.lint) {
    resolvedConfig.lint = {};
  }

  const lintConfig = resolvedConfig.lint;

  if (!lintConfig.typeExtension) {
    lintConfig.typeExtension = `${__dirname}/typeExtensionDefault.js`;
  } else {
    lintConfig.typeExtension = `${process.cwd()}/${lintConfig.typeExtension}`;
  }

  const definitionResolver = require(lintConfig.typeExtension);
  lintConfig.definitionResolver = definitionResolver;

  lintConfig.customRules = lintConfig.customRules
    ? `${process.cwd()}/${lintConfig.customRules}` : `${__dirname}/customRulesDefault.js`;
  const rulesExtensions = require(lintConfig.customRules);
  lintConfig.rulesExtensions = rulesExtensions;

  lintConfig.transformers = lintConfig.transformers
    ? `${process.cwd()}/${lintConfig.transformers}` : `${__dirname}/customRulesDefault.js`;
  const transformingVisitors = require(lintConfig.transformers);
  lintConfig.transformingVisitors = transformingVisitors;

  // Resolve header values from environment variables where requested.
  resolvedConfig.lint.headers = (
    (
      resolvedConfig.resolve
      && resolvedConfig.resolve.http
      && resolvedConfig.resolve.http.headers)
    || []
  ).map((header) => ({
    ...header,
    value: header.envVariable ? process.env[header.envVariable] : header.value,
  }));

  return resolvedConfig;
}
|
||||
|
||||
// Convenience accessor for just the "lint" section of the resolved config.
export function getLintConfig(options) {
  const config = getConfig(options);
  return config.lint;
}
|
||||
|
||||
// Returns the configured apiDefinitions aliases, or null when none are defined.
export function getDefinitionNames(config = getConfig({})) {
  const { apiDefinitions } = config;
  return apiDefinitions ? Object.keys(apiDefinitions) : null;
}
|
||||
|
||||
// Resolves CLI entrypoints: with no args, falls back to every definition in
// the config; with args, maps config-defined aliases to their paths. Exits
// with an error when nothing resolves.
export function getFallbackEntryPointsOrExit(argsEntrypoints, config = getConfig({})) {
  const { apiDefinitions } = config;
  const noArgs = !argsEntrypoints || argsEntrypoints.length === 0;

  let res = argsEntrypoints;
  if (noArgs && apiDefinitions && Object.keys(apiDefinitions).length > 0) {
    res = Object.values(apiDefinitions);
  } else if (!noArgs && apiDefinitions) {
    // Aliases resolve to their configured paths; real paths pass through.
    res = res.map((aliasOrPath) => apiDefinitions[aliasOrPath] || aliasOrPath);
  }

  if (!res || res.length === 0) {
    process.stderr.write('error: missing required argument "entryPoints"\n');
    process.exit(1);
  }

  return res;
}
|
||||
32
src/config/all.ts
Normal file
32
src/config/all.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { RulesConfig } from './config';
|
||||
|
||||
export default {
|
||||
rules: {
|
||||
schema: 'error',
|
||||
'operation-2xx-response': 'error',
|
||||
'operation-operationId-unique': 'error',
|
||||
'operation-parameters-unique': 'error',
|
||||
'path-parameters-defined': 'error',
|
||||
'operation-tag-defined': 'error',
|
||||
'example-value-or-external-value': 'error',
|
||||
'typed-enum': 'error',
|
||||
'path-no-trailing-slashes': 'error',
|
||||
'path-declaration-must-exist': 'error',
|
||||
'operationId-valid-in-url': 'error',
|
||||
'openapi-tags-alphabetical': 'error',
|
||||
'server-not-example.com': 'error',
|
||||
'server-trailing-slash': 'error',
|
||||
'info-description': 'error',
|
||||
'tag-description': 'error',
|
||||
'info-contact': 'error',
|
||||
'info-license': 'error',
|
||||
'operation-description': 'error',
|
||||
'no-unused-schemas': 'error',
|
||||
'path-not-include-query': 'error',
|
||||
'parameter-description': 'error',
|
||||
'operation-singular-tag': 'error',
|
||||
'info-license-url': 'error',
|
||||
'operation-security-defined': 'error',
|
||||
'no-unresolved-refs': 'error',
|
||||
},
|
||||
} as RulesConfig;
|
||||
8
src/config/builtIn.ts
Normal file
8
src/config/builtIn.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import recommended from './recommended';
|
||||
import all from './all';
|
||||
import { RulesConfig } from './config';
|
||||
|
||||
export const builtInConfigs: Record<string, RulesConfig> = {
|
||||
recommended,
|
||||
all,
|
||||
};
|
||||
227
src/config/config.ts
Normal file
227
src/config/config.ts
Normal file
@@ -0,0 +1,227 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import { builtInConfigs } from './builtIn';
|
||||
import { rules as builtinRules } from '../rules/builtin';
|
||||
import { loadYaml, notUndefined } from '../utils';
|
||||
import oas3 from '../rules/oas3';
|
||||
import { OASVersion } from '../validate';
|
||||
|
||||
import { MessageSeverity } from '../walk';
|
||||
import { OAS3RuleSet } from '../validate';
|
||||
|
||||
import recommended from './recommended';
|
||||
import { red, blue } from 'colorette';
|
||||
|
||||
export type RuleConfig =
|
||||
| MessageSeverity
|
||||
| 'off'
|
||||
| {
|
||||
severity: MessageSeverity;
|
||||
options?: Record<string, any>;
|
||||
};
|
||||
|
||||
export type RulesConfig = {
|
||||
plugins?: (string | Plugin)[];
|
||||
extends?: string[];
|
||||
rules?: Record<string, RuleConfig>;
|
||||
};
|
||||
|
||||
export type Plugin = {
|
||||
id: string;
|
||||
configs?: Record<string, RulesConfig>;
|
||||
rules?: {
|
||||
oas3?: OAS3RuleSet; // FIXME
|
||||
oas2?: any;
|
||||
};
|
||||
};
|
||||
|
||||
export type RawConfig = {
|
||||
apiDefinitions?: Record<string, string>;
|
||||
lint?: RulesConfig;
|
||||
};
|
||||
|
||||
export class LintConfig {
|
||||
plugins: Plugin[];
|
||||
rules: Record<string, RuleConfig>;
|
||||
|
||||
definedRules: { oas3: OAS3RuleSet[] } = {
|
||||
oas3: [],
|
||||
};
|
||||
|
||||
constructor(public rawConfig: RulesConfig, configFile?: string) {
|
||||
this.plugins = rawConfig.plugins ? resolvePlugins(rawConfig.plugins, configFile) : [];
|
||||
|
||||
this.plugins.push({
|
||||
id: '', // default plugin doesn't have id
|
||||
rules: {
|
||||
oas3: oas3,
|
||||
},
|
||||
});
|
||||
|
||||
const extendConfigs: RulesConfig[] = rawConfig.extends
|
||||
? resolvePresets(rawConfig.extends, this.plugins)
|
||||
: [recommended];
|
||||
|
||||
if (rawConfig.rules)
|
||||
extendConfigs.push({
|
||||
rules: rawConfig.rules,
|
||||
});
|
||||
|
||||
this.rules = mergeExtends(extendConfigs).rules;
|
||||
}
|
||||
|
||||
getRuleSettings(ruleId: string) {
|
||||
const settings = this.rules[ruleId] || 'off';
|
||||
if (typeof settings === 'string') {
|
||||
return {
|
||||
severity: settings,
|
||||
options: undefined,
|
||||
};
|
||||
} else {
|
||||
// @ts-ignore
|
||||
return { severity: 'error', ...settings };
|
||||
}
|
||||
}
|
||||
|
||||
getRulesForOASVersion(version: OASVersion) {
|
||||
switch (version) {
|
||||
case OASVersion.Version3_0_x:
|
||||
const oas3Rules: OAS3RuleSet[] = []; // default ruleset
|
||||
this.plugins.forEach((p) => p.rules?.oas3 && oas3Rules.push(p.rules.oas3));
|
||||
return oas3Rules;
|
||||
default:
|
||||
throw new Error('Not implemented');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class Config {
|
||||
apiDefinitions: Record<string, string>;
|
||||
lint: LintConfig;
|
||||
constructor(public rawConfig: RawConfig, public configFile?: string) {
|
||||
this.apiDefinitions = rawConfig.apiDefinitions || {};
|
||||
this.lint = new LintConfig(rawConfig.lint || {}, configFile);
|
||||
}
|
||||
}
|
||||
|
||||
export async function loadConfig(configPath?: string): Promise<Config> {
|
||||
if (configPath === undefined) {
|
||||
configPath = await findConfig();
|
||||
}
|
||||
|
||||
let rawConfig: RawConfig = {};
|
||||
// let resolvedPlugins: Plugin[] = [];
|
||||
|
||||
if (configPath !== undefined) {
|
||||
try {
|
||||
rawConfig = await loadYaml(configPath);
|
||||
} catch (e) {
|
||||
throw new Error(`Error parsing config file at \`${configPath}\`: ${e.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
return new Config(rawConfig, configPath);
|
||||
}
|
||||
|
||||
async function findConfig() {
|
||||
if (await existsAsync('.redocly.yaml')) {
|
||||
return '.redocly.yaml';
|
||||
} else if (await existsAsync('.redocly.yml')) {
|
||||
return '.redocly.yml';
|
||||
}
|
||||
}
|
||||
|
||||
function existsAsync(path: string) {
|
||||
return new Promise(function (resolve) {
|
||||
fs.exists(path, resolve);
|
||||
});
|
||||
}
|
||||
|
||||
function resolvePresets(presets: string[], plugins: Plugin[]) {
|
||||
return presets.map((presetName) => {
|
||||
let preset = builtInConfigs[presetName];
|
||||
if (!preset && presetName.indexOf('/') > -1) {
|
||||
const [pluginName, configName] = presetName.split('/');
|
||||
const plugin = plugins.find((p) => p.id === pluginName);
|
||||
if (!plugin) {
|
||||
throw new Error(`Invalid preset ${red(presetName)}: plugin ${pluginName} is not included`);
|
||||
}
|
||||
|
||||
preset = plugin.configs?.[configName]!;
|
||||
if (!preset) {
|
||||
throw new Error(
|
||||
`Invalid preset ${red(
|
||||
presetName,
|
||||
)}: plugin ${pluginName} doesnt export config with name ${configName}`,
|
||||
);
|
||||
}
|
||||
return preset;
|
||||
}
|
||||
|
||||
if (!preset) {
|
||||
throw new Error(`Invalid preset ${red(presetName)}: no such built-in preset`);
|
||||
}
|
||||
return preset;
|
||||
});
|
||||
}
|
||||
|
||||
function resolvePlugins(plugins: (string | Plugin)[] | null, configPath: string = ''): Plugin[] {
|
||||
if (!plugins) return [];
|
||||
|
||||
return plugins
|
||||
.map((p) => {
|
||||
// todo: resolve npm packages similar to eslint
|
||||
if (typeof p === 'string') {
|
||||
if (builtinRules[p]) return undefined;
|
||||
}
|
||||
const plugin =
|
||||
typeof p === 'string' ? (require(path.resolve(path.dirname(configPath), p)) as Plugin) : p;
|
||||
|
||||
const id = plugin.id;
|
||||
if (!id) {
|
||||
throw new Error(red(`Plugin must define \`id\` property in ${blue(p.toString())}`));
|
||||
}
|
||||
|
||||
if (plugin.rules) {
|
||||
if (!plugin.rules.oas3 && !plugin.rules.oas2) {
|
||||
throw new Error(`Plugin rules must have \`oas3\` or \`oas2\` rules "${p}}`);
|
||||
}
|
||||
if (plugin.rules.oas3) {
|
||||
plugin.rules.oas3 = prefixRules(plugin.rules.oas3, id);
|
||||
}
|
||||
if (plugin.rules.oas2) {
|
||||
plugin.rules.oas3 = prefixRules(plugin.rules.oas2, id);
|
||||
}
|
||||
}
|
||||
|
||||
return plugin;
|
||||
})
|
||||
.filter(notUndefined);
|
||||
}
|
||||
|
||||
function prefixRules<T extends Record<string, any>>(rules: T, prefix: string) {
|
||||
const res: any = {};
|
||||
for (const name of Object.keys(rules)) {
|
||||
res[`${prefix}/${name}`] = rules[name];
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
function mergeExtends(rulesConfList: RulesConfig[]) {
|
||||
const result: Omit<RulesConfig, 'rules'> & Required<Pick<RulesConfig, 'rules'>> = {
|
||||
rules: {},
|
||||
};
|
||||
|
||||
for (let rulesConf of rulesConfList) {
|
||||
if (rulesConf.extends) {
|
||||
throw new Error(
|
||||
`\`extends\` is not supported yet in shared configs: ${JSON.stringify(rulesConf, null, 2)}`,
|
||||
);
|
||||
}
|
||||
Object.assign(result.rules, rulesConf.rules);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
32
src/config/recommended.ts
Normal file
32
src/config/recommended.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { RulesConfig } from './config';
|
||||
|
||||
export default {
|
||||
rules: {
|
||||
schema: 'error',
|
||||
'operation-2xx-response': 'error',
|
||||
'operation-operationId-unique': 'error',
|
||||
'operation-parameters-unique': 'error',
|
||||
'path-parameters-defined': 'error',
|
||||
'example-value-or-external-value': 'error',
|
||||
'typed-enum': 'error',
|
||||
'operation-tag-defined': 'off',
|
||||
'path-no-trailing-slashes': 'warning',
|
||||
'operationId-valid-in-url': 'warning',
|
||||
'path-declaration-must-exist': 'error',
|
||||
'openapi-tags-alphabetical': 'off',
|
||||
'server-not-example.com': 'warning',
|
||||
'server-trailing-slash': 'warning',
|
||||
'info-description': 'warning',
|
||||
'tag-description': 'warning',
|
||||
'info-contact': 'off',
|
||||
'info-license': 'off',
|
||||
'operation-description': 'off',
|
||||
'no-unused-schemas': 'warning',
|
||||
'path-not-include-query': 'error',
|
||||
'parameter-description': 'warning',
|
||||
'operation-singular-tag': 'warning',
|
||||
'info-license-url': 'off',
|
||||
'operation-security-defined': 'error',
|
||||
'no-unresolved-refs': 'error',
|
||||
},
|
||||
} as RulesConfig;
|
||||
@@ -1,72 +0,0 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import path from 'path';
|
||||
|
||||
import loadRuleset, { loadRulesetExtension } from './loader';
|
||||
import isRuleEnabled from './visitors/utils';
|
||||
import { loadDefinitions } from './resolveDefinition';
|
||||
import { messageHelpers } from './error';
|
||||
import { resolveNodeNoSideEffects } from './resolver';
|
||||
|
||||
const validateFieldsRaw = (node, ctx, config, ruleName, validators) => {
|
||||
const result = [];
|
||||
|
||||
const vals = Object.keys(validators);
|
||||
for (let i = 0; i < vals.length; i += 1) {
|
||||
if (isRuleEnabled(config, vals[i])) {
|
||||
if (validators[vals[i]]) {
|
||||
ctx.path.push(vals[i]);
|
||||
const validate = validators[vals[i]].bind({ rule: ruleName, config });
|
||||
const res = validate(node, ctx, config);
|
||||
if (res) {
|
||||
if (Array.isArray(res)) result.push(...res);
|
||||
else result.push(res);
|
||||
}
|
||||
ctx.path.pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
const getRule = (ctx, ruleName) => {
|
||||
const result = ctx.allRules.filter((r) => r.constructor.rule === ruleName);
|
||||
return result ? result[0] : null;
|
||||
};
|
||||
|
||||
function createContext(node, sourceFile, filePath, config) {
|
||||
const [enabledRules, allRules] = loadRuleset(config);
|
||||
|
||||
config.headers = config.headers || [];
|
||||
return {
|
||||
openapiVersion: node.swagger ? 2 : 3,
|
||||
document: node,
|
||||
filePath: path.resolve(filePath),
|
||||
path: [],
|
||||
cache: {},
|
||||
visited: [],
|
||||
result: [],
|
||||
registryDependencies: [],
|
||||
definitionStack: [],
|
||||
definitions: loadDefinitions(config),
|
||||
pathStack: [],
|
||||
source: sourceFile,
|
||||
enableCodeframe: !!(config && (config.codeframes === 'on' || config.codeframes === true)),
|
||||
customRules: [
|
||||
...loadRulesetExtension(config, 'transformingVisitors'),
|
||||
...enabledRules, ...loadRulesetExtension(config, 'rulesExtensions'),
|
||||
],
|
||||
allRules,
|
||||
config,
|
||||
headers: config.headers,
|
||||
messageHelpers,
|
||||
validateFieldsRaw,
|
||||
getRule,
|
||||
resolveNode: resolveNodeNoSideEffects,
|
||||
|
||||
fileDependencies: new Set(),
|
||||
|
||||
resolveCache: {},
|
||||
};
|
||||
}
|
||||
|
||||
export default createContext;
|
||||
@@ -1,3 +0,0 @@
|
||||
module.exports = [
|
||||
|
||||
];
|
||||
@@ -1,425 +0,0 @@
|
||||
import yaml from "js-yaml";
|
||||
import fs from "fs";
|
||||
|
||||
import createError, { messageLevels, fromError } from "../default";
|
||||
|
||||
const createCtx = () => ({
|
||||
document: yaml.safeLoad(
|
||||
fs.readFileSync("./definitions/syntetic/syntetic-1.yaml", "utf-8")
|
||||
),
|
||||
filePath: "./definitions/syntetic/syntetic-1.yaml",
|
||||
path: [],
|
||||
cache: {},
|
||||
visited: [],
|
||||
definitionStack: [],
|
||||
pathStack: [],
|
||||
source: fs.readFileSync("./definitions/syntetic/syntetic-1.yaml", "utf-8"),
|
||||
enableCodeframe: true,
|
||||
});
|
||||
|
||||
describe("createError", () => {
|
||||
test("should create valid error", () => {
|
||||
const ctx = {
|
||||
...createCtx(),
|
||||
path: ["paths", "user", "get", "responses"],
|
||||
};
|
||||
expect(
|
||||
createError("This is a test error", {}, ctx, {
|
||||
severity: messageLevels.ERROR,
|
||||
})
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"codeFrame": "[90m22| operationId: userGet[39m
|
||||
[90m23| description: Get user[39m
|
||||
[90m24| [4m[31mresponses:[39m[90m[24m[39m
|
||||
[90m25|[39m[31m [4m[31m '200':[39m[31m[24m[39m
|
||||
[90m26|[39m[31m [4m[31m description: example description[39m[31m[24m[39m
|
||||
[90m27|[39m[31m [4m[31m content:[39m[31m[24m[39m
|
||||
[90m28|[39m[31m [4m[31m application/json:[39m[31m[24m[39m
|
||||
[90m29|[39m[31m [4m[31m schema:[39m[31m[24m[39m
|
||||
[90m30|[39m[31m [4m[31m type: object[39m[31m[24m[39m
|
||||
[90m31| project:[39m
|
||||
[90m32| get:[39m",
|
||||
"enableCodeframe": true,
|
||||
"file": "definitions/syntetic/syntetic-1.yaml",
|
||||
"fromRule": undefined,
|
||||
"location": Object {
|
||||
"endCol": 28,
|
||||
"endIndex": 600,
|
||||
"endLine": 30,
|
||||
"startCol": 7,
|
||||
"startIndex": 432,
|
||||
"startLine": 24,
|
||||
},
|
||||
"message": "This is a test error",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"possibleAlternate": undefined,
|
||||
"referencedFrom": null,
|
||||
"severity": 4,
|
||||
"target": undefined,
|
||||
"value": Object {},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test("should create error with alternative and from rule", () => {
|
||||
const ctx = {
|
||||
...createCtx(),
|
||||
path: [],
|
||||
enableCodeframe: false,
|
||||
};
|
||||
expect(
|
||||
createError("This is a test error", {}, ctx, {
|
||||
severity: messageLevels.ERROR,
|
||||
target: "key",
|
||||
possibleAlternate: "example",
|
||||
fromRule: "testing",
|
||||
})
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"codeFrame": null,
|
||||
"enableCodeframe": false,
|
||||
"file": "definitions/syntetic/syntetic-1.yaml",
|
||||
"fromRule": "testing",
|
||||
"location": Object {
|
||||
"endCol": 14,
|
||||
"endIndex": 14,
|
||||
"endLine": 1,
|
||||
"startCol": 0,
|
||||
"startIndex": 0,
|
||||
"startLine": 1,
|
||||
},
|
||||
"message": "This is a test error",
|
||||
"path": Array [],
|
||||
"possibleAlternate": "example",
|
||||
"referencedFrom": null,
|
||||
"severity": 4,
|
||||
"target": "key",
|
||||
"value": Object {},
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
||||
|
||||
describe("fromError", () => {
|
||||
test("basic test", () => {
|
||||
const ctx = {
|
||||
...createCtx(),
|
||||
path: ["paths", "user", "get", "responses"],
|
||||
pathStack: [
|
||||
{
|
||||
file: createCtx().filePath,
|
||||
path: ["paths", "user", "get", "responses"],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
ctx.pathStack[0].source = ctx.source;
|
||||
ctx.pathStack[0].document = ctx.document;
|
||||
|
||||
const baseError = createError("This is a test error", {}, ctx, {
|
||||
severity: messageLevels.ERROR,
|
||||
});
|
||||
ctx.path = ["paths", "project", "get", "responses"];
|
||||
expect(fromError(baseError, ctx)).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"cache": Object {},
|
||||
"codeFrame": "[90m22| operationId: userGet[39m
|
||||
[90m23| description: Get user[39m
|
||||
[90m24| [4m[31mresponses:[39m[90m[24m[39m
|
||||
[90m25|[39m[31m [4m[31m '200':[39m[31m[24m[39m
|
||||
[90m26|[39m[31m [4m[31m description: example description[39m[31m[24m[39m
|
||||
[90m27|[39m[31m [4m[31m content:[39m[31m[24m[39m
|
||||
[90m28|[39m[31m [4m[31m application/json:[39m[31m[24m[39m
|
||||
[90m29|[39m[31m [4m[31m schema:[39m[31m[24m[39m
|
||||
[90m30|[39m[31m [4m[31m type: object[39m[31m[24m[39m
|
||||
[90m31| project:[39m
|
||||
[90m32| get:[39m",
|
||||
"definitionStack": Array [],
|
||||
"document": null,
|
||||
"enableCodeframe": true,
|
||||
"file": "definitions/syntetic/syntetic-1.yaml",
|
||||
"filePath": "./definitions/syntetic/syntetic-1.yaml",
|
||||
"fromRule": undefined,
|
||||
"location": Object {
|
||||
"endCol": 28,
|
||||
"endIndex": 600,
|
||||
"endLine": 30,
|
||||
"startCol": 7,
|
||||
"startIndex": 432,
|
||||
"startLine": 24,
|
||||
},
|
||||
"message": "This is a test error",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"pathStack": Array [
|
||||
Object {
|
||||
"document": Object {
|
||||
"components": Object {
|
||||
"parameters": Object {
|
||||
"example": Object {
|
||||
"allOf": Array [
|
||||
Object {
|
||||
"in": "query",
|
||||
"name": "bla",
|
||||
"required": false,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
Object {
|
||||
"description": "blo",
|
||||
},
|
||||
Object {
|
||||
"description": "bla",
|
||||
},
|
||||
],
|
||||
},
|
||||
"genericExample": Object {
|
||||
"in": "query",
|
||||
"name": "example",
|
||||
"required": true,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
},
|
||||
"securitySchemes": Object {
|
||||
"JWT": Object {
|
||||
"bearerFormat": "JWT",
|
||||
"description": "You can create a JSON Web Token (JWT) via our [JWT Session resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
Usage format: \`Bearer <JWT>\`
|
||||
",
|
||||
"scheme": "bearer",
|
||||
"type": "http",
|
||||
},
|
||||
},
|
||||
},
|
||||
"externalDocs": Object {
|
||||
"description": Object {
|
||||
"$ref": "inc/docs-description.md",
|
||||
},
|
||||
"url": "googlecom",
|
||||
},
|
||||
"info": Object {
|
||||
"contact": Object {
|
||||
"email": "ivan@redoc.ly",
|
||||
"name": "Ivan Goncharov",
|
||||
},
|
||||
"license": Object {
|
||||
"name": "example",
|
||||
"url": "example.org",
|
||||
},
|
||||
"title": "Example OpenAPI 3 definition. Valid.",
|
||||
"version": 1,
|
||||
"x-redocly-overlay": Object {
|
||||
"path": "overlay-info.yaml",
|
||||
},
|
||||
},
|
||||
"openapi": "3.0.2",
|
||||
"paths": Object {
|
||||
"/user/{id}": Object {
|
||||
"get": Object {
|
||||
"description": "Get user by id",
|
||||
"operationId": "withPathParam",
|
||||
"parameters": Array [
|
||||
Object {
|
||||
"description": "User id",
|
||||
"in": "path",
|
||||
"name": "test",
|
||||
"required": true,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
],
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"project": Object {
|
||||
"get": Object {
|
||||
"description": "Get project",
|
||||
"operationId": "projectGet",
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"user": Object {
|
||||
"get": Object {
|
||||
"description": "Get user",
|
||||
"operationId": "userGet",
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
"parameters": Array [
|
||||
Object {
|
||||
"$ref": "#/components/parameters/example",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
"servers": Array [
|
||||
Object {
|
||||
"url": "http://example.org",
|
||||
},
|
||||
],
|
||||
},
|
||||
"file": "./definitions/syntetic/syntetic-1.yaml",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"source": "openapi: 3.0.2
|
||||
info:
|
||||
x-redocly-overlay:
|
||||
path: overlay-info.yaml
|
||||
title: Example OpenAPI 3 definition. Valid.
|
||||
version: 1.0
|
||||
contact:
|
||||
name: Ivan Goncharov
|
||||
email: ivan@redoc.ly
|
||||
license:
|
||||
name: example
|
||||
url: example.org
|
||||
|
||||
servers:
|
||||
- url: 'http://example.org'
|
||||
|
||||
paths:
|
||||
user:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/example'
|
||||
get:
|
||||
operationId: userGet
|
||||
description: Get user
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
project:
|
||||
get:
|
||||
operationId: projectGet
|
||||
description: Get project
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
'/user/{id}':
|
||||
get:
|
||||
parameters:
|
||||
- in: path
|
||||
name: test
|
||||
description: User id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
operationId: withPathParam
|
||||
description: Get user by id
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
externalDocs:
|
||||
description:
|
||||
$ref: inc/docs-description.md
|
||||
url: googlecom
|
||||
components:
|
||||
securitySchemes:
|
||||
JWT:
|
||||
description: >
|
||||
You can create a JSON Web Token (JWT) via our [JWT Session
|
||||
resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
|
||||
Usage format: \`Bearer <JWT>\`
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
parameters:
|
||||
example:
|
||||
allOf:
|
||||
- name: bla
|
||||
in: query
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
- description: blo
|
||||
- description: bla
|
||||
genericExample:
|
||||
name: example
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string",
|
||||
},
|
||||
],
|
||||
"possibleAlternate": undefined,
|
||||
"referencedFrom": Object {
|
||||
"file": "definitions/syntetic/syntetic-1.yaml",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"startLine": 24,
|
||||
},
|
||||
"severity": 4,
|
||||
"source": null,
|
||||
"target": undefined,
|
||||
"value": Object {},
|
||||
"visited": Array [],
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
||||
@@ -1,104 +0,0 @@
|
||||
import path from 'path';
|
||||
import { getLocationByPath, getCodeFrameForLocation } from '../yaml';
|
||||
|
||||
export const messageLevels = {
|
||||
ERROR: 4,
|
||||
WARNING: 3,
|
||||
INFO: 2,
|
||||
DEBUG: 1,
|
||||
};
|
||||
|
||||
const getLocationForPath = (fName, nodePath, target, { filePath, source }) => getLocationByPath(
|
||||
Array.from(nodePath),
|
||||
{ filePath, source },
|
||||
target,
|
||||
).startLine;
|
||||
|
||||
export const getMsgLevelFromString = (severityString) => {
|
||||
switch (severityString.toLowerCase()) {
|
||||
case 'debug':
|
||||
return 1;
|
||||
case 'info':
|
||||
return 2;
|
||||
case 'warning':
|
||||
return 3;
|
||||
case 'error':
|
||||
default:
|
||||
return 4;
|
||||
}
|
||||
};
|
||||
|
||||
export const getReferencedFrom = (ctx) => {
|
||||
const lastRef = ctx.pathStack[ctx.pathStack.length - 1];
|
||||
if (!lastRef) return null;
|
||||
return {
|
||||
file: path.relative(process.cwd(), lastRef.file),
|
||||
startLine: getLocationForPath(
|
||||
lastRef.file,
|
||||
[...lastRef.path, '$ref'],
|
||||
'key',
|
||||
{ source: lastRef.source, filePath: lastRef.file },
|
||||
),
|
||||
path: Array.from(lastRef.path),
|
||||
};
|
||||
};
|
||||
|
||||
const createError = (msg, node, ctx, options, overrideSeverity) => {
|
||||
const {
|
||||
target, possibleAlternate, fromRule,
|
||||
} = options;
|
||||
|
||||
let { severity = messageLevels.ERROR } = options;
|
||||
|
||||
if (overrideSeverity) severity = overrideSeverity;
|
||||
|
||||
if (typeof severity === 'string') {
|
||||
severity = getMsgLevelFromString(severity);
|
||||
}
|
||||
|
||||
let location = getLocationByPath(Array.from(ctx.path), ctx, target);
|
||||
if (!location) location = getLocationByPath(Array.from(ctx.path), ctx);
|
||||
|
||||
return {
|
||||
message: msg,
|
||||
path: Array.from(ctx.path),
|
||||
referencedFrom: getReferencedFrom(ctx),
|
||||
location,
|
||||
codeFrame: ctx.enableCodeframe && location
|
||||
? getCodeFrameForLocation(
|
||||
location.startIndex,
|
||||
location.endIndex,
|
||||
ctx.source,
|
||||
location.startLine,
|
||||
)
|
||||
: null,
|
||||
value: node,
|
||||
file: path.relative(process.cwd(), ctx.filePath),
|
||||
severity,
|
||||
enableCodeframe: ctx.enableCodeframe,
|
||||
possibleAlternate,
|
||||
fromRule,
|
||||
target,
|
||||
};
|
||||
};
|
||||
|
||||
export const createErrorFlat = (
|
||||
node, ctx, fromRule, severity, msg, target, possibleAlternate, overrideSeverity,
|
||||
) => createError(msg, node, ctx, {
|
||||
target, fromRule, severity, possibleAlternate,
|
||||
}, overrideSeverity);
|
||||
|
||||
export const fromError = (error, ctx) => (
|
||||
// let location = getLocationByPath(Array.from(ctx.path), ctx, error.target);
|
||||
// if (!location) location = getLocationByPath(Array.from(ctx.path), ctx);
|
||||
{
|
||||
...error,
|
||||
...ctx,
|
||||
document: null,
|
||||
source: null,
|
||||
path: error.path,
|
||||
referencedFrom: getReferencedFrom(ctx),
|
||||
}
|
||||
);
|
||||
|
||||
export default createError;
|
||||
@@ -1,33 +0,0 @@
|
||||
import { relative } from 'path';
|
||||
import createError, { getReferencedFrom } from './default';
|
||||
import { getCodeFrameForLocation } from '../yaml';
|
||||
|
||||
const mutuallyExclusiveFieldsMessageHelper = (fieldNames) => `Fields ${fieldNames.map((el) => `'${el}'`).join(', ')} are mutually exclusive.`;
|
||||
const fieldTypeMismatchMessageHelper = (desiredType) => `This field must be of ${desiredType} type.`;
|
||||
const missingRequiredField = (fieldName) => `The field '${fieldName}' must be present on this level.`;
|
||||
const fieldNotAllowedMessageHelper = (fieldName, definitionName) => `The field '${fieldName}' is not allowed in ${definitionName}. Use "x-" prefix or custom types to override this behavior.`;
|
||||
|
||||
export const messageHelpers = {
|
||||
mutuallyExclusiveFieldsMessageHelper,
|
||||
fieldTypeMismatchMessageHelper,
|
||||
missingRequiredField,
|
||||
fieldNotAllowedMessageHelper,
|
||||
};
|
||||
|
||||
export default createError;
|
||||
export { getReferencedFrom } from './default';
|
||||
|
||||
export const createYAMLParseError = (e, ctx, resolvedPath, source, root = false) => ({
|
||||
message: `${e.name}: ${e.reason}`,
|
||||
path: root ? [] : Array.from(ctx.path),
|
||||
referencedFrom: root ? null : getReferencedFrom(ctx),
|
||||
location: {
|
||||
startLine: e.mark.line + 1,
|
||||
startCol: e.mark.column,
|
||||
},
|
||||
codeFrame: getCodeFrameForLocation(e.mark.position - 1, e.mark.position, source, e.mark.line + 1),
|
||||
value: null,
|
||||
file: relative(process.cwd(), resolvedPath),
|
||||
severity: 4,
|
||||
enableCodeframe: ctx.enableCodeframe !== undefined ? ctx.enableCodeframe : true,
|
||||
});
|
||||
206
src/format/codeframes.ts
Normal file
206
src/format/codeframes.ts
Normal file
@@ -0,0 +1,206 @@
|
||||
import { gray, red, options as colorOptions } from 'colorette';
|
||||
|
||||
import * as yamlAst from 'yaml-ast-parser';
|
||||
|
||||
import { parsePointer } from '../ref';
|
||||
import { LineColLocationObject, PointerLocationObject, Loc, LocationObject } from '../walk';
|
||||
|
||||
type YAMLMapping = yamlAst.YAMLMapping & { kind: yamlAst.Kind.MAPPING };
|
||||
type YAMLMap = yamlAst.YamlMap & { kind: yamlAst.Kind.MAP };
|
||||
type YAMLAnchorReference = yamlAst.YAMLAnchorReference & { kind: yamlAst.Kind.ANCHOR_REF };
|
||||
type YAMLSequence = yamlAst.YAMLSequence & { kind: yamlAst.Kind.SEQ };
|
||||
type YAMLScalar = yamlAst.YAMLScalar & { kind: yamlAst.Kind.SCALAR };
|
||||
|
||||
type YAMLNode = YAMLMapping | YAMLMap | YAMLAnchorReference | YAMLSequence | YAMLScalar;
|
||||
|
||||
const MAX_LINE_LENGTH = 150;
|
||||
const MAX_CODEFRAME_LINES = 3;
|
||||
|
||||
export function getCodeframe(location: LineColLocationObject, color: boolean) {
|
||||
colorOptions.enabled = color;
|
||||
|
||||
const { start, end = { line: start.line, col: start.col + 1 }, source } = location;
|
||||
|
||||
const lines = source.getLines();
|
||||
|
||||
const startLineNum = start.line;
|
||||
const endLineNum = Math.max(Math.min(end.line, lines.length), start.line);
|
||||
|
||||
let skipLines = Math.max(endLineNum - startLineNum - MAX_CODEFRAME_LINES + 1, 0);
|
||||
if (skipLines < 2) skipLines = 0; // do not skip one line
|
||||
|
||||
// Lines specified like this: ["prefix", "string"],
|
||||
const prefixedLines: [string, string][] = [];
|
||||
|
||||
let currentPad = 0;
|
||||
|
||||
for (let i = startLineNum; i <= endLineNum; i++) {
|
||||
if (skipLines > 0 && i >= endLineNum - skipLines) break;
|
||||
const line = lines[i - 1] || '';
|
||||
if (line !== '') currentPad = padSize(line);
|
||||
let startIdx = i === startLineNum ? start.col - 1 : currentPad;
|
||||
let endIdx = i === endLineNum ? end.col - 1 : line.length;
|
||||
|
||||
prefixedLines.push([`${i}`, markLine(line, startIdx, endIdx, red)]);
|
||||
if (!color) prefixedLines.push(['', underlineLine(line, startIdx, endIdx)]);
|
||||
}
|
||||
|
||||
if (skipLines > 0) {
|
||||
prefixedLines.push([`…`, `${whitespace(currentPad)}${gray(`< ${skipLines} more lines >`)}`]);
|
||||
// print last line
|
||||
prefixedLines.push([`${endLineNum}`, markLine(lines[endLineNum - 1], -1, end.col - 1, red)]);
|
||||
|
||||
if (!color) prefixedLines.push(['', underlineLine(lines[endLineNum - 1], -1, end.col - 1)]);
|
||||
}
|
||||
|
||||
return printPrefixedLines([
|
||||
[`${startLineNum - 2}`, markLine(lines[startLineNum - 1 - 2])],
|
||||
[`${startLineNum - 1}`, markLine(lines[startLineNum - 1 - 1])],
|
||||
...prefixedLines,
|
||||
[`${endLineNum + 1}`, markLine(lines[endLineNum - 1 + 1])],
|
||||
[`${endLineNum + 2}`, markLine(lines[endLineNum - 1 + 2])],
|
||||
]);
|
||||
|
||||
function markLine(
|
||||
line: string,
|
||||
startIdx: number = -1,
|
||||
endIdx: number = +Infinity,
|
||||
variant = gray,
|
||||
) {
|
||||
if (!color) return line;
|
||||
if (!line) return line;
|
||||
|
||||
if (startIdx === -1) {
|
||||
startIdx = padSize(line);
|
||||
}
|
||||
|
||||
endIdx = Math.min(endIdx, line.length);
|
||||
return (
|
||||
line.substr(0, startIdx) + variant(line.substring(startIdx, endIdx)) + line.substr(endIdx)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function printPrefixedLines(lines: [string, string][]): string {
|
||||
const existingLines = lines.filter(([_, line]) => line !== undefined);
|
||||
|
||||
const padLen = Math.max(...existingLines.map(([prefix]) => prefix.length));
|
||||
const dedentLen = Math.min(
|
||||
...existingLines.map(([_, line]) => (line === '' ? Infinity : padSize(line))),
|
||||
);
|
||||
|
||||
return existingLines
|
||||
.map(
|
||||
([prefix, line]) =>
|
||||
gray(leftPad(padLen, prefix) + ' |') +
|
||||
(line ? ' ' + limitLineLength(line.substring(dedentLen)) : ''),
|
||||
)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function limitLineLength(line: string, maxLen: number = MAX_LINE_LENGTH) {
|
||||
const overflowLen = line.length - maxLen;
|
||||
if (overflowLen > 0) {
|
||||
const charsMoreText = gray(`...<${overflowLen} chars>`);
|
||||
return line.substring(0, maxLen - charsMoreText.length) + charsMoreText;
|
||||
} else {
|
||||
return line;
|
||||
}
|
||||
}
|
||||
|
||||
function underlineLine(line: string, startIdx: number = -1, endIdx: number = +Infinity) {
|
||||
if (startIdx === -1) {
|
||||
startIdx = padSize(line);
|
||||
}
|
||||
|
||||
endIdx = Math.min(endIdx, line.length);
|
||||
return whitespace(startIdx) + '^'.repeat(Math.max(endIdx - startIdx, 1));
|
||||
}
|
||||
|
||||
// Returns a run of `len` space characters (throws RangeError for negative `len`).
function whitespace(len: number): string {
  return ' '.repeat(len);
}
|
||||
|
||||
// Right-aligns `str` in a field of width `len` by prepending spaces.
// Assumes len >= str.length (callers pass the max prefix width).
function leftPad(len: number, str: string): string {
  return whitespace(len - str.length) + str;
}
|
||||
|
||||
function padSize(line: string): number {
|
||||
for (let i = 0; i < line.length; i++) {
|
||||
if (line[i] !== ' ') return i;
|
||||
}
|
||||
return line.length;
|
||||
}
|
||||
|
||||
/**
 * Normalizes a location to line/column form.
 *
 * Locations without a pointer are already line/col-based and returned as-is.
 * Otherwise the JSON pointer is resolved against the source's YAML AST and
 * the node's character offsets are translated to start/end line-col pairs;
 * an unresolvable node falls back to offset 1 (start of file).
 */
export function getLineColLocation(location: LocationObject): LineColLocationObject {
  if (location.pointer === undefined) return location;

  const { source, pointer, reportOnKey } = location;
  const ast = source.getAst() as YAMLNode;
  const astNode = getAstNodeByPointer(ast, pointer, reportOnKey);
  return {
    ...location,
    pointer: undefined, // pointer is consumed; the result is purely positional
    ...positionsToLoc(source.body, astNode?.startPosition ?? 1, astNode?.endPosition ?? 1),
  };
}
|
||||
|
||||
/**
 * Converts 1-based absolute character offsets within `source` into
 * 1-based line/column positions.
 *
 * Scans the text once up to `endPos`, capturing `start` when the cursor
 * reaches `startPos`; the cursor's final state yields `end`.
 */
function positionsToLoc(
  source: string,
  startPos: number,
  endPos: number,
): { start: Loc; end: Loc } {
  let currentLine = 1;
  let currentCol = 1;
  let start: Loc = { line: 1, col: 1 };

  for (let i = 0; i < endPos - 1; i++) {
    if (i === startPos - 1) {
      start = { line: currentLine, col: currentCol + 1 };
    }
    if (source[i] === '\n') {
      // NOTE(review): this special-cases LF followed by CR ("\n\r").
      // Windows CRLF ("\r\n") is NOT handled, so '\r' counts as a column
      // character there — confirm whether that is intended.
      if (source[i + 1] === '\r') i++;
      currentLine++;
      currentCol = 1;
      continue;
    }
    currentCol++;
  }

  // A zero-length range collapses end onto start.
  const end = startPos === endPos ? { ...start } : { line: currentLine, col: currentCol + 1 };
  return { start, end };
}
|
||||
|
||||
/**
 * Resolves a JSON pointer to the deepest matching node in a YAML AST.
 *
 * Traversal is best-effort: when a segment cannot be resolved, it stops
 * (the `break`s) and the deepest node reached so far is returned, so the
 * report still points somewhere useful. When `reportOnKey` is true, the
 * mapping key node is returned instead of the value node so the key text
 * itself gets highlighted.
 */
function getAstNodeByPointer(root: YAMLNode, pointer: string, reportOnKey: boolean) {
  const pointerSegments = parsePointer(pointer);
  if (root === undefined) {
    return undefined;
  }

  let currentNode = root;
  for (const key of pointerSegments) {
    if (currentNode.kind === yamlAst.Kind.MAP) {
      const mapping = currentNode.mappings.find((m) => m.key.value === key);
      if (!mapping?.value) break; // key missing — fall back to current node
      currentNode = mapping?.value as YAMLNode;
    } else if (currentNode.kind === yamlAst.Kind.SEQ) {
      const elem = currentNode.items[parseInt(key, 10)] as YAMLNode;
      if (!elem) break; // index out of range — fall back to current node
      currentNode = elem as YAMLNode;
    }
    // Scalars (and other kinds) keep currentNode unchanged for the rest of the path.
  }

  if (!reportOnKey) {
    return currentNode;
  } else {
    const parent = currentNode.parent as YAMLNode;
    if (!parent) return currentNode;
    if (parent.kind === yamlAst.Kind.SEQ) {
      // Sequence items have no key to point at.
      return currentNode;
    } else if (parent.kind === yamlAst.Kind.MAPPING) {
      return parent.key;
    } else {
      return currentNode;
    }
  }
}
|
||||
152
src/format/format.ts
Normal file
152
src/format/format.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
import * as path from 'path';
|
||||
import {
|
||||
options as colorOptions,
|
||||
gray,
|
||||
blue,
|
||||
bgRed,
|
||||
bgYellow,
|
||||
underline,
|
||||
yellow,
|
||||
red,
|
||||
} from 'colorette';
|
||||
|
||||
import { NormalizedReportMessage, MessageSeverity, LineColLocationObject } from '../walk';
|
||||
import { getCodeframe, getLineColLocation } from './codeframes';
|
||||
|
||||
// Background color per severity — highlights "file:line:col" in full output.
const BG_COLORS = {
  warning: bgYellow,
  error: bgRed,
};

// Foreground color per severity — colors rule ids in short output.
const COLORS = {
  warning: yellow,
  error: red,
};
|
||||
|
||||
function severityToNumber(severity: MessageSeverity) {
|
||||
return severity === 'error' ? 1 : 2;
|
||||
}
|
||||
|
||||
export function formatMessages(
|
||||
messages: NormalizedReportMessage[],
|
||||
opts: {
|
||||
maxMessages?: number;
|
||||
cwd?: string;
|
||||
format?: 'full' | 'short';
|
||||
color?: boolean;
|
||||
},
|
||||
) {
|
||||
const {
|
||||
maxMessages = 100,
|
||||
cwd = process.cwd(),
|
||||
format = 'full',
|
||||
color = colorOptions.enabled,
|
||||
} = opts;
|
||||
|
||||
colorOptions.enabled = color; // force colors if specified
|
||||
|
||||
const totalMessages = messages.length;
|
||||
messages = messages
|
||||
.sort((a, b) => severityToNumber(a.severity) - severityToNumber(b.severity))
|
||||
.slice(0, maxMessages);
|
||||
|
||||
if (!totalMessages) return;
|
||||
|
||||
if (totalMessages > maxMessages) {
|
||||
process.stdout.write(`${maxMessages} first fo total ${totalMessages} messages:\n`);
|
||||
} else {
|
||||
process.stdout.write(`${totalMessages} messages\n`);
|
||||
}
|
||||
|
||||
if (format === 'full') {
|
||||
for (let i = 0; i < messages.length; i++) {
|
||||
const message = messages[i];
|
||||
process.stdout.write(`${fullFormatMessage(message, i)}\n`);
|
||||
}
|
||||
} else {
|
||||
const grouppedByFile = groupByFiles(messages);
|
||||
for (const [file, { ruleIdPad, locationPad: positionPad, fileMessages }] of Object.entries(
|
||||
grouppedByFile,
|
||||
)) {
|
||||
process.stderr.write(`${blue(path.relative(cwd, file))}:\n`);
|
||||
|
||||
for (let i = 0; i < fileMessages.length; i++) {
|
||||
const message = fileMessages[i];
|
||||
process.stdout.write(`${shortFormatMessage(message, positionPad, ruleIdPad)}\n`);
|
||||
}
|
||||
|
||||
process.stdout.write('\n');
|
||||
}
|
||||
}
|
||||
|
||||
if (totalMessages > maxMessages) {
|
||||
process.stdout.write(
|
||||
`< ... ${totalMessages - maxMessages} more messages hidden > ${gray(
|
||||
'increase with `--max-messages N`',
|
||||
)}\n`,
|
||||
);
|
||||
}
|
||||
|
||||
function fullFormatMessage(message: NormalizedReportMessage, idx: number) {
|
||||
const bgColor = BG_COLORS[message.severity];
|
||||
|
||||
const location = message.location[0]; // todo: support multiple locations
|
||||
const relativePath = path.relative(cwd, location.source.absoluteRef);
|
||||
const loc = getLineColLocation(location);
|
||||
const atPointer = location.pointer ? gray(`at ${location.pointer}`) : '';
|
||||
const fileWithLoc = `${relativePath}:${loc.start.line}:${loc.start.col}`;
|
||||
return (
|
||||
`[${idx + 1}] ${bgColor(fileWithLoc)} ${atPointer}\n\n` +
|
||||
`${message.message}\n` +
|
||||
'\n' +
|
||||
getCodeframe(loc, color) +
|
||||
'\n\n' +
|
||||
`${message.severity} was generated by ${blue(message.ruleId)} rule.\n\n`
|
||||
);
|
||||
}
|
||||
|
||||
function shortFormatMessage(message: OnlyLineColMessage, locationPad: number, ruleIdPad: number) {
|
||||
const color = COLORS[message.severity];
|
||||
const { start } = message.location[0];
|
||||
return ` ${`${start.line}:${start.col}`.padEnd(locationPad + 2)} ${color(
|
||||
message.ruleId.padEnd(ruleIdPad),
|
||||
)} ${message.message}`;
|
||||
}
|
||||
}
|
||||
|
||||
// A report message whose locations have already been resolved from JSON
// pointers to line/column form (see getLineColLocation).
type OnlyLineColMessage = Omit<NormalizedReportMessage, 'location'> & {
  location: LineColLocationObject[];
};
|
||||
|
||||
/**
 * Groups messages by source file (absoluteRef of the first location) and
 * tracks, per file, the column widths needed to align the short format:
 * `ruleIdPad` (longest rule id) and `locationPad` (longest "line:col").
 * Locations are resolved to line/col form while grouping.
 */
const groupByFiles = (messages: NormalizedReportMessage[]) => {
  const fileGroups: Record<
    string,
    {
      locationPad: number;
      ruleIdPad: number;
      fileMessages: OnlyLineColMessage[];
    }
  > = {};
  for (const message of messages) {
    const absoluteRef = message.location[0].source.absoluteRef; // TODO: multiple errors
    fileGroups[absoluteRef] = fileGroups[absoluteRef] || {
      fileMessages: [],
      ruleIdPad: 0,
      locationPad: 0,
    };

    const mappedMessage = { ...message, location: message.location.map(getLineColLocation) };
    fileGroups[absoluteRef].fileMessages.push(mappedMessage);
    fileGroups[absoluteRef].ruleIdPad = Math.max(
      message.ruleId.length,
      fileGroups[absoluteRef].ruleIdPad,
    );

    // Widest "line:col" over all of this message's locations.
    fileGroups[absoluteRef].locationPad = Math.max(
      Math.max(...mappedMessage.location.map((loc) => `${loc.start.line}:${loc.start.col}`.length)),
      fileGroups[absoluteRef].locationPad,
    );
  }

  return fileGroups;
};
|
||||
12
src/index.js
12
src/index.js
@@ -1,12 +0,0 @@
|
||||
#!/usr/bin/env node
// Package entry point: both the CLI executable and the public API surface.
import { validateFromFile } from './validate';
import cli from './cli';

// Programmatic API re-exports.
export { validate, validateFromFile } from './validate';
export { bundle, bundleToFile } from './bundle';

export default validateFromFile;

// Run the CLI only when executed directly, not when imported as a library.
if (require.main === module) {
  cli();
}
|
||||
0
src/index.ts
Normal file
0
src/index.ts
Normal file
@@ -1,92 +0,0 @@
|
||||
/* eslint-disable no-underscore-dangle */
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
// Walks `o` along the path segments `p`; yields null as soon as any hop is
// missing or falsy (note: falsy leaf values like 0 or '' also become null).
const get = (p, o) => {
  let cursor = o;
  for (const segment of p) {
    cursor = cursor && cursor[segment] ? cursor[segment] : null;
  }
  return cursor;
};
|
||||
|
||||
/**
 * Looks up the config value at dot-separated `JSONPath` inside `json` and
 * normalizes it to an object form:
 *  - a string value becomes `{ level: value }`
 *  - an object (or missing/null) value is spread over `{ level: 4 }`,
 *    so `level: 4` is the default severity.
 */
function getObjByPathOrParent(json, JSONPath) {
  const value = get(JSONPath.split('.'), json);
  if (typeof value === 'string') {
    return { level: value };
  }
  // object, null, or anything else: default level unless overridden.
  return { level: 4, ...value };
}
|
||||
|
||||
/**
 * Recursively loads rule classes from `config.rulesPath` (defaulting to the
 * bundled `visitors` directory) and instantiates them with their per-rule
 * config from `config.rules`.
 *
 * Returns `[ruleSet, allRules]`: `ruleSet` contains only enabled instances
 * (level !== 'off'), `allRules` contains every instantiated rule.
 * Subdirectories (except those containing 'utils') are loaded recursively.
 */
function loadRuleset(config) {
  const ruleSet = [];
  const allRules = [];

  const configCopy = {
    ...config,
    rulesPath: config.rulesPath ? config.rulesPath : `${__dirname}/../visitors`,
  };
  let rulesDirectory = path.resolve(configCopy.rulesPath);
  // Fall back to the bundled visitors when the configured path does not exist.
  if (!fs.existsSync(rulesDirectory)) {
    rulesDirectory = `${__dirname}/../visitors`;
  }
  const ruleSetDirContents = fs.readdirSync(rulesDirectory)
    .map((fName) => `${rulesDirectory}/${fName}`);
  const files = ruleSetDirContents.filter((fName) => fs.lstatSync(fName).isFile());

  // 'utils' directories hold helpers, not rules — skip them.
  const dirs = ruleSetDirContents
    .filter((fName) => !fs.lstatSync(fName).isFile() && fName.indexOf('utils') === -1);

  files.forEach((file) => {
    const Rule = require(file);
    const ruleConfig = getObjByPathOrParent(configCopy.rules, Rule.rule) || { level: 4 };

    // NOTE: the rule is instantiated even when disabled so it still appears
    // in `allRules`.
    const ruleInstance = new Rule(ruleConfig);
    if (ruleConfig.level !== 'off') {
      if (!ruleInstance.config) {
        ruleInstance.config = ruleConfig;
      }
      ruleInstance._config = ruleConfig;
      ruleSet.push(ruleInstance);
    }
    allRules.push(ruleInstance);
  });

  dirs.forEach((dir) => {
    const [nestedRules, allNestedRules] = loadRuleset({
      ...configCopy,
      rulesPath: dir,
    });
    ruleSet.push(...nestedRules);
    allRules.push(...allNestedRules);
  });

  return [ruleSet, allRules];
}
|
||||
|
||||
/**
 * Instantiates rule classes supplied directly under `config[rulesetName]`
 * (an array of rule constructors, e.g. a plugin's exported rules) and
 * returns the enabled instances. Disabled rules (level 'off') are skipped
 * entirely — unlike loadRuleset, they are not even instantiated.
 */
export function loadRulesetExtension(config, rulesetName) {
  const additionalRules = [];

  const configCopy = {
    ...config,
    rulesPath: config.rulesPath ? config.rulesPath : `${__dirname}/../visitors`,
  };

  config[rulesetName].forEach((Rule) => {
    const ruleConfig = getObjByPathOrParent(configCopy.rules, Rule.rule) || { level: 4 };

    if (ruleConfig.level !== 'off') {
      const ruleInstance = new Rule(ruleConfig);
      if (!ruleInstance.config) {
        ruleInstance.config = ruleConfig;
      }
      ruleInstance._config = ruleConfig;
      additionalRules.push(ruleInstance);
    }
  });
  return additionalRules;
}

export default loadRuleset;
|
||||
@@ -1,23 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<meta charset="utf8" />
|
||||
<title>{{title}}</title>
|
||||
<!-- needed for adaptive design -->
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<style>
|
||||
body {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
</style>
|
||||
{{{redocHead}}}
|
||||
<link href="https://fonts.googleapis.com/css?family=Montserrat:300,400,700|Roboto:300,400,700" rel="stylesheet">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
{{{redocHTML}}}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@@ -1,43 +0,0 @@
|
||||
/* eslint-disable no-underscore-dangle */
// Browser-side hot-reload client. Served by the preview server; connects to
// its websocket (port injected via window.__OPENAPI_CLI_WS_PORT) and reloads
// the page when the server broadcasts a "reload" message.
(function run() {
  const Socket = window.SimpleWebsocket;
  const port = window.__OPENAPI_CLI_WS_PORT;

  let socket;

  reconnect();

  function reconnect() {
    socket = new Socket(`ws://127.0.0.1:${port}`);
    // Handshake: ping on connect, server answers with pong.
    socket.on('connect', () => {
      socket.send('{"type": "ping"}');
    });

    socket.on('data', (data) => {
      const message = JSON.parse(data);
      switch (message.type) {
        case 'pong':
          console.log('[hot] hot reloading connected');
          break;
        case 'reload':
          console.log('[hot] full page reload');
          window.location.reload();
          break;
        default:
          console.log(`[hot] ${message.type} received`);
      }
    });

    // Retry with a fixed 4s backoff when the server goes away.
    socket.on('close', () => {
      socket.destroy();
      console.log('Connection lost, trying to reconnect in 4s');
      setTimeout(() => {
        reconnect();
      }, 4000);
    });

    // 'close' fires after 'error', which triggers the reconnect above.
    socket.on('error', () => {
      socket.destroy();
    });
  }
}());
|
||||
@@ -1,96 +0,0 @@
|
||||
// import { watch } from 'chokidar';
|
||||
import { compile } from 'handlebars';
|
||||
import chalk from 'chalk';
|
||||
import * as portfinder from 'portfinder';
|
||||
|
||||
|
||||
import { readFileSync, promises as fsPromises } from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import {
|
||||
startHttpServer, startWsServer, respondWithGzip, mimeTypes,
|
||||
} from './server';
|
||||
|
||||
/**
 * Renders the preview page from a handlebars template.
 *
 * Injects the hot-reload client (with the websocket port), the Redoc (or
 * Reference Docs Pro) bundle from the CDN, and an init script that points
 * the viewer at /openapi.json served by the preview server.
 */
function getPageHTML(htmlTemplate, redocOptions = {}, useRedocPro, wsPort) {
  let templateSrc = readFileSync(htmlTemplate, 'utf-8');

  // fix template for backward compatibility
  // (older templates used {{redocHead}} / {{redocBody}} placeholders)
  templateSrc = templateSrc
    .replace(/{?{{redocHead}}}?/, '{{{redocHead}}}')
    .replace('{{redocBody}}', '{{{redocHTML}}}');

  const template = compile(templateSrc);

  return template({
    redocHead: `
  <script>
    window.__REDOC_EXPORT = '${useRedocPro ? 'RedoclyReferenceDocs' : 'Redoc'}';
    window.__OPENAPI_CLI_WS_PORT = ${wsPort};
  </script>
  <script src="/simplewebsocket.min.js"></script>
  <script src="/hot.js"></script>
  <script src="${useRedocPro
    ? 'https://cdn.jsdelivr.net/npm/@redocly/reference-docs@latest/dist/redocly-reference-docs.min.js'
    : 'https://cdn.jsdelivr.net/npm/redoc@latest/bundles/redoc.standalone.js'}"></script>
  `,
    redocHTML: `
  <div id="redoc"></div>
  <script>
    var container = document.getElementById('redoc');
    ${useRedocPro ? "window[window.__REDOC_EXPORT].setPublicPath('https://cdn.jsdelivr.net/npm/@redocly/reference-docs@latest/dist/');" : ''}
    window[window.__REDOC_EXPORT].init("openapi.json", ${JSON.stringify(redocOptions)}, container)
  </script>`,
  });
}
|
||||
|
||||
/**
 * Starts the documentation preview: an HTTP server on `port` serving the
 * preview page, the bundled spec at /openapi.json, the hot-reload assets,
 * and static files relative to the template (or cwd); plus a websocket
 * server used to push reload events. Resolves to the websocket server.
 *
 * @param {number} port HTTP port to listen on.
 * @param {{ getBundle: Function, getOptions: Function, useRedocPro: boolean }} deps
 */
export default async function startPreviewServer(port, {
  getBundle,
  getOptions,
  useRedocPro,
}) {
  const defaultTemplate = path.join(__dirname, 'default.hbs');
  // NOTE(review): `wsPort` is closed over here but declared below — safe at
  // runtime because the handler only runs after the server starts, but
  // fragile; confirm before reordering.
  const handler = async (request, response) => {
    console.time(chalk.dim(`GET ${request.url}`));
    const { htmlTemplate } = getOptions() || {};

    if (request.url === '/') {
      respondWithGzip(getPageHTML(htmlTemplate || defaultTemplate, getOptions(), useRedocPro, wsPort), request, response, {
        'Content-Type': 'text/html',
      });
    } else if (request.url === '/openapi.json') {
      respondWithGzip(JSON.stringify(await getBundle()), request, response, {
        'Content-Type': 'application/json',
      });
    } else {
      // Known hot-reload assets, otherwise a static file relative to the
      // template directory (or cwd when no template is configured).
      const filePath = {
        '/hot.js': path.join(__dirname, 'hot.js'),
        '/simplewebsocket.min.js': require.resolve('simple-websocket/simplewebsocket.min.js'),
      }[request.url] || path.resolve(htmlTemplate ? path.dirname(htmlTemplate) : process.cwd(), `.${request.url}`);

      const extname = String(path.extname(filePath)).toLowerCase();

      const contentType = mimeTypes[extname] || 'application/octet-stream';
      try {
        respondWithGzip(await fsPromises.readFile(filePath), request, response, {
          'Content-Type': contentType,
        });
      } catch (e) {
        if (e.code === 'ENOENT') {
          respondWithGzip('404 Not Found', request, response, { 'Content-Type': 'text/html' }, 404);
        } else {
          respondWithGzip(`Something went wrong: ${e.code || e.message}...\n`, request, response, {}, 500);
        }
      }
    }
    console.timeEnd(chalk.dim(`GET ${request.url}`));
  };

  let wsPort = await portfinder.getPortPromise({ port: 32201 });

  const server = startHttpServer(port, handler);
  server.on('listening', () => {
    process.stdout.write(`\n 🔎 Preview server running at ${chalk.blue(`http://127.0.0.1:${port}\n`)}`);
  });

  return startWsServer(wsPort);
}
|
||||
@@ -1,84 +0,0 @@
|
||||
import * as http from 'http';
|
||||
import * as zlib from 'zlib';
|
||||
|
||||
const SocketServer = require('simple-websocket/server');
|
||||
|
||||
// Content-Type lookup by file extension for static files served by the
// preview server; unknown extensions fall back to application/octet-stream.
export const mimeTypes = {
  '.html': 'text/html',
  '.js': 'text/javascript',
  '.css': 'text/css',
  '.json': 'application/json',
  '.png': 'image/png',
  '.jpg': 'image/jpg',
  '.gif': 'image/gif',
  '.svg': 'image/svg+xml',
  '.wav': 'audio/wav',
  '.mp4': 'video/mp4',
  '.woff': 'application/font-woff',
  '.ttf': 'application/font-ttf',
  '.eot': 'application/vnd.ms-fontobject',
  '.otf': 'application/font-otf',
  '.wasm': 'application/wasm',
};
|
||||
|
||||
// credits: https://stackoverflow.com/a/9238214/1749888
/**
 * Writes `contents` (string, Buffer, or readable stream) to `response`,
 * compressed with deflate or gzip when the client's Accept-Encoding allows,
 * otherwise uncompressed. `headers` and `code` shape the response head.
 */
export function respondWithGzip(contents, request, response, headers = {}, code = 200) {
  let compressedStream;
  const acceptEncoding = request.headers['accept-encoding'] || '';
  if (acceptEncoding.match(/\bdeflate\b/)) {
    response.writeHead(code, { ...headers, 'content-encoding': 'deflate' });
    compressedStream = zlib.createDeflate();
  } else if (acceptEncoding.match(/\bgzip\b/)) {
    response.writeHead(code, { ...headers, 'content-encoding': 'gzip' });
    compressedStream = zlib.createGzip();
  } else {
    // No supported encoding: send the payload as-is and return early.
    response.writeHead(code, headers);
    if (typeof contents === 'string' || Buffer.isBuffer(contents)) {
      response.write(contents);
      response.end();
    } else {
      contents.pipe(response);
    }
    return;
  }

  // Compressed path: buffer-like payloads are written through the
  // compressor; stream payloads are piped through it.
  if (typeof contents === 'string' || Buffer.isBuffer(contents)) {
    compressedStream.write(contents);
    compressedStream.pipe(response);
    compressedStream.end();
  } else {
    contents.pipe(compressedStream).pipe(response);
  }
}
|
||||
|
||||
// Creates an HTTP server driven by `handler` and starts listening on `port`.
export function startHttpServer(port, handler) {
  return http.createServer(handler).listen(port);
}
|
||||
|
||||
/**
 * Starts the hot-reload websocket server on `port`.
 *
 * Answers client "ping" messages with "pong" (connection handshake) and
 * exposes a `broadcast(message)` helper that sends to every open client —
 * used to push reload events to browsers.
 */
export function startWsServer(port) {
  const socketServer = new SocketServer({ port, clientTracking: true });

  socketServer.on('connection', (socket) => {
    socket.on('data', (data) => {
      const message = JSON.parse(data);
      switch (message.type) {
        case 'ping':
          socket.send('{"type": "pong"}');
          break;
        default:
          // nope
      }
    });
  });

  socketServer.broadcast = (message) => {
    // eslint-disable-next-line no-underscore-dangle
    socketServer._server.clients.forEach((client) => {
      if (client.readyState === 1) { // OPEN
        client.send(message);
      }
    });
  };

  return socketServer;
}
|
||||
@@ -1,154 +0,0 @@
|
||||
/* eslint-disable max-len */
|
||||
import {
|
||||
existsSync, readFileSync, writeFileSync, unlinkSync,
|
||||
} from 'fs';
|
||||
import { resolve } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import chalk from 'chalk';
|
||||
|
||||
import query from './query';
|
||||
|
||||
// Name of the credentials file stored in the user's home directory.
const TOKEN_FILENAME = '.redocly-config.json';
|
||||
|
||||
/**
 * Client for the Redoc.ly registry API.
 *
 * Handles access-token persistence (REDOCLY_AUTHORIZATION env var or the
 * credentials file in the home directory), token verification against the
 * registry, and authenticated GraphQL queries.
 */
export default class RedoclyClient {
  constructor() {
    this.loadToken();
  }

  // True when a token was loaded; does NOT imply the token is valid.
  hasToken() {
    return !!this.accessToken;
  }

  // Loads the access token from the environment, falling back to the
  // credentials file in the user's home directory.
  loadToken() {
    if (process.env.REDOCLY_AUTHORIZATION) {
      this.accessToken = process.env.REDOCLY_AUTHORIZATION;
      return;
    }

    const credentialsPath = resolve(homedir(), TOKEN_FILENAME);
    if (existsSync(credentialsPath)) {
      const credentials = JSON.parse(readFileSync(credentialsPath, 'utf-8'));
      this.accessToken = credentials && credentials.token;
    }
  }

  // True only when a token is present AND the registry accepts it.
  async isAuthorizedWithRedocly() {
    return this.hasToken() && !!(await this.getAuthorizationHeader());
  }

  // Checks `accessToken` against the registry; resolves to a boolean.
  async verifyToken(accessToken) {
    if (!accessToken) return false;
    const authDetails = await RedoclyClient.authorize(accessToken);
    if (!authDetails) return false;
    return true;
  }

  // Returns the token suitable for an Authorization header, or null
  // (after printing a warning) when the stored token is rejected.
  async getAuthorizationHeader() {
    // print this only if there is token but invalid
    if (this.accessToken && !(await this.verifyToken(this.accessToken))) {
      process.stdout.write(
        `${chalk.yellow('Warning:')} invalid Redoc.ly access token. Use "npx @redocly/openapi-cli registry:login" to provide your access token\n`,
      );
      return null;
    }
    return this.accessToken;
  }

  // Verifies `accessToken` and persists it to the credentials file.
  // Exits the process with code 1 when verification fails.
  async login(accessToken) {
    const credentialsPath = resolve(homedir(), TOKEN_FILENAME);
    process.stdout.write(chalk.grey('\n Logging in...\n'));

    const authorized = await this.verifyToken(accessToken);

    if (!authorized) {
      process.stdout.write(chalk.red('Authorization failed. Please check if you entered a valid token.\n'));
      process.exit(1);
    }

    this.accessToken = accessToken;
    const credentials = {
      token: accessToken,
    };

    writeFileSync(credentialsPath, JSON.stringify(credentials, null, 2));
    process.stdout.write(chalk.green(' Authorization confirmed. ✅\n\n'));
  }

  // Removes the credentials file (if any). Does not clear this.accessToken.
  logout() {
    const credentialsPath = resolve(homedir(), TOKEN_FILENAME);
    if (existsSync(credentialsPath)) {
      unlinkSync(credentialsPath);
    }
    process.stdout.write('Logged out from the Redoc.ly account. ✋\n');
  }

  // Runs a GraphQL query with this client's token as the Authorization header.
  async query(queryString, parameters = {}, headers = {}) {
    return query(queryString, parameters,
      {
        Authorization: this.accessToken,
        ...headers,
      });
  }

  // Probes the registry with a trivial query; resolves to the query result,
  // or null when the token is rejected or the request fails.
  static async authorize(accessToken, verbose = false) {
    try {
      const result = await query(`
      {
        definitions {
          id
        }
      }
      `,
      {},
      {
        Authorization: accessToken,
      });
      return result;
    } catch (e) {
      // FIX: stream.write() requires a string or Buffer; passing the Error
      // object threw ERR_INVALID_ARG_TYPE out of this catch instead of
      // returning null. Stringify the error before writing.
      if (verbose) process.stderr.write(`${e}\n`);
      return null;
    }
  }

  // Records the definition's dependency URLs for the current CI branch.
  // Assumes DEFINITION / VERSION / BRANCH env vars are set (registry CI context).
  async updateDependencies(dependencies) {
    const r = await this.query(`
    mutation UpdateBranchDependenciesFromURLs(
      $urls: [String!]!
      $definitionId: Int!
      $versionId: Int!
      $branchId: Int!
    ) {
      updateBranchDependenciesFromURLs(
        definitionId: $definitionId
        versionId: $versionId
        branchId: $branchId
        urls: $urls
      ) {
        branchName
      }
    }
    `,
    {
      urls: dependencies || [],
      definitionId: parseInt(process.env.DEFINITION, 10),
      versionId: parseInt(process.env.VERSION, 10),
      branchId: parseInt(process.env.BRANCH, 10),
    });
    return r;
  }

  // True for registry URLs that should be treated as dependencies;
  // job-scoped bundle links (7 path segments) are excluded.
  static isRegistryURL(link) {
    const domain = process.env.REDOCLY_DOMAIN || 'redoc.ly';
    if (!link.startsWith(`https://api.${domain}/registry/`)) return false;
    const registryPath = link.replace(`https://api.${domain}/registry/`, '');

    const pathParts = registryPath.split('/');

    // we can be sure, that there is job UUID present
    // (org, definition, version, bundle, branch, job, "openapi.yaml" 🤦♂️)
    // so skip this link.
    if (pathParts.length === 7) return false;

    return true;
  }
}
|
||||
@@ -1,41 +0,0 @@
|
||||
import fetch from 'node-fetch';
|
||||
|
||||
// Registry GraphQL endpoint. REDOCLY_DOMAIN overrides the default domain
// (useful for staging or self-hosted registries).
const GRAPHQL_ENDPOINT = process.env.REDOCLY_DOMAIN
  ? `https://api.${process.env.REDOCLY_DOMAIN}/graphql` : 'https://api.redoc.ly/graphql';
|
||||
|
||||
/**
 * Executes a GraphQL query against the registry endpoint.
 *
 * @param {string} queryString GraphQL document.
 * @param {object} variables   Query variables.
 * @param {object} headers     Extra HTTP headers (e.g. Authorization).
 * @param {string} debugInfo   Opaque context attached to thrown RequestErrors.
 * @returns {Promise<object>}  The `data` payload of the response.
 * @throws {RequestError} on a non-2xx HTTP status or GraphQL-level errors.
 */
export default async function query(queryString, variables = {}, headers = {}, debugInfo = '') {
  // eslint-disable-next-line no-param-reassign
  headers = {
    ...headers,
    'Content-Type': 'application/json',
  };

  const gQLResponse = await fetch(GRAPHQL_ENDPOINT, {
    method: 'POST',
    headers,
    body: JSON.stringify({
      query: queryString,
      variables,
    }),
  });


  if (!gQLResponse.ok) {
    throw new RequestError(`Failed to execute query: ${gQLResponse.status}`, 500, debugInfo);
  }

  // GraphQL reports errors in the body even on HTTP 200 — surface the first one.
  const response = await gQLResponse.json();
  if (response.errors && response.errors.length) {
    throw new RequestError(`Query failed: ${response.errors[0].message}`, 500, debugInfo);
  }

  return response.data;
}
|
||||
|
||||
/**
 * Error raised for failed registry GraphQL requests.
 * Carries an HTTP-style `statusCode` and free-form `debugInfo` context.
 */
export class RequestError extends Error {
  constructor(message, statusCode = 500, debugInfo = '') {
    super(message);
    Object.assign(this, { statusCode, debugInfo });
  }
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user