mirror of
https://github.com/LukeHagar/redocly-cli.git
synced 2025-12-06 04:21:09 +00:00
feat: async API & headers support & registry integration (#107)
@@ -20,4 +20,5 @@ rules:
  no-restricted-syntax: off
  no-console: off
  no-use-before-define: off
  no-await-in-loop: off
1 .gitignore vendored
@@ -6,6 +6,5 @@ coverage/
test/specs/openapi/rebilly-full.yaml
test/specs/openapi/rebilly-full (1).yaml
yarn.lock
package-lock.json
dist/
test/
@@ -1,6 +1,6 @@
language: node_js
node_js:
  - "10"
  - "12"
cache:
  directories:
    - "node_modules"
@@ -235,7 +235,7 @@ lint:

#### Built-in Rules

[Read the docs](RULES.md) for the built-in rules.
[Read the docs](RULES.md) for the built-in rules. You can also [create](RULES.md#string-matcher) your own regular-expression-based rules for `openapi-cli`.

### Advanced
44 RULES.md
@@ -119,6 +119,50 @@ Below, you can find the table of available sub-rules you can update:

#### no-extra-fields
By default, custom fields not defined within the OpenAPI specification can be included only using the `x-` prefix. This rule enforces that policy.

### string-matcher
Allows you to create custom regexp-based rules.

Each sub-rule should define the OpenAPI type and the property it should be triggered on, and can also have an error message and a level (same as for generic rules).

Also, each entry of `rules` must have one of the following fields:
- startsWith
- endsWith
- regexp

If `regexp` is used, the rule matches the value of the `on` type's property against the provided regular expression and throws an error if it doesn't match.

In the case of the `startsWith` and `endsWith` options, the property's value must start or end with the given value.

Also, you can provide `not: true` to invert the rule. For example, it means that the regexp SHOULD NOT match the value, or the string SHOULD NOT start with the given parameter.

Usage example:
```
lint:
  rules:
    string-matcher:
      level: warning
      rules:
        UrlsNotExample:
          on: OpenAPIServer.url
          not: true
          startsWith: https://api-sandbox
          level: error
          message: 'Example servers should not be in api sandbox. God knows why.'
        ParameterNameStartCapital:
          on: OpenAPIParameter.name
          not: true
          regexp: 'internal'
          message: 'Parameter names must not contain the word "internal".'
        ExternalDocsHelpdesk:
          level: 'error'
          on: OpenAPIExternalDocumentation.url
          startsWith: docs.redoc.ly
          message: 'External docs must be only on corporate helpdesk.'
        OnlyOpensourceLicense:
          on: OpenAPILicense.name
          regexp: '^Rebilly$'
          message: 'Only one license can be used.'
```

## Linting rules
### suggest-possible-refs
@@ -1,6 +1,17 @@
apiDefinitions:
  sample: ./syntetic/to_bundle/bundle.yaml
  simpleDefinition: ./syntetic/syntetic.yaml
  anotherWithError: ./syntetic/syntetic-2.yaml
  rebilly: ./openapi-directory/rebilly-full.yaml

resolve:
  http:
    headers:
      - name: Client-Header
        value: sometestvalue
        matches: api.redoc.ly/.*
      - name: Custom
        envVariable: TEST_VAR
        matches: api.redoc.ly/.*

lint:
  codeframes: on
@@ -15,6 +26,7 @@ lint:
|
||||
oas3-schema/external-docs:
|
||||
url: off
|
||||
|
||||
string-matcher: off
|
||||
|
||||
path-param-exists: on
|
||||
operation-2xx-response: on
|
||||
@@ -25,9 +37,9 @@ lint:
|
||||
|
||||
api-servers: on
|
||||
license-url: on
|
||||
no-extra-fields: on
|
||||
operation-description: on
|
||||
operation-operationId: on
|
||||
no-extra-fields: off
|
||||
operation-description: off
|
||||
operation-operationId: off
|
||||
operation-tags: off
|
||||
provide-contact: on
|
||||
servers-no-trailing-slash: on
|
||||
provide-contact: off
|
||||
servers-no-trailing-slash: off
|
||||
@@ -8,8 +8,7 @@ info:
|
||||
name: Ivan Goncharov
|
||||
email: ivan@redoc.ly
|
||||
license:
|
||||
name: example
|
||||
url: example.org
|
||||
$ref: https://api.redoc.online/registry/oleses-testing/API%20petstore/github1/bundle/Default-branch#/info/license
|
||||
|
||||
servers:
|
||||
- url: 'http://example.org'
|
||||
|
||||
10797 package-lock.json generated Normal file
File diff suppressed because it is too large
34 package.json
@@ -37,32 +37,32 @@
|
||||
"oas"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.6.0",
|
||||
"@babel/core": "^7.6.0",
|
||||
"@babel/node": "^7.6.1",
|
||||
"@babel/preset-env": "^7.6.0",
|
||||
"babel-eslint": "^10.0.3",
|
||||
"eslint": "^6.4.0",
|
||||
"eslint-config-airbnb-base": "^14.0.0",
|
||||
"@babel/cli": "^7.8.4",
|
||||
"@babel/core": "^7.9.0",
|
||||
"@babel/node": "^7.8.7",
|
||||
"@babel/preset-env": "^7.9.0",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"eslint": "^6.8.0",
|
||||
"eslint-config-airbnb-base": "^14.1.0",
|
||||
"eslint-plugin-babel": "^5.3.0",
|
||||
"eslint-plugin-import": "^2.18.2",
|
||||
"jest": "^24.9.0",
|
||||
"nyc": "^14.1.1",
|
||||
"prettier": "^1.18.2",
|
||||
"webpack": "^4.41.0",
|
||||
"webpack-cli": "^3.3.9"
|
||||
"eslint-plugin-import": "^2.20.1",
|
||||
"jest": "^25.1.0",
|
||||
"prettier": "^2.0.1",
|
||||
"webpack": "^4.42.0",
|
||||
"webpack-cli": "^3.3.11"
|
||||
},
|
||||
"dependencies": {
|
||||
"chalk": "^2.4.2",
|
||||
"chalk": "^3.0.0",
|
||||
"chokidar": "^3.3.1",
|
||||
"commander": "^3.0.1",
|
||||
"handlebars": "^4.7.2",
|
||||
"commander": "^5.0.0",
|
||||
"handlebars": "^4.7.3",
|
||||
"js-yaml": "^3.13.1",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"merge-deep": "^3.0.2",
|
||||
"minimatch": "^3.0.4",
|
||||
"node-fetch": "^2.6.0",
|
||||
"portfinder": "^1.0.25",
|
||||
"simple-websocket": "^8.1.1",
|
||||
"xmlhttprequest": "^1.8.0",
|
||||
"yaml-ast-parser": "0.0.43"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ lint:
|
||||
no-unused-schemas: on
|
||||
operation-operationId-unique: on
|
||||
path-declarations-must-exist: on
|
||||
string-matcher: off
|
||||
|
||||
camel-case-names: off
|
||||
parameter-description: off
|
||||
|
||||
@@ -14,7 +14,7 @@ function tests(type, resolvedFileName) {
|
||||
let ctx;
|
||||
let doc;
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
const file = join(__dirname, 'data', 'index.yaml');
|
||||
const source = readFileSync(file, 'utf-8');
|
||||
const document = yaml.safeLoad(source);
|
||||
@@ -24,9 +24,9 @@ function tests(type, resolvedFileName) {
|
||||
doc = document[type];
|
||||
});
|
||||
|
||||
test('should successfully resolve transitive $ref', () => {
|
||||
test('should successfully resolve transitive $ref', async () => {
|
||||
ctx.path.push('test1');
|
||||
const res = resolveNode(doc.test1, ctx);
|
||||
const res = await resolveNode(doc.test1, ctx);
|
||||
expect(res).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"node": Object {
|
||||
@@ -38,18 +38,18 @@ function tests(type, resolvedFileName) {
|
||||
expect(ctx.result).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('should fail to resolve incorrect transitive $ref with correct error at initial file', () => {
|
||||
test('should fail to resolve incorrect transitive $ref with correct error at initial file', async () => {
|
||||
ctx.path.push('test2');
|
||||
const res = resolveNode(doc.test2, ctx);
|
||||
const res = await resolveNode(doc.test2, ctx);
|
||||
expect(res.node).toEqual(doc.test2);
|
||||
expect(ctx.result).toHaveLength(1);
|
||||
expect(ctx.result[0].file).toMatch('index.yaml');
|
||||
expect(ctx.result[0].path).toEqual([type, 'test2', '$ref']);
|
||||
});
|
||||
|
||||
test('should fail to resolve incorrect transitive $ref with error at first unresolved $ref', () => {
|
||||
test('should fail to resolve incorrect transitive $ref with error at first unresolved $ref', async () => {
|
||||
ctx.path.push('test3');
|
||||
const res = resolveNode(doc.test3, ctx);
|
||||
const res = await resolveNode(doc.test3, ctx);
|
||||
expect(res.node).toEqual(doc.test3);
|
||||
expect(ctx.result).toHaveLength(1);
|
||||
expect(ctx.result[0].file).toMatch(resolvedFileName);
|
||||
@@ -60,9 +60,9 @@ function tests(type, resolvedFileName) {
|
||||
expect(ctx.result[0].referencedFrom.path).toEqual([type, 'test3']);
|
||||
});
|
||||
|
||||
test('should fail to resolve circular transitive $ref', () => {
|
||||
test('should fail to resolve circular transitive $ref', async () => {
|
||||
ctx.path.push('test4');
|
||||
const res = resolveNode(doc.test4, ctx);
|
||||
const res = await resolveNode(doc.test4, ctx);
|
||||
expect(res.node).toEqual(doc.test4);
|
||||
expect(ctx.result).toHaveLength(1);
|
||||
expect(ctx.result[0].file).toMatch(resolvedFileName);
|
||||
|
||||
File diff suppressed because it is too large
@@ -7,7 +7,7 @@ import createContext from './context';
|
||||
|
||||
import { OpenAPIRoot } from './types';
|
||||
|
||||
export const bundleToFile = (fName, outputFile, force) => {
|
||||
export const bundleToFile = async (fName, outputFile, force) => {
|
||||
const resolvedFileName = fName; // path.resolve(fName);
|
||||
const doc = fs.readFileSync(resolvedFileName, 'utf-8');
|
||||
let document;
|
||||
@@ -21,7 +21,6 @@ export const bundleToFile = (fName, outputFile, force) => {
|
||||
if (!document.openapi) { return []; }
|
||||
|
||||
const lintConfig = getLintConfig({});
|
||||
// config.customRules = [];
|
||||
lintConfig.rules = {
|
||||
...lintConfig.rules,
|
||||
bundler: {
|
||||
@@ -33,11 +32,11 @@ export const bundleToFile = (fName, outputFile, force) => {
|
||||
|
||||
const ctx = createContext(document, doc, resolvedFileName, lintConfig);
|
||||
|
||||
traverseNode(document, OpenAPIRoot, ctx);
|
||||
await traverseNode(document, OpenAPIRoot, ctx);
|
||||
return ctx.result;
|
||||
};
|
||||
|
||||
export const bundle = (fName, force, options) => {
|
||||
export const bundle = async (fName, force, options) => {
|
||||
const resolvedFileName = fName; // path.resolve(fName);
|
||||
const doc = fs.readFileSync(resolvedFileName, 'utf-8');
|
||||
let document;
|
||||
@@ -62,7 +61,7 @@ export const bundle = (fName, force, options) => {
|
||||
|
||||
const ctx = createContext(document, doc, resolvedFileName, config);
|
||||
|
||||
traverseNode(document, OpenAPIRoot, ctx);
|
||||
await traverseNode(document, OpenAPIRoot, ctx);
|
||||
|
||||
return { bundle: ctx.bundlingResult, result: ctx.result, fileDependencies: ctx.fileDependencies };
|
||||
};
|
||||
|
||||
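`bundle` and `bundleToFile` are now async, so callers have to await them. A minimal caller sketch (the import path and file name are assumptions):

```js
import { bundle } from './bundle';

(async () => {
  // Resolves to the bundled document plus validation messages and file dependencies.
  const { bundle: openapiBundle, result } = await bundle('openapi.yaml', false, {});
  process.stdout.write(`Bundled with ${result.length} validation message(s)\n`);
  return openapiBundle;
})();
```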
@@ -1,7 +1,8 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import chalk from 'chalk';
|
||||
import program from 'commander';
|
||||
import commander from 'commander';
|
||||
|
||||
import fs from 'fs';
|
||||
import {
|
||||
join, basename, dirname, extname,
|
||||
@@ -18,18 +19,22 @@ import { getFallbackEntryPointsOrExit, getConfig } from '../config';
|
||||
|
||||
import startPreviewServer from '../preview-docs';
|
||||
|
||||
const validateFile = (filePath, options, cmdObj) => {
|
||||
const program = new commander.Command();
|
||||
|
||||
const validateFile = async (filePath, options, cmdObj) => {
|
||||
let result;
|
||||
|
||||
if (!fs.existsSync(filePath) && isFullyQualifiedUrl(filePath)) {
|
||||
process.stdout.write('Will validate from URL\n');
|
||||
result = validateFromUrl(filePath, options);
|
||||
result = await validateFromUrl(filePath, options);
|
||||
} else {
|
||||
result = validateFromFile(filePath, options);
|
||||
result = await validateFromFile(filePath, options);
|
||||
}
|
||||
const resultStats = outputMessages(result, cmdObj);
|
||||
|
||||
const { totalErrors, totalWarnings } = resultStats;
|
||||
process.stdout.write(
|
||||
`${chalk.blueBright(filePath)} results. Errors: ${resultStats.totalErrors}, warnings: ${resultStats.totalWarnings}\n`,
|
||||
`${chalk.blueBright(filePath)} results. Errors: ${totalErrors}, warnings: ${totalWarnings}\n`,
|
||||
);
|
||||
|
||||
return {
|
||||
@@ -52,7 +57,7 @@ const cli = () => {
|
||||
.option('--short', 'Reduce output in case of bundling errors.')
|
||||
.option('--ext <ext>', 'Output extension: json, yaml or yml')
|
||||
.option('-f, --force', 'Produce bundle output file even if validation errors were encountered')
|
||||
.action((entryPoints, cmdObj) => {
|
||||
.action(async (entryPoints, cmdObj) => {
|
||||
if (cmdObj.ext && ['yaml', 'yml', 'json'].indexOf(cmdObj.ext) === -1) {
|
||||
process.stdout.write(
|
||||
'Unsupported value for --ext option. Supported values are: yaml, yml or json',
|
||||
@@ -73,7 +78,7 @@ const cli = () => {
|
||||
warnings: 0,
|
||||
};
|
||||
|
||||
entryPoints.forEach((entryPoint) => {
|
||||
for (const entryPoint of entryPoints) {
|
||||
let output;
|
||||
if (cmdObj.output) {
|
||||
const fileName = isOutputDir
|
||||
@@ -82,7 +87,7 @@ const cli = () => {
|
||||
output = join(dir, `${fileName}.${ext}`);
|
||||
}
|
||||
|
||||
const bundlingStatus = bundleToFile(entryPoint, output, cmdObj.force);
|
||||
const bundlingStatus = await bundleToFile(entryPoint, output, cmdObj.force);
|
||||
const resultStats = outputMessages(bundlingStatus, cmdObj);
|
||||
|
||||
if (resultStats.totalErrors === 0) {
|
||||
@@ -103,17 +108,17 @@ const cli = () => {
|
||||
results.errors += resultStats.totalErrors;
|
||||
results.warnings += resultStats.totalWarnings;
|
||||
}
|
||||
});
|
||||
}
|
||||
process.exit(results.errors === 0 || cmdObj.force ? 0 : 1);
|
||||
});
|
||||
|
||||
program
|
||||
.command('validate [entryPoints...]')
|
||||
.description('Validate given OpenAPI 3 definition file.')
|
||||
.option('--short', 'Reduce output to required minimum')
|
||||
.option('--short', 'Reduce output to required minimum.')
|
||||
.option('--no-frame', 'Print no codeframes with errors.')
|
||||
.option('--config <path>', 'Specify custom yaml or json config')
|
||||
.action((entryPoints, cmdObj) => {
|
||||
.action(async (entryPoints, cmdObj) => {
|
||||
const options = {};
|
||||
const results = {
|
||||
errors: 0,
|
||||
@@ -130,7 +135,7 @@ const cli = () => {
|
||||
for (let i = 0; i < entryPoints.length; i++) {
|
||||
printValidationHeader(entryPoints[i]);
|
||||
|
||||
const msgs = validateFile(entryPoints[i], options, cmdObj);
|
||||
const msgs = await validateFile(entryPoints[i], options, cmdObj);
|
||||
results.errors += msgs.errors;
|
||||
results.warnings += msgs.warnings;
|
||||
}
|
||||
@@ -164,36 +169,36 @@ const cli = () => {
|
||||
return cachedBundle;
|
||||
}
|
||||
|
||||
function updateBundle() {
|
||||
cachedBundle = new Promise((resolve) => {
|
||||
process.stdout.write('\nBundling...\n\n');
|
||||
const { bundle: openapiBundle, result, fileDependencies } = bundle(entryPoint, output, {
|
||||
lint: {
|
||||
codeframes: false,
|
||||
},
|
||||
});
|
||||
|
||||
const removed = [...deps].filter((x) => !fileDependencies.has(x));
|
||||
watcher.unwatch(removed);
|
||||
watcher.add([...fileDependencies]);
|
||||
deps.clear();
|
||||
fileDependencies.forEach(deps.add, deps);
|
||||
|
||||
const resultStats = outputMessages(result, { short: true });
|
||||
|
||||
if (resultStats.totalErrors === 0) {
|
||||
process.stdout.write(
|
||||
resultStats.totalErrors === 0
|
||||
? `Created a bundle for ${entryPoint} ${resultStats.totalWarnings > 0 ? 'with warnings' : 'successfully'}\n`
|
||||
: chalk.yellow(`Created a bundle for ${entryPoint} with errors. Docs may be broken or not accurate\n`),
|
||||
);
|
||||
}
|
||||
|
||||
resolve(openapiBundle);
|
||||
async function updateBundle() {
|
||||
process.stdout.write('\nBundling...\n\n');
|
||||
const { bundle: openapiBundle, result, fileDependencies } = await bundle(entryPoint, output, {
|
||||
lint: {
|
||||
codeframes: false,
|
||||
},
|
||||
});
|
||||
|
||||
const removed = [...deps].filter((x) => !fileDependencies.has(x));
|
||||
watcher.unwatch(removed);
|
||||
watcher.add([...fileDependencies]);
|
||||
deps.clear();
|
||||
fileDependencies.forEach(deps.add, deps);
|
||||
|
||||
const resultStats = outputMessages(result, { short: true });
|
||||
|
||||
if (resultStats.totalErrors === 0) {
|
||||
process.stdout.write(
|
||||
resultStats.totalErrors === 0
|
||||
? `Created a bundle for ${entryPoint} ${resultStats.totalWarnings > 0 ? 'with warnings' : 'successfully'}\n`
|
||||
: chalk.yellow(`Created a bundle for ${entryPoint} with errors. Docs may be broken or not accurate\n`),
|
||||
);
|
||||
}
|
||||
|
||||
return openapiBundle;
|
||||
}
|
||||
|
||||
setImmediate(() => updateBundle()); // initial cache
|
||||
setImmediate(() => {
|
||||
cachedBundle = updateBundle();
|
||||
}); // initial cache
|
||||
|
||||
const hotClients = await startPreviewServer(cmdObj.port, {
|
||||
getBundle,
|
||||
@@ -206,7 +211,7 @@ const cli = () => {
|
||||
});
|
||||
|
||||
const debouncedUpdatedeBundle = debounce(async () => {
|
||||
updateBundle();
|
||||
cachedBundle = updateBundle();
|
||||
await cachedBundle;
|
||||
hotClients.broadcast('{"type": "reload", "bundle": true}');
|
||||
}, 2000);
|
||||
|
||||
17 src/cli/utils.js Normal file
@@ -0,0 +1,17 @@
|
||||
import * as readline from 'readline';
|
||||
|
||||
export async function promptUser(query) {
|
||||
return new Promise((resolve) => {
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
});
|
||||
|
||||
rl.question(`${query}:\n`, (answer) => {
|
||||
rl.close();
|
||||
resolve(answer);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export default promptUser;
|
||||
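A usage sketch for the new `promptUser` helper (the import path and prompt text are illustrative):

```js
import { promptUser } from './utils';

(async () => {
  // Prints the query, waits for a line on stdin, and resolves with the answer.
  const accessToken = await promptUser('Please paste your Redoc.ly access token');
  process.stdout.write(`Received ${accessToken.length} characters\n`);
})();
```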
@@ -40,7 +40,6 @@ export function getConfig(options) {
|
||||
}
|
||||
|
||||
warningShown = true;
|
||||
|
||||
config = { lint: config };
|
||||
}
|
||||
}
|
||||
@@ -70,6 +69,21 @@ export function getConfig(options) {
|
||||
const transformingVisitors = require(lintConfig.transformers);
|
||||
lintConfig.transformingVisitors = transformingVisitors;
|
||||
|
||||
if (!resolvedConfig.lint) {
|
||||
resolvedConfig.lint = {};
|
||||
}
|
||||
|
||||
resolvedConfig.lint.headers = (
|
||||
(
|
||||
resolvedConfig.resolve
|
||||
&& resolvedConfig.resolve.http
|
||||
&& resolvedConfig.resolve.http.headers)
|
||||
|| []
|
||||
).map((header) => ({
|
||||
...header,
|
||||
value: header.envVariable ? process.env[header.envVariable] : header.value,
|
||||
}));
|
||||
|
||||
return resolvedConfig;
|
||||
}
|
||||
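To make the header mapping above concrete, here is a standalone sketch of what `getConfig` does with a `resolve.http.headers` section (the values are the ones from the test config earlier in this diff):

```js
// Standalone reproduction of the mapping inside getConfig(); not the actual module.
const resolvedConfig = {
  resolve: {
    http: {
      headers: [
        { name: 'Client-Header', value: 'sometestvalue', matches: 'api.redoc.ly/.*' },
        { name: 'Custom', envVariable: 'TEST_VAR', matches: 'api.redoc.ly/.*' },
      ],
    },
  },
  lint: {},
};

resolvedConfig.lint.headers = (
  (resolvedConfig.resolve
    && resolvedConfig.resolve.http
    && resolvedConfig.resolve.http.headers)
  || []
).map((header) => ({
  ...header,
  // Entries with envVariable get their value from the environment at config-load time.
  value: header.envVariable ? process.env[header.envVariable] : header.value,
}));

// With TEST_VAR=secret in the environment, the second header's value becomes 'secret'.
```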
|
||||
@@ -86,7 +100,7 @@ export function getFallbackEntryPointsOrExit(argsEntrypoints, config = getConfig
|
||||
) {
|
||||
res = Object.values(config.apiDefinitions);
|
||||
} else if (argsEntrypoints && argsEntrypoints.length && config.apiDefinitions) {
|
||||
res = res.map(aliasOrPath => config.apiDefinitions[aliasOrPath] || aliasOrPath);
|
||||
res = res.map((aliasOrPath) => config.apiDefinitions[aliasOrPath] || aliasOrPath);
|
||||
}
|
||||
|
||||
if (!res || !res.length) {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import path from 'path';
|
||||
|
||||
import loadRuleset, { loadRulesetExtension } from './loader';
|
||||
@@ -34,6 +35,8 @@ const getRule = (ctx, ruleName) => {
|
||||
|
||||
function createContext(node, sourceFile, filePath, config) {
|
||||
const [enabledRules, allRules] = loadRuleset(config);
|
||||
|
||||
config.headers = config.headers || [];
|
||||
return {
|
||||
document: node,
|
||||
filePath: path.resolve(filePath),
|
||||
@@ -41,14 +44,19 @@ function createContext(node, sourceFile, filePath, config) {
|
||||
cache: {},
|
||||
visited: [],
|
||||
result: [],
|
||||
registryDependencies: [],
|
||||
definitionStack: [],
|
||||
definitions: loadDefinitions(config),
|
||||
pathStack: [],
|
||||
source: sourceFile,
|
||||
enableCodeframe: !!(config && (config.codeframes === 'on' || config.codeframes === true)),
|
||||
customRules: [...loadRulesetExtension(config, 'transformingVisitors'), ...enabledRules, ...loadRulesetExtension(config, 'rulesExtensions')],
|
||||
customRules: [
|
||||
...loadRulesetExtension(config, 'transformingVisitors'),
|
||||
...enabledRules, ...loadRulesetExtension(config, 'rulesExtensions'),
|
||||
],
|
||||
allRules,
|
||||
config,
|
||||
headers: config.headers,
|
||||
messageHelpers,
|
||||
validateFieldsRaw,
|
||||
getRule,
|
||||
|
||||
@@ -14,30 +14,30 @@ const createCtx = () => ({
|
||||
definitionStack: [],
|
||||
pathStack: [],
|
||||
source: fs.readFileSync("./definitions/syntetic/syntetic-1.yaml", "utf-8"),
|
||||
enableCodeframe: true
|
||||
enableCodeframe: true,
|
||||
});
|
||||
|
||||
describe("createError", () => {
|
||||
test("should create valid error", () => {
|
||||
const ctx = {
|
||||
...createCtx(),
|
||||
path: ["paths", "user", "get", "responses"]
|
||||
path: ["paths", "user", "get", "responses"],
|
||||
};
|
||||
expect(
|
||||
createError("This is a test error", {}, ctx, {
|
||||
severity: messageLevels.ERROR
|
||||
severity: messageLevels.ERROR,
|
||||
})
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"codeFrame": "[90m22| operationId: userGet[39m
|
||||
[90m23| description: Get user[39m
|
||||
[90m24| [4m[31mresponses:[90m[24m[39m
|
||||
[90m25|[39m[31m [4m[31m '200':[31m[24m[39m
|
||||
[90m26|[39m[31m [4m[31m description: example description[31m[24m[39m
|
||||
[90m27|[39m[31m [4m[31m content:[31m[24m[39m
|
||||
[90m28|[39m[31m [4m[31m application/json:[31m[24m[39m
|
||||
[90m29|[39m[31m [4m[31m schema:[31m[24m[39m
|
||||
[90m30|[39m[31m [4m[31m type: object[31m[24m[39m
|
||||
[90m24| [4m[31mresponses:[39m[90m[24m[39m
|
||||
[90m25|[39m[31m [4m[31m '200':[39m[31m[24m[39m
|
||||
[90m26|[39m[31m [4m[31m description: example description[39m[31m[24m[39m
|
||||
[90m27|[39m[31m [4m[31m content:[39m[31m[24m[39m
|
||||
[90m28|[39m[31m [4m[31m application/json:[39m[31m[24m[39m
|
||||
[90m29|[39m[31m [4m[31m schema:[39m[31m[24m[39m
|
||||
[90m30|[39m[31m [4m[31m type: object[39m[31m[24m[39m
|
||||
[90m31| project:[39m
|
||||
[90m32| get:[39m",
|
||||
"enableCodeframe": true,
|
||||
@@ -71,14 +71,14 @@ describe("createError", () => {
|
||||
const ctx = {
|
||||
...createCtx(),
|
||||
path: [],
|
||||
enableCodeframe: false
|
||||
enableCodeframe: false,
|
||||
};
|
||||
expect(
|
||||
createError("This is a test error", {}, ctx, {
|
||||
severity: messageLevels.ERROR,
|
||||
target: "key",
|
||||
possibleAlternate: "example",
|
||||
fromRule: "testing"
|
||||
fromRule: "testing",
|
||||
})
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
@@ -114,312 +114,312 @@ describe("fromError", () => {
|
||||
pathStack: [
|
||||
{
|
||||
file: createCtx().filePath,
|
||||
path: ["paths", "user", "get", "responses"]
|
||||
}
|
||||
]
|
||||
path: ["paths", "user", "get", "responses"],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
ctx.pathStack[0].source = ctx.source;
|
||||
ctx.pathStack[0].document = ctx.document;
|
||||
|
||||
const baseError = createError("This is a test error", {}, ctx, {
|
||||
severity: messageLevels.ERROR
|
||||
severity: messageLevels.ERROR,
|
||||
});
|
||||
ctx.path = ["paths", "project", "get", "responses"];
|
||||
expect(fromError(baseError, ctx)).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"cache": Object {},
|
||||
"codeFrame": "[90m22| operationId: userGet[39m
|
||||
[90m23| description: Get user[39m
|
||||
[90m24| [4m[31mresponses:[90m[24m[39m
|
||||
[90m25|[39m[31m [4m[31m '200':[31m[24m[39m
|
||||
[90m26|[39m[31m [4m[31m description: example description[31m[24m[39m
|
||||
[90m27|[39m[31m [4m[31m content:[31m[24m[39m
|
||||
[90m28|[39m[31m [4m[31m application/json:[31m[24m[39m
|
||||
[90m29|[39m[31m [4m[31m schema:[31m[24m[39m
|
||||
[90m30|[39m[31m [4m[31m type: object[31m[24m[39m
|
||||
[90m31| project:[39m
|
||||
[90m32| get:[39m",
|
||||
"definitionStack": Array [],
|
||||
"document": null,
|
||||
"enableCodeframe": true,
|
||||
"file": "definitions/syntetic/syntetic-1.yaml",
|
||||
"filePath": "./definitions/syntetic/syntetic-1.yaml",
|
||||
"fromRule": undefined,
|
||||
"location": Object {
|
||||
"endCol": 28,
|
||||
"endIndex": 600,
|
||||
"endLine": 30,
|
||||
"startCol": 7,
|
||||
"startIndex": 432,
|
||||
"startLine": 24,
|
||||
},
|
||||
"message": "This is a test error",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"pathStack": Array [
|
||||
Object {
|
||||
"document": Object {
|
||||
"components": Object {
|
||||
"parameters": Object {
|
||||
"example": Object {
|
||||
"allOf": Array [
|
||||
Object {
|
||||
"in": "query",
|
||||
"name": "bla",
|
||||
"required": false,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
Object {
|
||||
"description": "blo",
|
||||
},
|
||||
Object {
|
||||
"description": "bla",
|
||||
},
|
||||
],
|
||||
},
|
||||
"genericExample": Object {
|
||||
"in": "query",
|
||||
"name": "example",
|
||||
"required": true,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
},
|
||||
"securitySchemes": Object {
|
||||
"JWT": Object {
|
||||
"bearerFormat": "JWT",
|
||||
"description": "You can create a JSON Web Token (JWT) via our [JWT Session resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
Usage format: \`Bearer <JWT>\`
|
||||
",
|
||||
"scheme": "bearer",
|
||||
"type": "http",
|
||||
},
|
||||
},
|
||||
},
|
||||
"externalDocs": Object {
|
||||
"description": Object {
|
||||
"$ref": "inc/docs-description.md",
|
||||
},
|
||||
"url": "googlecom",
|
||||
},
|
||||
"info": Object {
|
||||
"contact": Object {
|
||||
"email": "ivan@redoc.ly",
|
||||
"name": "Ivan Goncharov",
|
||||
},
|
||||
"license": Object {
|
||||
"name": "example",
|
||||
"url": "example.org",
|
||||
},
|
||||
"title": "Example OpenAPI 3 definition. Valid.",
|
||||
"version": 1,
|
||||
"x-redocly-overlay": Object {
|
||||
"path": "overlay-info.yaml",
|
||||
},
|
||||
},
|
||||
"openapi": "3.0.2",
|
||||
"paths": Object {
|
||||
"/user/{id}": Object {
|
||||
"get": Object {
|
||||
"description": "Get user by id",
|
||||
"operationId": "withPathParam",
|
||||
"parameters": Array [
|
||||
Object {
|
||||
"description": "User id",
|
||||
"in": "path",
|
||||
"name": "test",
|
||||
"required": true,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
],
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"project": Object {
|
||||
"get": Object {
|
||||
"description": "Get project",
|
||||
"operationId": "projectGet",
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"user": Object {
|
||||
"get": Object {
|
||||
"description": "Get user",
|
||||
"operationId": "userGet",
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
"parameters": Array [
|
||||
Object {
|
||||
"$ref": "#/components/parameters/example",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
"servers": Array [
|
||||
Object {
|
||||
"cache": Object {},
|
||||
"codeFrame": "[90m22| operationId: userGet[39m
|
||||
[90m23| description: Get user[39m
|
||||
[90m24| [4m[31mresponses:[39m[90m[24m[39m
|
||||
[90m25|[39m[31m [4m[31m '200':[39m[31m[24m[39m
|
||||
[90m26|[39m[31m [4m[31m description: example description[39m[31m[24m[39m
|
||||
[90m27|[39m[31m [4m[31m content:[39m[31m[24m[39m
|
||||
[90m28|[39m[31m [4m[31m application/json:[39m[31m[24m[39m
|
||||
[90m29|[39m[31m [4m[31m schema:[39m[31m[24m[39m
|
||||
[90m30|[39m[31m [4m[31m type: object[39m[31m[24m[39m
|
||||
[90m31| project:[39m
|
||||
[90m32| get:[39m",
|
||||
"definitionStack": Array [],
|
||||
"document": null,
|
||||
"enableCodeframe": true,
|
||||
"file": "definitions/syntetic/syntetic-1.yaml",
|
||||
"filePath": "./definitions/syntetic/syntetic-1.yaml",
|
||||
"fromRule": undefined,
|
||||
"location": Object {
|
||||
"endCol": 28,
|
||||
"endIndex": 600,
|
||||
"endLine": 30,
|
||||
"startCol": 7,
|
||||
"startIndex": 432,
|
||||
"startLine": 24,
|
||||
},
|
||||
"message": "This is a test error",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"pathStack": Array [
|
||||
Object {
|
||||
"document": Object {
|
||||
"components": Object {
|
||||
"parameters": Object {
|
||||
"example": Object {
|
||||
"allOf": Array [
|
||||
Object {
|
||||
"url": "http://example.org",
|
||||
"in": "query",
|
||||
"name": "bla",
|
||||
"required": false,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
Object {
|
||||
"description": "blo",
|
||||
},
|
||||
Object {
|
||||
"description": "bla",
|
||||
},
|
||||
],
|
||||
},
|
||||
"file": "./definitions/syntetic/syntetic-1.yaml",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
"genericExample": Object {
|
||||
"in": "query",
|
||||
"name": "example",
|
||||
"required": true,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
},
|
||||
"securitySchemes": Object {
|
||||
"JWT": Object {
|
||||
"bearerFormat": "JWT",
|
||||
"description": "You can create a JSON Web Token (JWT) via our [JWT Session resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
Usage format: \`Bearer <JWT>\`
|
||||
",
|
||||
"scheme": "bearer",
|
||||
"type": "http",
|
||||
},
|
||||
},
|
||||
},
|
||||
"externalDocs": Object {
|
||||
"description": Object {
|
||||
"$ref": "inc/docs-description.md",
|
||||
},
|
||||
"url": "googlecom",
|
||||
},
|
||||
"info": Object {
|
||||
"contact": Object {
|
||||
"email": "ivan@redoc.ly",
|
||||
"name": "Ivan Goncharov",
|
||||
},
|
||||
"license": Object {
|
||||
"name": "example",
|
||||
"url": "example.org",
|
||||
},
|
||||
"title": "Example OpenAPI 3 definition. Valid.",
|
||||
"version": 1,
|
||||
"x-redocly-overlay": Object {
|
||||
"path": "overlay-info.yaml",
|
||||
},
|
||||
},
|
||||
"openapi": "3.0.2",
|
||||
"paths": Object {
|
||||
"/user/{id}": Object {
|
||||
"get": Object {
|
||||
"description": "Get user by id",
|
||||
"operationId": "withPathParam",
|
||||
"parameters": Array [
|
||||
Object {
|
||||
"description": "User id",
|
||||
"in": "path",
|
||||
"name": "test",
|
||||
"required": true,
|
||||
"schema": Object {
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
],
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"project": Object {
|
||||
"get": Object {
|
||||
"description": "Get project",
|
||||
"operationId": "projectGet",
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"user": Object {
|
||||
"get": Object {
|
||||
"description": "Get user",
|
||||
"operationId": "userGet",
|
||||
"responses": Object {
|
||||
"200": Object {
|
||||
"content": Object {
|
||||
"application/json": Object {
|
||||
"schema": Object {
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
},
|
||||
"description": "example description",
|
||||
},
|
||||
},
|
||||
},
|
||||
"parameters": Array [
|
||||
Object {
|
||||
"$ref": "#/components/parameters/example",
|
||||
},
|
||||
],
|
||||
"source": "openapi: 3.0.2
|
||||
info:
|
||||
x-redocly-overlay:
|
||||
path: overlay-info.yaml
|
||||
title: Example OpenAPI 3 definition. Valid.
|
||||
version: 1.0
|
||||
contact:
|
||||
name: Ivan Goncharov
|
||||
email: ivan@redoc.ly
|
||||
license:
|
||||
name: example
|
||||
url: example.org
|
||||
|
||||
servers:
|
||||
- url: 'http://example.org'
|
||||
|
||||
paths:
|
||||
user:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/example'
|
||||
get:
|
||||
operationId: userGet
|
||||
description: Get user
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
project:
|
||||
get:
|
||||
operationId: projectGet
|
||||
description: Get project
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
'/user/{id}':
|
||||
get:
|
||||
parameters:
|
||||
- in: path
|
||||
name: test
|
||||
description: User id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
operationId: withPathParam
|
||||
description: Get user by id
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
externalDocs:
|
||||
description:
|
||||
$ref: inc/docs-description.md
|
||||
url: googlecom
|
||||
components:
|
||||
securitySchemes:
|
||||
JWT:
|
||||
description: >
|
||||
You can create a JSON Web Token (JWT) via our [JWT Session
|
||||
resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
|
||||
Usage format: \`Bearer <JWT>\`
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
parameters:
|
||||
example:
|
||||
allOf:
|
||||
- name: bla
|
||||
in: query
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
- description: blo
|
||||
- description: bla
|
||||
genericExample:
|
||||
name: example
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string",
|
||||
},
|
||||
},
|
||||
"servers": Array [
|
||||
Object {
|
||||
"url": "http://example.org",
|
||||
},
|
||||
],
|
||||
"possibleAlternate": undefined,
|
||||
"referencedFrom": Object {
|
||||
"file": "definitions/syntetic/syntetic-1.yaml",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"startLine": 24,
|
||||
},
|
||||
"severity": 4,
|
||||
"source": null,
|
||||
"target": undefined,
|
||||
"value": Object {},
|
||||
"visited": Array [],
|
||||
}
|
||||
`);
|
||||
},
|
||||
"file": "./definitions/syntetic/syntetic-1.yaml",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"source": "openapi: 3.0.2
|
||||
info:
|
||||
x-redocly-overlay:
|
||||
path: overlay-info.yaml
|
||||
title: Example OpenAPI 3 definition. Valid.
|
||||
version: 1.0
|
||||
contact:
|
||||
name: Ivan Goncharov
|
||||
email: ivan@redoc.ly
|
||||
license:
|
||||
name: example
|
||||
url: example.org
|
||||
|
||||
servers:
|
||||
- url: 'http://example.org'
|
||||
|
||||
paths:
|
||||
user:
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/example'
|
||||
get:
|
||||
operationId: userGet
|
||||
description: Get user
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
project:
|
||||
get:
|
||||
operationId: projectGet
|
||||
description: Get project
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
'/user/{id}':
|
||||
get:
|
||||
parameters:
|
||||
- in: path
|
||||
name: test
|
||||
description: User id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
operationId: withPathParam
|
||||
description: Get user by id
|
||||
responses:
|
||||
'200':
|
||||
description: example description
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
externalDocs:
|
||||
description:
|
||||
$ref: inc/docs-description.md
|
||||
url: googlecom
|
||||
components:
|
||||
securitySchemes:
|
||||
JWT:
|
||||
description: >
|
||||
You can create a JSON Web Token (JWT) via our [JWT Session
|
||||
resource](https://rebilly.github.io/RebillyUserAPI/#tag/JWT-Session/paths/~1signin/post).
|
||||
|
||||
Usage format: \`Bearer <JWT>\`
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
parameters:
|
||||
example:
|
||||
allOf:
|
||||
- name: bla
|
||||
in: query
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
- description: blo
|
||||
- description: bla
|
||||
genericExample:
|
||||
name: example
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string",
|
||||
},
|
||||
],
|
||||
"possibleAlternate": undefined,
|
||||
"referencedFrom": Object {
|
||||
"file": "definitions/syntetic/syntetic-1.yaml",
|
||||
"path": Array [
|
||||
"paths",
|
||||
"user",
|
||||
"get",
|
||||
"responses",
|
||||
],
|
||||
"startLine": 24,
|
||||
},
|
||||
"severity": 4,
|
||||
"source": null,
|
||||
"target": undefined,
|
||||
"value": Object {},
|
||||
"visited": Array [],
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -43,13 +43,15 @@ export const getReferencedFrom = (ctx) => {
|
||||
};
|
||||
};
|
||||
|
||||
const createError = (msg, node, ctx, options) => {
|
||||
const createError = (msg, node, ctx, options, overrideSeverity) => {
|
||||
const {
|
||||
target, possibleAlternate, fromRule,
|
||||
} = options;
|
||||
|
||||
let { severity = messageLevels.ERROR } = options;
|
||||
|
||||
if (overrideSeverity) severity = overrideSeverity;
|
||||
|
||||
if (typeof severity === 'string') {
|
||||
severity = getMsgLevelFromString(severity);
|
||||
}
|
||||
@@ -81,10 +83,10 @@ const createError = (msg, node, ctx, options) => {
|
||||
};
|
||||
|
||||
export const createErrorFlat = (
|
||||
node, ctx, fromRule, severity, msg, target, possibleAlternate,
|
||||
node, ctx, fromRule, severity, msg, target, possibleAlternate, overrideSeverity,
|
||||
) => createError(msg, node, ctx, {
|
||||
target, fromRule, severity, possibleAlternate,
|
||||
});
|
||||
}, overrideSeverity);
|
||||
|
||||
export const fromError = (error, ctx) => (
|
||||
// let location = getLocationByPath(Array.from(ctx.path), ctx, error.target);
|
||||
|
||||
139 src/redocly/index.js Normal file
@@ -0,0 +1,139 @@
|
||||
/* eslint-disable max-len */
|
||||
import {
|
||||
existsSync, readFileSync, writeFileSync, unlinkSync,
|
||||
} from 'fs';
|
||||
import { resolve } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import chalk from 'chalk';
|
||||
|
||||
import query from './query';
|
||||
|
||||
const TOKEN_FILENAME = '.redocly-config.json';
|
||||
|
||||
export default class RedoclyClient {
|
||||
constructor() {
|
||||
this.loadToken();
|
||||
}
|
||||
|
||||
hasToken() {
|
||||
return !!this.accessToken;
|
||||
}
|
||||
|
||||
loadToken() {
|
||||
if (process.env.REDOCLY_AUTHORIZATION) {
|
||||
this.accessToken = process.env.REDOCLY_AUTHORIZATION;
|
||||
return;
|
||||
}
|
||||
|
||||
const credentialsPath = resolve(homedir(), TOKEN_FILENAME);
|
||||
if (existsSync(credentialsPath)) {
|
||||
const credentials = JSON.parse(readFileSync(credentialsPath, 'utf-8'));
|
||||
this.accessToken = credentials && credentials.token;
|
||||
}
|
||||
}
|
||||
|
||||
async verifyToken(accessToken) {
|
||||
if (!accessToken) return false;
|
||||
const authDetails = await RedoclyClient.authorize(accessToken);
|
||||
if (!authDetails) return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
async getAuthorizationHeader() {
|
||||
// print this only if there is token but invalid
|
||||
if (this.accessToken && !(await this.verifyToken(this.accessToken))) {
|
||||
process.stdout.write(
|
||||
`${chalk.yellow('Warning:')} invalid Redoc.ly access token. Use "openapi login" to provide your access token\n`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
return this.accessToken;
|
||||
}
|
||||
|
||||
async login(accessToken) {
|
||||
const credentialsPath = resolve(homedir(), TOKEN_FILENAME);
|
||||
process.stdout.write(chalk.grey('Logging in...\n'));
|
||||
|
||||
const authorized = await this.verifyToken(accessToken);
|
||||
|
||||
if (!authorized) {
|
||||
process.stdout.write(chalk.red('Authorization failed. Please check if you entered a valid token.\n'));
|
||||
}
|
||||
|
||||
this.accessToken = accessToken;
|
||||
const credentials = {
|
||||
token: accessToken,
|
||||
};
|
||||
|
||||
writeFileSync(credentialsPath, JSON.stringify(credentials, null, 2));
|
||||
process.stdout.write(chalk.green('Authorization confirmed. ✅\n'));
|
||||
}
|
||||
|
||||
logout() {
|
||||
const credentialsPath = resolve(homedir(), TOKEN_FILENAME);
|
||||
if (existsSync(credentialsPath)) {
|
||||
unlinkSync(credentialsPath);
|
||||
}
|
||||
process.stdout.write('Logged out from the Redoc.ly account. ✋\n');
|
||||
}
|
||||
|
||||
async query(queryString, parameters = {}, headers = {}) {
|
||||
return query(queryString, parameters,
|
||||
{
|
||||
Authorization: this.accessToken,
|
||||
...headers,
|
||||
});
|
||||
}
|
||||
|
||||
static async authorize(accessToken, verbose = false) {
|
||||
try {
|
||||
const result = await query(`
|
||||
{
|
||||
definitions {
|
||||
id
|
||||
}
|
||||
}
|
||||
`,
|
||||
{},
|
||||
{
|
||||
Authorization: accessToken,
|
||||
});
|
||||
return result;
|
||||
} catch (e) {
|
||||
if (verbose) process.stderr.write(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async updateDependencies(dependencies) {
|
||||
const r = await this.query(`
|
||||
mutation UpdateBranchDependenciesFromURLs ($urls: [String!]!, $definitionId: Int!, $versionId: Int!, $branchId: Int!) {
|
||||
updateBranchDependenciesFromURLs(definitionId:$definitionId, versionId:$versionId, branchId:$branchId, dependencies:$dependencies){
|
||||
branchName
|
||||
}
|
||||
}
|
||||
`,
|
||||
{
|
||||
urls: dependencies || [],
|
||||
definitionId: parseInt(process.env.DEFINITION, 10),
|
||||
versionId: parseInt(process.env.VERSION, 10),
|
||||
branchId: parseInt(process.env.BRANCH, 10),
|
||||
});
|
||||
return r;
|
||||
}
|
||||
|
||||
static isRegistryURL(link) {
|
||||
const domain = process.env.REDOCLY_DOMAIN || 'redoc.ly';
|
||||
if (!link.startsWith(`https://api.${domain}/registry/`)) return false;
|
||||
const registryPath = link.replace(`https://api.${domain}/registry/`, '');
|
||||
|
||||
const pathParts = registryPath.split('/');
|
||||
|
||||
// we can be sure, that there is job UUID present
|
||||
// (org, definition, version, bundle, branch, job, "openapi.yaml" 🤦♂️)
|
||||
// so skip this link.
|
||||
if (pathParts.length === 7) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
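A usage sketch for the new `RedoclyClient` (the import path is an assumption):

```js
import RedoclyClient from './redocly';

(async () => {
  // The constructor loads the token from REDOCLY_AUTHORIZATION or ~/.redocly-config.json.
  const client = new RedoclyClient();
  if (!client.hasToken()) {
    process.stdout.write('No access token found. Use "openapi login" to provide one.\n');
    return;
  }
  // Resolves to the token, or to null (with a warning) if the token fails verification.
  const authorizationHeader = await client.getAuthorizationHeader();
  process.stdout.write(`Registry access ${authorizationHeader ? 'enabled' : 'disabled'}\n`);
})();
```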
41 src/redocly/query.js Normal file
@@ -0,0 +1,41 @@
|
||||
import fetch from 'node-fetch';
|
||||
|
||||
const GRAPHQL_ENDPOINT = process.env.REDOCLY_DOMAIN
|
||||
? `https://api.${process.env.REDOCLY_DOMAIN}/graphql` : 'https://api.redoc.ly/graphql';
|
||||
|
||||
export default async function query(queryString, variables = {}, headers = {}, debugInfo = '') {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
headers = {
|
||||
...headers,
|
||||
'Content-Type': 'application/json',
|
||||
};
|
||||
|
||||
const gQLResponse = await fetch(GRAPHQL_ENDPOINT, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify({
|
||||
query: queryString,
|
||||
variables,
|
||||
}),
|
||||
});
|
||||
|
||||
|
||||
if (!gQLResponse.ok) {
|
||||
throw new RequestError(`Failed to execute query: ${gQLResponse.status}`, 500, debugInfo);
|
||||
}
|
||||
|
||||
const response = await gQLResponse.json();
|
||||
if (response.errors && response.errors.length) {
|
||||
throw new RequestError(`Query failed: ${response.errors[0].message}`, 500, debugInfo);
|
||||
}
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
export class RequestError extends Error {
|
||||
constructor(message, statusCode = 500, debugInfo = '') {
|
||||
super(message);
|
||||
this.statusCode = statusCode;
|
||||
this.debugInfo = debugInfo;
|
||||
}
|
||||
}
|
||||
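A usage sketch for the GraphQL `query` helper and its `RequestError` (the query string reuses the one from `RedoclyClient.authorize`; the import path is an assumption):

```js
import query, { RequestError } from './query';

(async () => {
  try {
    // Headers are merged with the JSON Content-Type before the request is sent.
    const data = await query(
      '{ definitions { id } }',
      {},
      { Authorization: process.env.REDOCLY_AUTHORIZATION },
    );
    process.stdout.write(`Fetched ${data.definitions.length} definition(s)\n`);
  } catch (e) {
    if (e instanceof RequestError) {
      process.stdout.write(`Registry request failed (${e.statusCode}): ${e.message}\n`);
    }
  }
})();
```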
@@ -3,10 +3,10 @@ import fs from 'fs';
|
||||
import yaml from 'js-yaml';
|
||||
import { resolve as resolveFile, dirname } from 'path';
|
||||
import { resolve as resolveUrl } from 'url';
|
||||
import { XMLHttpRequest } from 'xmlhttprequest';
|
||||
import fetch from 'node-fetch';
|
||||
|
||||
import createError, { getReferencedFrom, createYAMLParseError } from './error';
|
||||
import { isFullyQualifiedUrl } from './utils';
|
||||
import { isFullyQualifiedUrl, match } from './utils';
|
||||
|
||||
function pushPath(ctx, filePath, docPath) {
|
||||
ctx.pathStack.push({
|
||||
@@ -39,7 +39,7 @@ export function popPath(ctx) {
|
||||
* @param {string} link A path in the yaml document which is to be resolved
|
||||
* @param {*} ctx JSON Object with the document field which represents the YAML structure
|
||||
*/
|
||||
function resolve(link, ctx, visited = []) {
|
||||
async function resolve(link, ctx, visited = []) {
|
||||
const linkSplitted = link.split('#/');
|
||||
if (linkSplitted[0] === '') linkSplitted[0] = ctx.filePath;
|
||||
const [filePath, docPath] = linkSplitted;
|
||||
@@ -70,19 +70,29 @@ function resolve(link, ctx, visited = []) {
|
||||
ctx.result.push(createYAMLParseError(e, ctx, resolvedFilePath));
|
||||
return { node: undefined };
|
||||
}
|
||||
// FIXME: lost yaml parsing and file read errors here
|
||||
} else if (isFullyQualifiedUrl(resolvedFilePath)) {
|
||||
try {
|
||||
const xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', resolvedFilePath, false);
|
||||
xhr.send();
|
||||
const headers = {};
|
||||
for (let i = 0; i < ctx.headers.length; i++) {
|
||||
if (match(resolvedFilePath, ctx.headers[i].matches)) {
|
||||
headers[ctx.headers[i].name] = ctx.headers[i].value;
|
||||
}
|
||||
}
|
||||
|
||||
if (xhr.status !== 200) {
|
||||
const req = await fetch(resolvedFilePath, { headers });
|
||||
|
||||
if (!req.ok) {
|
||||
return { node: undefined };
|
||||
}
|
||||
|
||||
source = xhr.responseText;
|
||||
document = yaml.safeLoad(source);
|
||||
const text = await req.text();
|
||||
|
||||
try {
|
||||
document = yaml.safeLoad(text);
|
||||
} catch (e) {
|
||||
ctx.result.push(createYAMLParseError(e, ctx, resolvedFilePath));
|
||||
return { node: undefined };
|
||||
}
|
||||
} catch (e) {
|
||||
// FIXME: lost yaml parsing errors and network errors here
|
||||
return { node: undefined };
|
||||
@@ -117,7 +127,7 @@ function resolve(link, ctx, visited = []) {
|
||||
while (target !== undefined) {
|
||||
if (target && target.$ref) {
|
||||
// handle transitive $ref's
|
||||
const resolved = resolve(target.$ref, ctx, visited);
|
||||
const resolved = await resolve(target.$ref, ctx, visited);
|
||||
transitiveError = resolved.transitiveError;
|
||||
if (resolved.node === undefined && !transitiveError) {
|
||||
// We want to show only the error for the first $ref that can't be resolved.
|
||||
@@ -171,11 +181,11 @@ function resolve(link, ctx, visited = []) {
|
||||
* @param {*} node
|
||||
* @param {*} ctx
|
||||
*/
|
||||
function resolveNode(node, ctx) {
|
||||
async function resolveNode(node, ctx) {
|
||||
if (!node || typeof node !== 'object') return { node };
|
||||
|
||||
if (node.$ref) {
|
||||
const resolved = resolve(node.$ref, ctx);
|
||||
const resolved = await resolve(node.$ref, ctx);
|
||||
if (resolved.node === undefined) { // can't resolve
|
||||
popPath(ctx);
|
||||
|
||||
@@ -197,7 +207,7 @@ function resolveNode(node, ctx) {
|
||||
}
|
||||
|
||||
// to be used in mutators
|
||||
export function resolveNodeNoSideEffects(node, ctx) {
|
||||
export async function resolveNodeNoSideEffects(node, ctx) {
|
||||
const ctxCopy = { ...ctx, pathStack: ctx.pathStack.slice() };
|
||||
return resolveNode(node, ctxCopy);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
/* eslint-disable no-underscore-dangle */
|
||||
/* eslint-disable no-case-declarations */
|
||||
import path from 'path';
|
||||
@@ -9,7 +10,7 @@ import resolveScalars from './scalarsResolver';
|
||||
|
||||
import { fromError, createErrorFlat } from './error/default';
|
||||
|
||||
function traverseChildren(resolvedNode, definition, ctx, visited) {
|
||||
async function traverseChildren(resolvedNode, definition, ctx, visited) {
|
||||
let nodeChildren;
|
||||
const errors = [];
|
||||
switch (typeof definition.properties) {
|
||||
@@ -23,7 +24,7 @@ function traverseChildren(resolvedNode, definition, ctx, visited) {
|
||||
if (resolvedNodeKeys.includes(child)) {
|
||||
ctx.path.push(child);
|
||||
if (resolvedNode[child]) {
|
||||
childResult = traverseNode(resolvedNode[child], nodeChildren[child], ctx, visited);
|
||||
childResult = await traverseNode(resolvedNode[child], nodeChildren[child], ctx, visited);
|
||||
}
|
||||
if (childResult) errors.push(...childResult);
|
||||
ctx.path.pop();
|
||||
@@ -39,10 +40,10 @@ function traverseChildren(resolvedNode, definition, ctx, visited) {
|
||||
ctx.path.push(p);
|
||||
if (typeof definition.properties[p] === 'function') {
|
||||
if (resolvedNode[p]) {
|
||||
propResult = traverseNode(resolvedNode[p], definition.properties[p](), ctx, visited);
|
||||
propResult = await traverseNode(resolvedNode[p], definition.properties[p](), ctx, visited);
|
||||
}
|
||||
} else if (resolvedNode[p]) {
|
||||
propResult = traverseNode(resolvedNode[p], definition.properties[p], ctx, visited);
|
||||
propResult = await traverseNode(resolvedNode[p], definition.properties[p], ctx, visited);
|
||||
}
|
||||
if (propResult) errors.push(...propResult);
|
||||
ctx.path.pop();
|
||||
@@ -55,10 +56,10 @@ function traverseChildren(resolvedNode, definition, ctx, visited) {
|
||||
return errors;
|
||||
}
|
||||
|
||||
function onNodeEnter(node, ctx) {
|
||||
async function onNodeEnter(node, ctx) {
|
||||
const {
|
||||
node: resolvedNode, onStack,
|
||||
} = resolveNode(node, ctx);
|
||||
} = await resolveNode(node, ctx);
|
||||
|
||||
return {
|
||||
resolvedNode,
|
||||
@@ -78,10 +79,10 @@ const nestedIncludes = (c, s) => {
|
||||
};
|
||||
|
||||
|
||||
function traverseNode(node, definition, ctx, visited = []) {
|
||||
async function traverseNode(node, definition, ctx, visited = []) {
|
||||
if (!node || !definition) return [];
|
||||
|
||||
const nodeContext = onNodeEnter(node, ctx);
|
||||
const nodeContext = await onNodeEnter(node, ctx);
|
||||
const isRecursive = nestedIncludes(ctx.path, visited);
|
||||
const errors = [];
|
||||
const currentPath = `${path.relative(process.cwd(), ctx.filePath)}::${ctx.path.join('/')}`;
|
||||
@@ -95,28 +96,28 @@ function traverseNode(node, definition, ctx, visited = []) {
|
||||
resolveScalars(nodeContext.resolvedNode, definition, ctx);
|
||||
|
||||
if (definition.customResolveFields) {
|
||||
definition.customResolveFields(nodeContext.resolvedNode, ctx, visited);
|
||||
await definition.customResolveFields(nodeContext.resolvedNode, ctx, visited);
|
||||
}
|
||||
|
||||
if (Array.isArray(nodeContext.resolvedNode)) {
|
||||
nodeContext.resolvedNode.forEach((nodeChild, i) => {
|
||||
for (let i = 0; i < nodeContext.resolvedNode.length; i++) {
|
||||
ctx.path.push(i);
|
||||
const arrayResult = traverseNode(nodeChild, resolvedDefinition, ctx, localVisited);
|
||||
const arrayResult = await traverseNode(nodeContext.resolvedNode[i], resolvedDefinition, ctx, localVisited);
|
||||
if (arrayResult) errors.push(...arrayResult);
|
||||
ctx.path.pop();
|
||||
});
|
||||
}
|
||||
} else {
|
||||
ctx.validateFields = ctx.validateFieldsRaw.bind(
|
||||
null, nodeContext.resolvedNode, ctx,
|
||||
);
|
||||
runRuleOnRuleset(nodeContext, 'onEnter', ctx, resolvedDefinition, node, errors, localVisited);
|
||||
await runRuleOnRuleset(nodeContext, 'onEnter', ctx, resolvedDefinition, node, errors, localVisited);
|
||||
|
||||
const newNode = !isRecursive
|
||||
&& (!resolvedDefinition.isIdempotent || !ctx.visited.includes(currentPath));
|
||||
if (newNode) {
|
||||
if (!ctx.visited.includes(currentPath)) ctx.visited.push(currentPath);
|
||||
|
||||
const errorsChildren = traverseChildren(
|
||||
const errorsChildren = await traverseChildren(
|
||||
nodeContext.resolvedNode, resolvedDefinition, ctx, localVisited,
|
||||
);
|
||||
errors.push(...errorsChildren);
|
||||
@@ -129,7 +130,7 @@ function traverseNode(node, definition, ctx, visited = []) {
|
||||
ctx.result.push(...cachedResult);
|
||||
}
|
||||
|
||||
runRuleOnRuleset(nodeContext, 'onExit', ctx, resolvedDefinition, node, errors);
|
||||
await runRuleOnRuleset(nodeContext, 'onExit', ctx, resolvedDefinition, node, errors);
|
||||
if (newNode) ctx.cache[currentPath] = errors;
|
||||
}
|
||||
onNodeExit(nodeContext, ctx);
|
||||
@@ -137,7 +138,7 @@ function traverseNode(node, definition, ctx, visited = []) {
|
||||
return errors;
|
||||
}
|
||||
|
||||
function runRuleOnRuleset(nodeContext, ruleName, ctx, definition, node, errors, visited) {
|
||||
async function runRuleOnRuleset(nodeContext, ruleName, ctx, definition, node, errors, visited) {
|
||||
for (let i = 0; i < ctx.customRules.length; i += 1) {
|
||||
ctx.validateFieldsHelper = ctx.validateFields.bind(
|
||||
null,
|
||||
@@ -156,12 +157,12 @@ function runRuleOnRuleset(nodeContext, ruleName, ctx, definition, node, errors,
|
||||
|
||||
const errorsOnEnterForType = ctx.customRules[i][definition.name]
|
||||
&& ctx.customRules[i][definition.name]()[ruleName]
|
||||
? ctx.customRules[i][definition.name]()[ruleName](
|
||||
? await ctx.customRules[i][definition.name]()[ruleName](
|
||||
nodeContext.resolvedNode, definition, ctx, node, { traverseNode, visited, resolveType },
|
||||
) : [];
|
||||
|
||||
const errorsOnEnterGeneric = ctx.customRules[i].any && ctx.customRules[i].any()[ruleName]
|
||||
? ctx.customRules[i].any()[ruleName](nodeContext.resolvedNode, definition, ctx, node, {
|
||||
? await ctx.customRules[i].any()[ruleName](nodeContext.resolvedNode, definition, ctx, node, {
|
||||
traverseNode, visited, resolveType,
|
||||
}) : [];
|
||||
|
||||
|
||||
@@ -11,12 +11,12 @@ export default {
|
||||
propertyName: null,
|
||||
mapping: null,
|
||||
},
|
||||
customResolveFields: (node, ctx, visited) => {
|
||||
customResolveFields: async (node, ctx, visited) => {
|
||||
if (node && node.mapping && typeof node.mapping === 'object') {
|
||||
ctx.path.push('mapping');
|
||||
for (const key of Object.keys(node.mapping)) {
|
||||
ctx.path.push(key);
|
||||
traverseNode(
|
||||
await traverseNode(
|
||||
{
|
||||
$ref: node.mapping[key],
|
||||
[MAPPING_DATA_KEY]: { // FIXME: too hacky
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
// @ts-check
|
||||
/** @typedef {'string'|'number'|'integer'|'boolean'|'null'|'object'|'array'} JSONSchemaType */
|
||||
|
||||
import { XMLHttpRequest } from 'xmlhttprequest';
|
||||
import chalk from 'chalk';
|
||||
import fetch from 'node-fetch';
|
||||
import minimatch from 'minimatch';
|
||||
|
||||
/* eslint-disable import/prefer-default-export */
|
||||
const urlPattern = new RegExp('^(https?:\\/\\/)?' // protocol
|
||||
@@ -171,15 +172,9 @@ export function getClosestString(given, others) {
|
||||
return null;
|
||||
}
|
||||
|
||||
export const getFileSync = (link) => {
|
||||
const xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', link, false);
|
||||
xhr.send();
|
||||
|
||||
if (xhr.status !== 200) {
|
||||
return null;
|
||||
}
|
||||
return xhr.responseText;
|
||||
export const getFile = async (link) => {
|
||||
const resp = await fetch(link);
|
||||
return resp.text();
|
||||
};
|
||||
|
||||
export function isRef(node) {
|
||||
@@ -206,3 +201,11 @@ export function debounce(func, wait, immediate) {
|
||||
if (callNow) func.apply(context, args);
|
||||
};
|
||||
}
|
||||
|
||||
export function match(url, pattern) {
|
||||
if (!pattern.match(/^https?:\/\//)) {
|
||||
// if pattern doesn't specify protocol directly, do not match against it
|
||||
url = url.replace(/^https?:\/\//, '');
|
||||
}
|
||||
return minimatch(url, pattern);
|
||||
}
|
||||
|
||||
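The `match` helper above backs the `matches` field in the resolver header config; a quick behavior sketch (import path assumed):

```js
import { match } from './utils';

// A pattern without a protocol matches the URL regardless of http/https.
match('https://api.redoc.ly/graphql', 'api.redoc.ly/*'); // true
match('http://api.redoc.ly/graphql', 'api.redoc.ly/*');  // true
// A single '*' does not cross path separators; use '**' for deeper registry paths.
match('https://api.redoc.ly/registry/org/petstore/bundle', 'api.redoc.ly/**'); // true
```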
@@ -6,13 +6,13 @@ import { OpenAPIRoot } from './types';

import { createYAMLParseError } from './error';

import { getFileSync } from './utils';
import { getFile } from './utils';

import { getLintConfig } from './config';
import traverseNode from './traverse';
import createContext from './context';

export const validate = (yamlData, filePath, options = {}) => {
export const validate = async (yamlData, filePath, options = {}) => {
let document;
try {
document = yaml.safeLoad(yamlData);
@@ -28,7 +28,7 @@ export const validate = (yamlData, filePath, options = {}) => {

ctx.getRule = ctx.getRule.bind(null, ctx);

traverseNode(document, OpenAPIRoot, ctx);
await traverseNode(document, OpenAPIRoot, ctx);

const filtered = ctx.result.filter((msg) => {
for (let j = 0; j < ctx.customRules.length; j++) {
@@ -46,16 +46,15 @@ export const validate = (yamlData, filePath, options = {}) => {
return filtered;
};

export const validateFromUrl = (link, options) => {
const doc = getFileSync(link);
options.sourceUrl = true;
const validationResult = validate(doc, link, options);
export const validateFromUrl = async (link, options) => {
const doc = await getFile(link);
const validationResult = await validate(doc, link, options);
return validationResult;
};

export const validateFromFile = (fName, options) => {
export const validateFromFile = async (fName, options) => {
const resolvedFileName = fName; // path.resolve(fName);
const doc = fs.readFileSync(resolvedFileName, 'utf-8');
const validationResult = validate(doc, resolvedFileName, options);
const validationResult = await validate(doc, resolvedFileName, options);
return validationResult;
};
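Since `validate`, `validateFromUrl`, and `validateFromFile` are now async, callers have to await them. A minimal usage sketch (the file path and URL are placeholders, not values from the commit):

```
import { validateFromFile, validateFromUrl } from './validate';

(async () => {
  const fileResults = await validateFromFile('./openapi.yaml', {});
  const urlResults = await validateFromUrl('https://example.com/openapi.yaml', {});
  console.log(fileResults.length, urlResults.length);
})();
```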
110
src/visitors/rules/custom/stringMatcher.js
Normal file
@@ -0,0 +1,110 @@
/* eslint-disable no-underscore-dangle */

class StringMatcher {
  static get rule() {
    return 'string-matcher';
  }

  constructor(config) {
    this.ruleSets = {};

    for (const ruleName of Object.keys(config.rules || {})) {
      if (!config.rules[ruleName].on) {
        process.stdout.write(`Missing "on" field on the ${ruleName} subrule of string-matcher. Aborting validation`);
        process.exit(1);
      }

      const [typeName, field] = config.rules[ruleName].on.split('.');
      const rule = {
        not: false,
        ...config.rules[ruleName],
        typeName,
        field,
        name: ruleName,
      };

      const regexp = (!!rule.regexp && new RegExp(rule.regexp)) || null;

      const startsWithHelper = (node, ctx, expr, inverse) => {
        if (!node[rule.field]) {
          return ctx.createError(`Missing ${rule.field} property required by ${rule.name} rule.`, 'key', undefined, rule.level);
        }

        if ((!inverse && node[rule.field].startsWith(expr)) || (inverse && !node[rule.field].startsWith(expr))) return null;

        ctx.path.push(rule.field);
        const error = ctx.createError(rule.message
          || `Field ${rule.field} does not start with: ${expr}. Error was generated by ${rule.name} validation rule.`,
        'value', undefined, rule.level);
        ctx.path.pop();
        return error;
      };

      const endsWithHelper = (node, ctx, expr, inverse) => {
        if (!node[rule.field]) {
          return ctx.createError(`Missing ${rule.field} property required by ${rule.name} rule.`, 'key', undefined, rule.level);
        }

        if ((!inverse && node[rule.field].endsWith(expr)) || (inverse && !node[rule.field].endsWith(expr))) return null;

        ctx.path.push(rule.field);
        const error = ctx.createError(rule.message
          || `Field ${rule.field} does not end with: ${expr}. Error was generated by ${rule.name} validation rule.`,
        'value', undefined, rule.level);
        ctx.path.pop();
        return error;
      };

      const checkForRegexp = (node, ctx, expr, inverse) => {
        if ((!inverse && !node[rule.field].match(expr)) || (inverse && node[rule.field].match(expr))) {
          ctx.path.push(rule.field);
          const error = ctx.createError(rule.message
            || `Field ${rule.field} does not match regexp: ${expr}. Error was generated by ${rule.name} validation rule.`,
          'value', undefined, rule.level);
          ctx.path.pop();
          return error;
        }
        return null;
      };

      const validateRule = (node, ctx) => {
        let error = null;
        if (regexp) {
          error = error || checkForRegexp(node, ctx, regexp, rule.not);
        }
        if (rule.startsWith) {
          error = error || startsWithHelper(node, ctx, rule.startsWith, rule.not);
        }
        if (rule.endsWith) {
          error = error || endsWithHelper(node, ctx, rule.endsWith, rule.not);
        }

        return error;
      };

      rule.validate = validateRule;

      if (!this.ruleSets[rule.typeName]) {
        this.ruleSets[rule.typeName] = [];
      }

      this.ruleSets[rule.typeName].push(rule);
    }
  }

  any() {
    return {
      onExit: (node, definition, ctx) => {
        if (!this.ruleSets[definition.name]) return [];
        const errors = [];
        for (const rule of this.ruleSets[definition.name]) {
          const validationResult = rule.validate(node, ctx);
          if (validationResult) errors.push(validationResult);
        }
        return errors;
      },
    };
  }
}

module.exports = StringMatcher;
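For orientation, here is an illustrative way to exercise the class directly; the rule config, the node, and the ctx stub below are made up for the example, and in the CLI the visitor is driven by traverse rather than called by hand.

```
const StringMatcher = require('./stringMatcher');

const matcher = new StringMatcher({
  rules: {
    ServersUseHttps: {
      on: 'OpenAPIServer.url',
      startsWith: 'https://',
      level: 'error',
      message: 'Server URLs must use https.',
    },
  },
});

// Rules are grouped by the OpenAPI type name taken from the "on" selector...
console.log(Object.keys(matcher.ruleSets)); // ['OpenAPIServer']

// ...and the generic any() visitor validates every node of that type on exit.
const errors = matcher.any().onExit(
  { url: 'http://example.com' }, // resolved node
  { name: 'OpenAPIServer' }, // type definition
  { path: [], createError: (message) => ({ message }) }, // minimal ctx stub
);
console.log(errors.length); // 1 — the url does not start with https://
```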
43
src/visitors/rules/registry/registryDependencies.js
Normal file
@@ -0,0 +1,43 @@
const RedoclyClient = require('../../../redocly').default;
const { isFullyQualifiedUrl } = require('../../../utils');

class RegistryDependencies {
  static get rule() {
    return 'registry-dependencies';
  }

  OpenAPIRoot() {
    return {
      onEnter: async (_node, _definition, ctx) => {
        this.redoclyClient = new RedoclyClient();
        if (this.redoclyClient.hasToken()) {
          ctx.headers = [...ctx.headers, {
            matches: `https://api.${process.env.REDOCLY_DOMAIN || 'redoc.ly'}/registry/**`,
            name: 'Authorization',
            value: (this.redoclyClient && await this.redoclyClient.getAuthorizationHeader()) || '',
          }];
        }
      },
      onExit: async (_node, _definition, ctx) => {
        if (process.env.UPDATE_REGISTRY && this.redoclyClient.hasToken()) {
          await this.redoclyClient.updateDependencies(ctx.registryDependencies);
        }
      },
    };
  }

  any() {
    return {
      onEnter: (_node, _definition, ctx, unresolvedNode) => {
        if (unresolvedNode.$ref) {
          const link = unresolvedNode.$ref.split('#/')[0];
          if (isFullyQualifiedUrl(link) && RedoclyClient.isRegistryURL(link)) {
            ctx.registryDependencies.push(link);
          }
        }
      },
    };
  }
}

module.exports = RegistryDependencies;
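A sketch of what the generic visitor collects; the URL below is illustrative, and whether it actually counts as a dependency depends on isFullyQualifiedUrl and RedoclyClient.isRegistryURL, which are not shown in this diff.

```
const RegistryDependencies = require('./registryDependencies');

const ctx = { registryDependencies: [] };
const visitor = new RegistryDependencies().any();

// Only the unresolved node matters here: the part of the $ref before '#/'
// is recorded when it points at the API registry.
visitor.onEnter(
  {}, // resolved node (unused)
  { name: 'OpenAPISchema' }, // type definition (unused)
  ctx,
  { $ref: 'https://api.redoc.ly/registry/org/petstore.yaml#/components/schemas/Pet' },
);

console.log(ctx.registryDependencies);
// -> ['https://api.redoc.ly/registry/org/petstore.yaml'] if the link is
//    recognized as a registry URL; otherwise the array stays empty
```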
@@ -8,7 +8,8 @@ class ValidateOpenAPIExternalDocumentation {
get validators() {
return {
description(node, ctx) {
return node && node.description && typeof node.description !== 'string' ? ctx.createError(ctx.messageHelpers.fieldTypeMismatchMessageHelper('string'), 'value') : null;
return node && node.description && typeof node.description !== 'string'
? ctx.createError(ctx.messageHelpers.fieldTypeMismatchMessageHelper('string'), 'value') : null;
},
url(node, ctx) {
if (node && !node.url) return ctx.createError(ctx.messageHelpers.missingRequiredField('url'), 'key');
@@ -4,7 +4,10 @@ import { getLocationByPath, getCodeFrameForLocation } from "../index";
describe("getLocationByPath", () => {
test("", () => {
const context = {
source: fs.readFileSync("./definitions/syntetic/syntetic-1.yaml", "utf-8")
source: fs.readFileSync(
"./definitions/syntetic/syntetic-1.yaml",
"utf-8"
),
};

expect(
@@ -27,7 +30,10 @@ describe("getLocationByPath", () => {

test("", () => {
const context = {
source: fs.readFileSync("./definitions/syntetic/syntetic-1.yaml", "utf-8")
source: fs.readFileSync(
"./definitions/syntetic/syntetic-1.yaml",
"utf-8"
),
};

expect(getLocationByPath([""], context, "key")).toMatchInlineSnapshot(`
@@ -44,7 +50,10 @@ describe("getLocationByPath", () => {

test("", () => {
const context = {
source: fs.readFileSync("./definitions/syntetic/syntetic-1.yaml", "utf-8")
source: fs.readFileSync(
"./definitions/syntetic/syntetic-1.yaml",
"utf-8"
),
};

expect(
@@ -63,7 +72,10 @@ describe("getLocationByPath", () => {

test("", () => {
const context = {
source: fs.readFileSync("./definitions/syntetic/syntetic-1.yaml", "utf-8")
source: fs.readFileSync(
"./definitions/syntetic/syntetic-1.yaml",
"utf-8"
),
};

expect(
@@ -86,7 +98,10 @@ describe("getLocationByPath", () => {

test("", () => {
const context = {
source: fs.readFileSync("./definitions/syntetic/syntetic-1.yaml", "utf-8")
source: fs.readFileSync(
"./definitions/syntetic/syntetic-1.yaml",
"utf-8"
),
};

expect(
@@ -115,13 +130,13 @@ describe("getCodeFrameForLocation", () => {
"utf-8"
);
expect(getCodeFrameForLocation(276, 281, source)).toMatchInlineSnapshot(`
"[90m1| [39m
[90m0| servers:[39m
[90m1| - url: 'http://example[4m[31m.org'[90m[24m[39m
[90m2|[39m[31m [39m
[90m3| paths:[39m
[90m4| user:[39m"
`);
"[90m1| [39m
[90m0| servers:[39m
[90m1| - url: 'http://example[4m[31m.org'[39m[90m[24m[39m
[90m2|[39m[31m [39m
[90m3| paths:[39m
[90m4| user:[39m"
`);
});

test("", () => {
@@ -130,20 +145,20 @@ describe("getCodeFrameForLocation", () => {
"utf-8"
);
expect(getCodeFrameForLocation(276, 425, source)).toMatchInlineSnapshot(`
"[90m-1| [39m
[90m00| servers:[39m
[90m01| - url: 'http://example[4m[31m.org'[90m[24m[39m
[90m02|[39m[31m [4m[31m[31m[24m[39m
[90m03|[39m[31m [4m[31mpaths:[31m[24m[39m
[90m04|[39m[31m [4m[31m user:[31m[24m[39m
[90m05|[39m[31m [4m[31m parameters:[31m[24m[39m
[90m06|[39m[31m [4m[31m - $ref: '#/components/parameters/example'[31m[24m[39m
[90m07|[39m[31m [4m[31m get:[31m[24m[39m
[90m08|[39m[31m [4m[31m operationId: userGet[31m[24m[39m
[90m09|[39m[31m [4m[31m description: Get user[31m[24m[39m
[90m10| responses:[39m
[90m11| '200':[39m"
`);
"[90m-1| [39m
[90m00| servers:[39m
[90m01| - url: 'http://example[4m[31m.org'[39m[90m[24m[39m
[90m02|[39m[31m [4m[31m[39m[31m[24m[39m
[90m03|[39m[31m [4m[31mpaths:[39m[31m[24m[39m
[90m04|[39m[31m [4m[31m user:[39m[31m[24m[39m
[90m05|[39m[31m [4m[31m parameters:[39m[31m[24m[39m
[90m06|[39m[31m [4m[31m - $ref: '#/components/parameters/example'[39m[31m[24m[39m
[90m07|[39m[31m [4m[31m get:[39m[31m[24m[39m
[90m08|[39m[31m [4m[31m operationId: userGet[39m[31m[24m[39m
[90m09|[39m[31m [4m[31m description: Get user[39m[31m[24m[39m
[90m10| responses:[39m
[90m11| '200':[39m"
`);
});

test("", () => {
@@ -152,10 +167,10 @@ describe("getCodeFrameForLocation", () => {
"utf-8"
);
expect(getCodeFrameForLocation(0, 7, source)).toMatchInlineSnapshot(`
"[90m1| [4m[31mopenapi[90m[24m: 3.0.2[39m
[90m2| info:[39m
[90m3| x-redocly-overlay:[39m"
`);
"[90m1| [4m[31mopenapi[39m[90m[24m: 3.0.2[39m
[90m2| info:[39m
[90m3| x-redocly-overlay:[39m"
`);
});

test("", () => {
@@ -164,9 +179,9 @@ describe("getCodeFrameForLocation", () => {
"utf-8"
);
expect(getCodeFrameForLocation(0, 14, source)).toMatchInlineSnapshot(`
"[90m1|[39m[31m [4m[31mopenapi: 3.0.2[31m[24m[39m
[90m2| info:[39m
[90m3| x-redocly-overlay:[39m"
`);
"[90m1|[39m[31m [4m[31mopenapi: 3.0.2[39m[31m[24m[39m
[90m2| info:[39m
[90m3| x-redocly-overlay:[39m"
`);
});
});