mirror of https://github.com/LukeHagar/libopenapi.git (synced 2025-12-07 04:20:14 +00:00)

Commit: Working through fix-73 and v0.6.0

datamodel/document_config.go (new file, 19 lines)
@@ -0,0 +1,19 @@
// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
// SPDX-License-Identifier: MIT

package datamodel

import "net/url"

// DocumentConfiguration is used to configure the document creation process. It was added in v0.6.0 to allow
// more fine-grained control over document creation and new features.
type DocumentConfiguration struct {
	// if the document uses relative file references, this is the base URL to use when resolving them.
	BaseURL *url.URL

	// AllowFileReferences will allow the index to locate relative file references. This is disabled by default.
	AllowFileReferences bool

	// AllowRemoteReferences will allow the index to look up remote references. This is disabled by default.
	AllowRemoteReferences bool
}
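For orientation, a minimal sketch of how the new struct is meant to be populated (the URL below is only an illustrative placeholder, not something taken from this commit):

	// sketch only: build a DocumentConfiguration that opts in to file and remote lookups
	baseURL, _ := url.Parse("https://example.com/specs") // hypothetical base location

	config := datamodel.DocumentConfiguration{
		BaseURL:               baseURL, // used when relative file refs can't be found locally
		AllowFileReferences:   true,    // opt in to file lookups (off by default)
		AllowRemoteReferences: true,    // opt in to remote lookups (off by default)
	}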
@@ -398,6 +398,18 @@ func TestAsanaAsDoc(t *testing.T) {
	fmt.Println(d)
}

func TestDigitalOceanAsDoc(t *testing.T) {
	data, _ := ioutil.ReadFile("../../../test_specs/asana.yaml")
	info, _ := datamodel.ExtractSpecInfo(data)
	var err []error
	lowDoc, err = lowv3.CreateDocument(info)
	if err != nil {
		panic("broken something")
	}
	d := NewDocument(lowDoc)
	fmt.Println(d)
}

func TestPetstoreAsDoc(t *testing.T) {
	data, _ := ioutil.ReadFile("../../../test_specs/petstorev3.json")
	info, _ := datamodel.ExtractSpecInfo(data)
@@ -188,6 +188,10 @@ func IsCircular(node *yaml.Node, idx *index.SpecIndex) bool {
			if refs[i].Journey[k].Node == node {
				return true
			}
			isRef, _, refValue := utils.IsNodeRefValue(node)
			if isRef && refs[i].Journey[k].Definition == refValue {
				return true
			}
		}
	}
	// check mapped references in case we didn't find it.

@@ -216,6 +220,10 @@ func GetCircularReferenceResult(node *yaml.Node, idx *index.SpecIndex) *index.Ci
			if refs[i].Journey[k].Node == node {
				return refs[i]
			}
			isRef, _, refValue := utils.IsNodeRefValue(node)
			if isRef && refs[i].Journey[k].Definition == refValue {
				return refs[i]
			}
		}
	}
	// check mapped references in case we didn't find it.
@@ -121,13 +121,32 @@ func (s *Swagger) GetExtensions() map[low.KeyReference[string]]low.ValueReferenc
	return s.Extensions
}

-func CreateDocument(info *datamodel.SpecInfo) (*Swagger, []error) {
+// CreateDocumentFromConfig will create a new Swagger document from the provided SpecInfo and DocumentConfiguration.
+func CreateDocumentFromConfig(info *datamodel.SpecInfo,
+	configuration *datamodel.DocumentConfiguration) (*Swagger, []error) {
+	return createDocument(info, configuration)
+}
+
+// CreateDocument will create a new Swagger document from the provided SpecInfo.
+//
+// Deprecated: Use CreateDocumentFromConfig instead.
+func CreateDocument(info *datamodel.SpecInfo) (*Swagger, []error) {
+	return createDocument(info, &datamodel.DocumentConfiguration{
+		AllowRemoteReferences: true,
+		AllowFileReferences: true,
+	})
+}
+
+func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Swagger, []error) {
	doc := Swagger{Swagger: low.ValueReference[string]{Value: info.Version, ValueNode: info.RootNode}}
	doc.Extensions = low.ExtractExtensions(info.RootNode.Content[0])

	// build an index
-	idx := index.NewSpecIndex(info.RootNode)
+	idx := index.NewSpecIndexWithConfig(info.RootNode, &index.SpecIndexConfig{
+		BaseURL: config.BaseURL,
+		AllowRemoteLookup: config.AllowRemoteReferences,
+		AllowFileLookup: config.AllowFileReferences,
+	})
	doc.Index = idx
	doc.SpecInfo = info
@@ -11,8 +11,24 @@ import (
	"sync"
)

+// CreateDocument will create a new Document instance from the provided SpecInfo.
+//
+// Deprecated: Use CreateDocumentFromConfig instead. This function will be removed in a later version, it
+// defaults to allowing file and remote references, and does not support relative file references.
func CreateDocument(info *datamodel.SpecInfo) (*Document, []error) {
+	config := datamodel.DocumentConfiguration{
+		AllowFileReferences: true,
+		AllowRemoteReferences: true,
+	}
+	return createDocument(info, &config)
+}
+
+// CreateDocumentFromConfig will create a new Document from the provided SpecInfo and DocumentConfiguration pointer.
+func CreateDocumentFromConfig(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Document, []error) {
+	return createDocument(info, config)
+}
+
+func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Document, []error) {
	_, labelNode, versionNode := utils.FindKeyNodeFull(OpenAPILabel, info.RootNode.Content)
	var version low.NodeReference[string]
	if versionNode == nil {

@@ -22,10 +38,16 @@ func CreateDocument(info *datamodel.SpecInfo) (*Document, []error) {
	doc := Document{Version: version}

	// build an index
-	idx := index.NewSpecIndex(info.RootNode)
+	idx := index.NewSpecIndexWithConfig(info.RootNode, &index.SpecIndexConfig{
+		BaseURL: config.BaseURL,
+		AllowFileLookup: config.AllowFileReferences,
+		AllowRemoteLookup: config.AllowRemoteReferences,
+	})
	doc.Index = idx

-	var errors []error
+	var errs []error

+	errs = idx.GetReferenceIndexErrors()
+
	// create resolver and check for circular references.
	resolve := resolver.NewResolver(idx)

@@ -33,7 +55,7 @@ func CreateDocument(info *datamodel.SpecInfo) (*Document, []error) {

	if len(resolvingErrors) > 0 {
		for r := range resolvingErrors {
-			errors = append(errors, resolvingErrors[r])
+			errs = append(errs, resolvingErrors[r])
		}
	}

@@ -71,10 +93,10 @@ func CreateDocument(info *datamodel.SpecInfo) (*Document, []error) {

	wg.Add(len(extractionFuncs))
	for _, f := range extractionFuncs {
-		go runExtraction(info, &doc, idx, f, &errors, &wg)
+		go runExtraction(info, &doc, idx, f, &errs, &wg)
	}
	wg.Wait()
-	return &doc, errors
+	return &doc, errs
}

func extractInfo(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
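A hedged sketch of how the new low-level entry point might be called directly, mirroring the test code earlier in this commit (the lowv3 alias and the file path are assumptions used for illustration, not names fixed by this diff):

	data, _ := ioutil.ReadFile("spec.yaml") // placeholder path
	info, _ := datamodel.ExtractSpecInfo(data)
	lowDoc, errs := lowv3.CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
		AllowFileReferences:   false,
		AllowRemoteReferences: false,
	})
	if len(errs) > 0 {
		// handle indexing and resolving errors here
	}
	_ = lowDoc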
document.go (61 lines changed)
@@ -39,6 +39,10 @@ type Document interface {
	// GetSpecInfo will return the *datamodel.SpecInfo instance that contains all specification information.
	GetSpecInfo() *datamodel.SpecInfo

	// SetConfiguration will set the configuration for the document. This allows for finer grained control over
	// allowing remote or local references, as well as a BaseURL to allow for relative file references.
	SetConfiguration(configuration *datamodel.DocumentConfiguration)

	// BuildV2Model will build out a Swagger (version 2) model from the specification used to create the document
	// If there are any issues, then no model will be returned, instead a slice of errors will explain all the
	// problems that occurred. This method will only support version 2 specifications and will throw an error for

@@ -63,6 +67,7 @@ type Document interface {
type document struct {
	version string
	info    *datamodel.SpecInfo
	config  *datamodel.DocumentConfiguration
}

// DocumentModel represents either a Swagger document (version 2) or an OpenAPI document (version 3) that is

@@ -89,6 +94,16 @@ func NewDocument(specByteArray []byte) (Document, error) {
	return d, nil
}

// NewDocumentWithConfiguration is the same as NewDocument, except it's a convenience function that calls NewDocument
// under the hood and then calls SetConfiguration() on the returned Document.
func NewDocumentWithConfiguration(specByteArray []byte, configuration *datamodel.DocumentConfiguration) (Document, error) {
	d, err := NewDocument(specByteArray)
	if d != nil {
		d.SetConfiguration(configuration)
	}
	return d, err
}

func (d *document) GetVersion() string {
	return d.version
}

@@ -97,6 +112,10 @@ func (d *document) GetSpecInfo() *datamodel.SpecInfo {
	return d.info
}

func (d *document) SetConfiguration(configuration *datamodel.DocumentConfiguration) {
	d.config = configuration
}

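The configuration has to be attached before a model is built; a small sketch of the intended call order (specBytes is a placeholder, and the fallback behaviour referenced here is visible in the BuildV2Model/BuildV3Model hunks below, which default to permissive settings when no configuration has been set):

	doc, err := NewDocument(specBytes)
	if err != nil {
		panic(err)
	}
	doc.SetConfiguration(&datamodel.DocumentConfiguration{
		AllowFileReferences:   false,
		AllowRemoteReferences: false,
	})
	model, errs := doc.BuildV3Model() // the configuration is consulted here
	_, _ = model, errs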
func (d *document) Serialize() ([]byte, error) {
	if d.info == nil {
		return nil, fmt.Errorf("unable to serialize, document has not yet been initialized")

@@ -120,23 +139,32 @@ func (d *document) BuildV2Model() (*DocumentModel[v2high.Swagger], []error) {
			"supplied spec is a different version (%v). Try 'BuildV3Model()'", d.info.SpecFormat))
		return nil, errors
	}
-	lowDoc, errs := v2low.CreateDocument(d.info)
-	// Do not shortcircuit on circular reference errors, so the client
+
+	var lowDoc *v2low.Swagger
+	if d.config == nil {
+		d.config = &datamodel.DocumentConfiguration{
+			AllowFileReferences: true,
+			AllowRemoteReferences: true,
+		}
+	}
+
+	lowDoc, errors = v2low.CreateDocumentFromConfig(d.info, d.config)
+	// Do not short-circuit on circular reference errors, so the client
	// has the option of ignoring them.
-	for _, err := range errs {
+	for _, err := range errors {
		if refErr, ok := err.(*resolver.ResolvingError); ok {
			if refErr.CircularReference == nil {
-				return nil, errs
+				return nil, errors
			}
		} else {
-			return nil, errs
+			return nil, errors
		}
	}
	highDoc := v2high.NewSwaggerDocument(lowDoc)
	return &DocumentModel[v2high.Swagger]{
		Model: *highDoc,
		Index: lowDoc.Index,
-	}, errs
+	}, errors
}

func (d *document) BuildV3Model() (*DocumentModel[v3high.Document], []error) {

@@ -150,23 +178,32 @@ func (d *document) BuildV3Model() (*DocumentModel[v3high.Document], []error) {
			"supplied spec is a different version (%v). Try 'BuildV2Model()'", d.info.SpecFormat))
		return nil, errors
	}
-	lowDoc, errs := v3low.CreateDocument(d.info)
-	// Do not shortcircuit on circular reference errors, so the client
+
+	var lowDoc *v3low.Document
+	if d.config == nil {
+		d.config = &datamodel.DocumentConfiguration{
+			AllowFileReferences: true,
+			AllowRemoteReferences: true,
+		}
+	}
+
+	lowDoc, errors = v3low.CreateDocumentFromConfig(d.info, d.config)
+	// Do not short-circuit on circular reference errors, so the client
	// has the option of ignoring them.
-	for _, err := range errs {
+	for _, err := range errors {
		if refErr, ok := err.(*resolver.ResolvingError); ok {
			if refErr.CircularReference == nil {
-				return nil, errs
+				return nil, errors
			}
		} else {
-			return nil, errs
+			return nil, errors
		}
	}
	highDoc := v3high.NewDocument(lowDoc)
	return &DocumentModel[v3high.Document]{
		Model: *highDoc,
		Index: lowDoc.Index,
-	}, errs
+	}, errors
}

// CompareDocuments will accept a left and right Document implementing struct, build a model for the correct
@@ -5,7 +5,9 @@ package libopenapi

import (
	"fmt"
	"github.com/pb33f/libopenapi/datamodel"
	"io/ioutil"
	"net/url"
	"strings"
	"testing"

@@ -247,6 +249,77 @@ func ExampleNewDocument_fromOpenAPI3Document() {
	// Output: There are 13 paths and 8 schemas in the document
}

func ExampleNewDocument_fromWithDocumentConfigurationFailure() {

	// This example shows how to create a document that prevents the loading of external references
	// from files or the network.

	// load in the Digital Ocean OpenAPI specification
	digitalOcean, _ := ioutil.ReadFile("test_specs/digitalocean.yaml")

	// create a DocumentConfiguration that prevents loading file and remote references
	config := datamodel.DocumentConfiguration{
		AllowFileReferences: false,
		AllowRemoteReferences: false,
	}

	// create a new document from specification bytes
	doc, err := NewDocumentWithConfiguration(digitalOcean, &config)

	// if anything went wrong, an error is thrown
	if err != nil {
		panic(fmt.Sprintf("cannot create new document: %e", err))
	}

	// only errors will be thrown, so just capture them and print the number of errors.
	_, errors := doc.BuildV3Model()

	// if anything went wrong when building the v3 model, a slice of errors will be returned
	if len(errors) > 0 {
		fmt.Println("Error building Digital Ocean spec errors reported")
	}
	// Output: Error building Digital Ocean spec errors reported
}

func ExampleNewDocument_fromWithDocumentConfigurationSuccess() {

	// This example shows how to create a document that allows the loading of external references
	// from files or the network.

	// load in the Digital Ocean OpenAPI specification
	digitalOcean, _ := ioutil.ReadFile("test_specs/digitalocean.yaml")

	// Digital Ocean needs a baseURL to be set, so we can resolve relative references.
	baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")

	// create a DocumentConfiguration that allows loading file and remote references, and sets the baseURL
	// to somewhere that can resolve the relative references.
	config := datamodel.DocumentConfiguration{
		AllowFileReferences: true,
		AllowRemoteReferences: true,
		BaseURL: baseURL,
	}

	// create a new document from specification bytes
	doc, err := NewDocumentWithConfiguration(digitalOcean, &config)

	// if anything went wrong, an error is thrown
	if err != nil {
		panic(fmt.Sprintf("cannot create new document: %e", err))
	}

	// only errors will be thrown, so just capture them and print the number of errors.
	_, errors := doc.BuildV3Model()

	// if anything went wrong when building the v3 model, a slice of errors will be returned
	if len(errors) > 0 {
		fmt.Println("Error building Digital Ocean spec errors reported")
	} else {
		fmt.Println("Digital Ocean spec built successfully")
	}
	// Output: Digital Ocean spec built successfully
}

func ExampleNewDocument_fromSwaggerDocument() {

	// How to read in a Swagger / OpenAPI 2 Specification, into a Document.
index/extract_references.go (new file, 82 lines)
@@ -0,0 +1,82 @@
// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
// SPDX-License-Identifier: MIT

package index

import (
	"fmt"
	"github.com/pb33f/libopenapi/utils"
	"strings"
)

// ExtractComponentsFromRefs returns located components from references. The returned nodes from here
// can be used for resolving as they contain the actual object properties.
func (index *SpecIndex) ExtractComponentsFromRefs(refs []*Reference) []*Reference {
	var found []*Reference

	// run this async because when things get recursive, it can take a while
	c := make(chan bool)

	locate := func(ref *Reference, refIndex int, sequence []*ReferenceMapped) {
		located := index.FindComponent(ref.Definition, ref.Node)
		if located != nil {
			index.refLock.Lock()
			if index.allMappedRefs[ref.Definition] == nil {
				found = append(found, located)
				index.allMappedRefs[ref.Definition] = located
				sequence[refIndex] = &ReferenceMapped{
					Reference: located,
					Definition: ref.Definition,
				}
			}
			index.refLock.Unlock()
		} else {

			_, path := utils.ConvertComponentIdIntoFriendlyPathSearch(ref.Definition)
			indexError := &IndexingError{
				Err: fmt.Errorf("component '%s' does not exist in the specification", ref.Definition),
				Node: ref.Node,
				Path: path,
			}
			index.refErrors = append(index.refErrors, indexError)
		}
		c <- true
	}

	var refsToCheck []*Reference
	for _, ref := range refs {

		// check reference for backslashes (hah yeah seen this too!)
		if strings.Contains(ref.Definition, "\\") { // this was from blazemeter.com haha!
			_, path := utils.ConvertComponentIdIntoFriendlyPathSearch(ref.Definition)
			indexError := &IndexingError{
				Err: fmt.Errorf("component '%s' contains a backslash '\\'. It's not valid", ref.Definition),
				Node: ref.Node,
				Path: path,
			}
			index.refErrors = append(index.refErrors, indexError)
			continue

		}
		refsToCheck = append(refsToCheck, ref)
	}
	mappedRefsInSequence := make([]*ReferenceMapped, len(refsToCheck))
	for r := range refsToCheck {
		// expand our index of all mapped refs
		go locate(refsToCheck[r], r, mappedRefsInSequence)
	}

	completedRefs := 0
	for completedRefs < len(refsToCheck) {
		select {
		case <-c:
			completedRefs++
		}
	}
	for m := range mappedRefsInSequence {
		if mappedRefsInSequence[m] != nil {
			index.allMappedRefsSequenced = append(index.allMappedRefsSequenced, mappedRefsInSequence[m])
		}
	}
	return found
}
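The completion counting above uses a plain bool channel rather than a WaitGroup; a stripped-down sketch of the same pattern (illustrative only, not code from this commit):

	c := make(chan bool)
	work := []string{"a", "b", "c"} // stand-ins for the refs being located
	for range work {
		go func() { c <- true }() // each locate() call signals completion exactly once
	}
	done := 0
	for done < len(work) {
		<-c // block until one worker reports in
		done++
	}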
index/extract_refs.go (new file, 320 lines)
@@ -0,0 +1,320 @@
// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
// SPDX-License-Identifier: MIT

package index

import (
	"errors"
	"fmt"
	"github.com/pb33f/libopenapi/utils"
	"gopkg.in/yaml.v3"
	"strings"
)

// ExtractRefs will return a deduplicated slice of references for every unique ref found in the document.
// The total number of refs, will generally be much higher, you can extract those from GetRawReferenceCount()
func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string, level int, poly bool, pName string) []*Reference {
	if node == nil {
		return nil
	}
	var found []*Reference
	if len(node.Content) > 0 {
		var prev, polyName string
		for i, n := range node.Content {

			if utils.IsNodeMap(n) || utils.IsNodeArray(n) {
				level++
				// check if we're using polymorphic values. These tend to create rabbit warrens of circular
				// references if every single link is followed. We don't resolve polymorphic values.
				isPoly, _ := index.checkPolymorphicNode(prev)
				polyName = pName
				if isPoly {
					poly = true
					if prev != "" {
						polyName = prev
					}
				}
				found = append(found, index.ExtractRefs(n, node, seenPath, level, poly, polyName)...)
			}

			// check if we're dealing with an inline schema definition, that isn't part of an array
			// (which means it's being used as a value in an array, and it's not a label)
			// https://github.com/pb33f/libopenapi/issues/76
			if i%2 == 0 && n.Value == "schema" && !utils.IsNodeArray(node) && (i+1 < len(node.Content)) {
				isRef, _, _ := utils.IsNodeRefValue(node.Content[i+1])
				if isRef {
					continue
				}
				ref := &Reference{
					Node: node.Content[i+1],
					Path: fmt.Sprintf("$.%s", strings.Join(seenPath, ".")),
				}
				index.allInlineSchemaDefinitions = append(index.allInlineSchemaDefinitions, ref)

				// check if the schema is an object or an array,
				// and if so, add it to the list of inline schema object definitions.
				k, v := utils.FindKeyNodeTop("type", node.Content[i+1].Content)
				if k != nil && v != nil {
					if v.Value == "object" || v.Value == "array" {
						index.allInlineSchemaObjectDefinitions = append(index.allInlineSchemaObjectDefinitions, ref)
					}
				}
			}

			if i%2 == 0 && n.Value == "$ref" {

				// only look at scalar values, not maps (looking at you k8s)
				if !utils.IsNodeStringValue(node.Content[i+1]) {
					continue
				}

				index.linesWithRefs[n.Line] = true

				fp := make([]string, len(seenPath))
				for x, foundPathNode := range seenPath {
					fp[x] = foundPathNode
				}

				value := node.Content[i+1].Value

				segs := strings.Split(value, "/")
				name := segs[len(segs)-1]
				_, p := utils.ConvertComponentIdIntoFriendlyPathSearch(value)
				ref := &Reference{
					Definition: value,
					Name: name,
					Node: node,
					Path: p,
				}

				// add to raw sequenced refs
				index.rawSequencedRefs = append(index.rawSequencedRefs, ref)

				// add ref by line number
				refNameIndex := strings.LastIndex(value, "/")
				refName := value[refNameIndex+1:]
				if len(index.refsByLine[refName]) > 0 {
					index.refsByLine[refName][n.Line] = true
				} else {
					v := make(map[int]bool)
					v[n.Line] = true
					index.refsByLine[refName] = v
				}

				// if this ref value has any siblings (node.Content is larger than two elements)
				// then add to refs with siblings
				if len(node.Content) > 2 {
					copiedNode := *node
					copied := Reference{
						Definition: ref.Definition,
						Name: ref.Name,
						Node: &copiedNode,
						Path: p,
					}
					// protect this data using a copy, prevent the resolver from destroying things.
					index.refsWithSiblings[value] = copied
				}

				// if this is a polymorphic reference, we're going to leave it out
				// allRefs. We don't ever want these resolved, so instead of polluting
				// the timeline, we will keep each poly ref in its own collection for later
				// analysis.
				if poly {
					index.polymorphicRefs[value] = ref

					// index each type
					switch pName {
					case "anyOf":
						index.polymorphicAnyOfRefs = append(index.polymorphicAnyOfRefs, ref)
					case "allOf":
						index.polymorphicAllOfRefs = append(index.polymorphicAllOfRefs, ref)
					case "oneOf":
						index.polymorphicOneOfRefs = append(index.polymorphicOneOfRefs, ref)
					}
					continue
				}

				// check if this is a dupe, if so, skip it, we don't care now.
				if index.allRefs[value] != nil { // seen before, skip.
					continue
				}

				if value == "" {

					completedPath := fmt.Sprintf("$.%s", strings.Join(fp, "."))

					indexError := &IndexingError{
						Err: errors.New("schema reference is empty and cannot be processed"),
						Node: node.Content[i+1],
						Path: completedPath,
					}

					index.refErrors = append(index.refErrors, indexError)

					continue
				}

				index.allRefs[value] = ref
				found = append(found, ref)
			}

			if i%2 == 0 && n.Value != "$ref" && n.Value != "" {

				nodePath := fmt.Sprintf("$.%s", strings.Join(seenPath, "."))

				// capture descriptions and summaries
				if n.Value == "description" {

					// if the parent is a sequence, ignore.
					if utils.IsNodeArray(node) {
						continue
					}

					ref := &DescriptionReference{
						Content: node.Content[i+1].Value,
						Path: nodePath,
						Node: node.Content[i+1],
						IsSummary: false,
					}

					index.allDescriptions = append(index.allDescriptions, ref)
					index.descriptionCount++
				}

				if n.Value == "summary" {

					var b *yaml.Node
					if len(node.Content) == i+1 {
						b = node.Content[i]
					} else {
						b = node.Content[i+1]
					}
					ref := &DescriptionReference{
						Content: b.Value,
						Path: nodePath,
						Node: b,
						IsSummary: true,
					}

					index.allSummaries = append(index.allSummaries, ref)
					index.summaryCount++
				}

				// capture security requirement references (these are not traditional references, but they
				// are used as a look-up. This is the only exception to the design.
				if n.Value == "security" {
					var b *yaml.Node
					if len(node.Content) == i+1 {
						b = node.Content[i]
					} else {
						b = node.Content[i+1]
					}
					if utils.IsNodeArray(b) {
						var secKey string
						for k := range b.Content {
							if utils.IsNodeMap(b.Content[k]) {
								for g := range b.Content[k].Content {
									if g%2 == 0 {
										secKey = b.Content[k].Content[g].Value
										continue
									}
									if utils.IsNodeArray(b.Content[k].Content[g]) {
										var refMap map[string][]*Reference
										if index.securityRequirementRefs[secKey] == nil {
											index.securityRequirementRefs[secKey] = make(map[string][]*Reference)
											refMap = index.securityRequirementRefs[secKey]
										} else {
											refMap = index.securityRequirementRefs[secKey]
										}
										for r := range b.Content[k].Content[g].Content {
											var refs []*Reference
											if refMap[b.Content[k].Content[g].Content[r].Value] != nil {
												refs = refMap[b.Content[k].Content[g].Content[r].Value]
											}

											refs = append(refs, &Reference{
												Definition: b.Content[k].Content[g].Content[r].Value,
												Path: fmt.Sprintf("%s.security[%d].%s[%d]", nodePath, k, secKey, r),
												Node: b.Content[k].Content[g].Content[r],
											})

											index.securityRequirementRefs[secKey][b.Content[k].Content[g].Content[r].Value] = refs
										}
									}
								}
							}
						}
					}
				}
				// capture enums
				if n.Value == "enum" {

					// all enums need to have a type, extract the type from the node where the enum was found.
					_, enumKeyValueNode := utils.FindKeyNodeTop("type", node.Content)

					if enumKeyValueNode != nil {
						ref := &EnumReference{
							Path: nodePath,
							Node: node.Content[i+1],
							Type: enumKeyValueNode,
							SchemaNode: node,
							ParentNode: parent,
						}

						index.allEnums = append(index.allEnums, ref)
						index.enumCount++
					}
				}
				// capture all objects with properties
				if n.Value == "properties" {
					_, typeKeyValueNode := utils.FindKeyNodeTop("type", node.Content)

					if typeKeyValueNode != nil {
						isObject := false

						if typeKeyValueNode.Value == "object" {
							isObject = true
						}

						for _, v := range typeKeyValueNode.Content {
							if v.Value == "object" {
								isObject = true
							}
						}

						if isObject {
							index.allObjectsWithProperties = append(index.allObjectsWithProperties, &ObjectReference{
								Path: nodePath,
								Node: node,
								ParentNode: parent,
							})
						}
					}
				}

				seenPath = append(seenPath, n.Value)
				prev = n.Value
			}

			// if next node is map, don't add segment.
			if i < len(node.Content)-1 {
				next := node.Content[i+1]

				if i%2 != 0 && next != nil && !utils.IsNodeArray(next) && !utils.IsNodeMap(next) {
					seenPath = seenPath[:len(seenPath)-1]
				}
			}
		}
		if len(seenPath) > 0 {
			seenPath = seenPath[:len(seenPath)-1]
		}

	}
	if len(seenPath) > 0 {
		seenPath = seenPath[:len(seenPath)-1]
	}

	index.refCount = len(index.allRefs)

	return found
}
index/find_component.go (new file, 278 lines)
@@ -0,0 +1,278 @@
// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
// SPDX-License-Identifier: MIT

package index

import (
	"fmt"
	"github.com/pb33f/libopenapi/utils"
	"github.com/vmware-labs/yaml-jsonpath/pkg/yamlpath"
	"gopkg.in/yaml.v3"
	"io/ioutil"
	"strings"
)

// FindComponent will locate a component by its reference, returns nil if nothing is found.
// This method will recurse through remote, local and file references. For each new external reference
// a new index will be created. These indexes can then be traversed recursively.
func (index *SpecIndex) FindComponent(componentId string, parent *yaml.Node) *Reference {
	if index.root == nil {
		return nil
	}

	remoteLookup := func(id string) (*yaml.Node, *yaml.Node, error) {
		if index.config.AllowRemoteLookup {
			return index.lookupRemoteReference(id)
		} else {
			return nil, nil, fmt.Errorf("remote lookups are not permitted, " +
				"please set AllowRemoteLookup to true in the configuration")
		}
	}

	fileLookup := func(id string) (*yaml.Node, *yaml.Node, error) {
		if index.config.AllowFileLookup {
			return index.lookupFileReference(id)
		} else {
			return nil, nil, fmt.Errorf("local lookups are not permitted, " +
				"please set AllowFileLookup to true in the configuration")
		}
	}

	switch DetermineReferenceResolveType(componentId) {
	case LocalResolve: // ideally, every single ref in every single spec is local. however, this is not the case.
		return index.FindComponentInRoot(componentId)

	case HttpResolve:
		uri := strings.Split(componentId, "#")
		if len(uri) >= 2 {
			return index.performExternalLookup(uri, componentId, remoteLookup, parent)
		}
		if len(uri) == 1 {
			// if there is no reference, second segment is empty / has no name
			// this means there is no component to look-up and the entire file should be pulled in.
			// to stop all the other code from breaking (that is expecting a component), let's just post-pend
			// a hash to the end of the componentId and ensure the uri slice is as expected.
			// described in https://github.com/pb33f/libopenapi/issues/37
			componentId = fmt.Sprintf("%s#", componentId)
			uri = append(uri, "")
			return index.performExternalLookup(uri, componentId, remoteLookup, parent)
		}

	case FileResolve:
		uri := strings.Split(componentId, "#")
		if len(uri) == 2 {
			return index.performExternalLookup(uri, componentId, fileLookup, parent)
		}
		if len(uri) == 1 {
			// if there is no reference, second segment is empty / has no name
			// this means there is no component to look-up and the entire file should be pulled in.
			// to stop all the other code from breaking (that is expecting a component), let's just post-pend
			// a hash to the end of the componentId and ensure the uri slice is as expected.
			// described in https://github.com/pb33f/libopenapi/issues/37
			//
			// ^^ this same issue was re-reported in file based lookups in vacuum.
			// more info here: https://github.com/daveshanley/vacuum/issues/225
			componentId = fmt.Sprintf("%s#", componentId)
			uri = append(uri, "")
			return index.performExternalLookup(uri, componentId, fileLookup, parent)
		}
	}
	return nil
}

func (index *SpecIndex) lookupRemoteReference(ref string) (*yaml.Node, *yaml.Node, error) {
	// split string to remove file reference
	uri := strings.Split(ref, "#")

	var parsedRemoteDocument *yaml.Node
	if index.seenRemoteSources[uri[0]] != nil {
		parsedRemoteDocument = index.seenRemoteSources[uri[0]]
	} else {
		index.httpLock.Lock()
		resp, err := index.httpClient.Get(uri[0])
		index.httpLock.Unlock()
		if err != nil {
			return nil, nil, err
		}
		body, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			return nil, nil, err
		}

		var remoteDoc yaml.Node
		err = yaml.Unmarshal(body, &remoteDoc)
		if err != nil {
			return nil, nil, err
		}
		parsedRemoteDocument = &remoteDoc
		index.remoteLock.Lock()
		index.seenRemoteSources[uri[0]] = &remoteDoc
		index.remoteLock.Unlock()
	}

	// lookup item from reference by using a path query.
	var query string
	if len(uri) >= 2 {
		query = fmt.Sprintf("$%s", strings.ReplaceAll(uri[1], "/", "."))
	} else {
		query = "$"
	}

	// remove any URL encoding
	query = strings.Replace(query, "~1", "./", 1)
	query = strings.ReplaceAll(query, "~1", "/")

	path, err := yamlpath.NewPath(query)
	if err != nil {
		return nil, nil, err
	}
	result, _ := path.Find(parsedRemoteDocument)
	if len(result) == 1 {
		return result[0], parsedRemoteDocument, nil
	}
	return nil, nil, nil
}
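To make the query construction above concrete, a small hedged sketch of what the fragment-to-yamlpath conversion produces (the reference string is only an illustrative placeholder):

	ref := "https://example.com/api.yaml#/components/schemas/Pet" // placeholder ref
	uri := strings.Split(ref, "#")
	query := fmt.Sprintf("$%s", strings.ReplaceAll(uri[1], "/", "."))
	fmt.Println(query) // prints: $.components.schemas.Pet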
func (index *SpecIndex) lookupFileReference(ref string) (*yaml.Node, *yaml.Node, error) {
	// split string to remove file reference
	uri := strings.Split(ref, "#")

	file := strings.ReplaceAll(uri[0], "file:", "")

	var parsedRemoteDocument *yaml.Node

	if index.seenRemoteSources[file] != nil {
		parsedRemoteDocument = index.seenRemoteSources[file]

	} else {

		// try and read the file off the local file system, if it fails
		// check for a baseURL and then ask our remote lookup function to go try and get it.
		// index.fileLock.Lock()
		body, err := ioutil.ReadFile(file)
		// index.fileLock.Unlock()

		if err != nil {

			// if we have a baseURL, then we can try and get the file from there.
			if index.config != nil && index.config.BaseURL != nil {

				u := index.config.BaseURL
				remoteRef := fmt.Sprintf("%s://%s%s/%s", u.Scheme, u.Host, u.Path, ref)
				a, b, e := index.lookupRemoteReference(remoteRef)
				if e != nil {
					// give up, we can't find the file, not locally, not remotely. It's toast.
					return nil, nil, e
				}

				// everything looks good, lets just make sure we also add a key to the raw reference name.
				if _, ok := index.seenRemoteSources[file]; !ok {
					index.seenRemoteSources[file] = b
				}

				return a, b, nil

			} else {
				// no baseURL? then we can't do anything, give up.
				return nil, nil, err
			}
		}

		var remoteDoc yaml.Node
		err = yaml.Unmarshal(body, &remoteDoc)
		if err != nil {
			return nil, nil, err
		}
		parsedRemoteDocument = &remoteDoc
		index.seenRemoteSources[file] = &remoteDoc
	}

	// lookup item from reference by using a path query.
	var query string
	if len(uri) >= 2 {
		query = fmt.Sprintf("$%s", strings.ReplaceAll(uri[1], "/", "."))
	} else {
		query = "$"
	}

	// remove any URL encoding
	query = strings.Replace(query, "~1", "./", 1)
	query = strings.ReplaceAll(query, "~1", "/")

	path, err := yamlpath.NewPath(query)
	if err != nil {
		return nil, nil, err
	}
	result, _ := path.Find(parsedRemoteDocument)
	if len(result) == 1 {
		return result[0], parsedRemoteDocument, nil
	}

	return nil, parsedRemoteDocument, nil
}

func (index *SpecIndex) FindComponentInRoot(componentId string) *Reference {
	if index.root != nil {
		name, friendlySearch := utils.ConvertComponentIdIntoFriendlyPathSearch(componentId)
		path, err := yamlpath.NewPath(friendlySearch)
		if path == nil || err != nil {
			return nil // no component found
		}
		res, _ := path.Find(index.root)
		if len(res) == 1 {
			ref := &Reference{
				Definition: componentId,
				Name: name,
				Node: res[0],
				Path: friendlySearch,
				RequiredRefProperties: index.extractDefinitionRequiredRefProperties(res[0], map[string][]string{}),
			}

			return ref
		}
	}
	return nil
}

func (index *SpecIndex) performExternalLookup(uri []string, componentId string,
	lookupFunction ExternalLookupFunction, parent *yaml.Node,
) *Reference {
	if len(uri) > 0 {
		externalSpecIndex := index.externalSpecIndex[uri[0]]
		if externalSpecIndex == nil {
			_, newRoot, err := lookupFunction(componentId)
			if err != nil {
				indexError := &IndexingError{
					Err: err,
					Node: parent,
					Path: componentId,
				}
				index.refErrors = append(index.refErrors, indexError)
				return nil
			}

			// cool, cool, lets index this spec also. This is a recursive action and will keep going
			// until all remote references have been found.
			newIndex := NewSpecIndexWithConfig(newRoot, index.config)
			index.fileLock.Lock()
			index.externalSpecIndex[uri[0]] = newIndex
			index.fileLock.Unlock()
			externalSpecIndex = newIndex
		}

		foundRef := externalSpecIndex.FindComponentInRoot(uri[1])
		if foundRef != nil {
			nameSegs := strings.Split(uri[1], "/")
			ref := &Reference{
				Definition: componentId,
				Name: nameSegs[len(nameSegs)-1],
				Node: foundRef.Node,
				IsRemote: true,
				RemoteLocation: componentId,
				Path: foundRef.Path,
			}
			return ref
		}
	}
	return nil
}
index/index_model.go (new file, 218 lines)
@@ -0,0 +1,218 @@
// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
// SPDX-License-Identifier: MIT

package index

import (
	"gopkg.in/yaml.v3"
	"net/http"
	"net/url"
	"sync"
)

// Constants used to determine if resolving is local, file based or remote file based.
const (
	LocalResolve = iota
	HttpResolve
	FileResolve
)

// Reference is a wrapper around *yaml.Node results to make things more manageable when performing
// algorithms on data models. the *yaml.Node def is just a bit too low level for tracking state.
type Reference struct {
	Definition string
	Name string
	Node *yaml.Node
	ParentNode *yaml.Node
	Resolved bool
	Circular bool
	Seen bool
	IsRemote bool
	RemoteLocation string
	Path string // this won't always be available.
	RequiredRefProperties map[string][]string // definition names (eg, #/definitions/One) to a list of required properties on this definition which reference that definition
}

// ReferenceMapped is a helper struct for mapped references put into sequence (we lose the key)
type ReferenceMapped struct {
	Reference *Reference
	Definition string
}

// SpecIndexConfig is a configuration struct for the SpecIndex introduced in 0.6.0 that provides an expandable
// set of granular options. The first being the ability to set the Base URL for resolving relative references, and
// allowing or disallowing remote or local file lookups.
//  - https://github.com/pb33f/libopenapi/issues/73
type SpecIndexConfig struct {
	// The BaseURL will be the root from which relative references will be resolved from if they can't be found locally.
	//
	// For example:
	//  - $ref: somefile.yaml#/components/schemas/SomeSchema
	//
	// might not be found locally, if the file was pulled in from a remote server (a good example is the DigitalOcean API),
	// so by setting a BaseURL, the reference will try to be resolved from the remote server.
	//
	// If our baseURL is set to https://pb33f.io/libopenapi then our reference will try to be resolved from:
	//  - $ref: https://pb33f.io/libopenapi/somefile.yaml#/components/schemas/SomeSchema
	//
	// More details on relative references can be found in issue #73: https://github.com/pb33f/libopenapi/issues/73
	BaseURL *url.URL // set the Base URL for resolving relative references if the spec is exploded.

	// In an earlier version of libopenapi (pre 0.6.0) the index would automatically resolve all references.
	// They could have been local, or they could have been remote. This was a problem because it meant
	// there was a potential for a remote exploit if a remote reference was malicious. There aren't any known
	// exploits, but it's better to be safe than sorry.
	//
	// To read more about this, you can find a discussion here: https://github.com/pb33f/libopenapi/pull/64
	AllowRemoteLookup bool // Allow remote lookups for references. Defaults to false
	AllowFileLookup bool   // Allow file lookups for references. Defaults to false
}

// SpecIndex is a complete pre-computed index of the entire specification. Numbers are pre-calculated and
// quick direct access to paths, operations, tags are all available. No need to walk the entire node tree in rules,
// everything is pre-walked if you need it.
type SpecIndex struct {
	allRefs map[string]*Reference // all (deduplicated) refs
	rawSequencedRefs []*Reference // all raw references in sequence as they are scanned, not deduped.
	linesWithRefs map[int]bool // lines that link to references.
	allMappedRefs map[string]*Reference // these are the located mapped refs
	allMappedRefsSequenced []*ReferenceMapped // sequenced mapped refs
	refsByLine map[string]map[int]bool // every reference and the lines it's referenced from
	pathRefs map[string]map[string]*Reference // all path references
	paramOpRefs map[string]map[string]map[string]*Reference // params in operations.
	paramCompRefs map[string]*Reference // params in components
	paramAllRefs map[string]*Reference // combined components and ops
	paramInlineDuplicates map[string][]*Reference // inline params all with the same name
	globalTagRefs map[string]*Reference // top level global tags
	securitySchemeRefs map[string]*Reference // top level security schemes
	requestBodiesRefs map[string]*Reference // top level request bodies
	responsesRefs map[string]*Reference // top level responses
	headersRefs map[string]*Reference // top level headers
	examplesRefs map[string]*Reference // top level examples
	securityRequirementRefs map[string]map[string][]*Reference // (NOT $ref) but a name based lookup for requirements
	callbacksRefs map[string]map[string][]*Reference // all callbacks
	linksRefs map[string]map[string][]*Reference // all links
	operationTagsRefs map[string]map[string][]*Reference // tags found in operations
	operationDescriptionRefs map[string]map[string]*Reference // descriptions in operations.
	operationSummaryRefs map[string]map[string]*Reference // summaries in operations
	callbackRefs map[string]*Reference // top level callback refs
	serversRefs []*Reference // all top level server refs
	rootServersNode *yaml.Node // servers root node
	opServersRefs map[string]map[string][]*Reference // all operation level server overrides.
	polymorphicRefs map[string]*Reference // every reference to a polymorphic ref
	polymorphicAllOfRefs []*Reference // every reference to 'allOf' references
	polymorphicOneOfRefs []*Reference // every reference to 'oneOf' references
	polymorphicAnyOfRefs []*Reference // every reference to 'anyOf' references
	externalDocumentsRef []*Reference // all external documents in spec
	rootSecurity []*Reference // root security definitions.
	rootSecurityNode *yaml.Node // root security node.
	refsWithSiblings map[string]Reference // references with sibling elements next to them
	pathRefsLock sync.Mutex // create lock for all refs maps, we want to build data as fast as we can
	externalDocumentsCount int // number of externalDocument nodes found
	operationTagsCount int // number of unique tags in operations
	globalTagsCount int // number of global tags defined
	totalTagsCount int // number unique tags in spec
	securitySchemesCount int // security schemes
	globalRequestBodiesCount int // component request bodies
	globalResponsesCount int // component responses
	globalHeadersCount int // component headers
	globalExamplesCount int // component examples
	globalLinksCount int // component links
	globalCallbacksCount int // component callbacks
	globalCallbacks int // component callbacks.
	pathCount int // number of paths
	operationCount int // number of operations
	operationParamCount int // number of params defined in operations
	componentParamCount int // number of params defined in components
	componentsInlineParamUniqueCount int // number of inline params with unique names
	componentsInlineParamDuplicateCount int // number of inline params with duplicate names
	schemaCount int // number of schemas
	refCount int // total ref count
	root *yaml.Node // the root document
	pathsNode *yaml.Node // paths node
	tagsNode *yaml.Node // tags node
	componentsNode *yaml.Node // components node
	parametersNode *yaml.Node // components/parameters node
	allParametersNode map[string]*Reference // all parameters node
	allParameters map[string]*Reference // all parameters (components/defs)
	schemasNode *yaml.Node // components/schemas node
	allInlineSchemaDefinitions []*Reference // all schemas found in document outside of components (openapi) or definitions (swagger).
	allInlineSchemaObjectDefinitions []*Reference // all schemas that are objects found in document outside of components (openapi) or definitions (swagger).
	allComponentSchemaDefinitions map[string]*Reference // all schemas found in components (openapi) or definitions (swagger).
	securitySchemesNode *yaml.Node // components/securitySchemes node
	allSecuritySchemes map[string]*Reference // all security schemes / definitions.
	requestBodiesNode *yaml.Node // components/requestBodies node
	allRequestBodies map[string]*Reference // all request bodies
	responsesNode *yaml.Node // components/responses node
	allResponses map[string]*Reference // all responses
	headersNode *yaml.Node // components/headers node
	allHeaders map[string]*Reference // all headers
	examplesNode *yaml.Node // components/examples node
	allExamples map[string]*Reference // all components examples
	linksNode *yaml.Node // components/links node
	allLinks map[string]*Reference // all links
	callbacksNode *yaml.Node // components/callbacks node
	allCallbacks map[string]*Reference // all components callbacks
	externalDocumentsNode *yaml.Node // external documents node
	allExternalDocuments map[string]*Reference // all external documents
	externalSpecIndex map[string]*SpecIndex // create a primary index of all external specs and componentIds
	refErrors []error // errors when indexing references
	operationParamErrors []error // errors when indexing parameters
	allDescriptions []*DescriptionReference // every single description found in the spec.
	allSummaries []*DescriptionReference // every single summary found in the spec.
	allEnums []*EnumReference // every single enum found in the spec.
	allObjectsWithProperties []*ObjectReference // every single object with properties found in the spec.
	enumCount int
	descriptionCount int
	summaryCount int
	seenRemoteSources map[string]*yaml.Node
	remoteLock sync.Mutex
	httpLock sync.Mutex
	fileLock sync.Mutex
	refLock sync.Mutex
	circularReferences []*CircularReferenceResult // only available when the resolver has been used.
	allowCircularReferences bool // decide if you want to error out, or allow circular references, default is false.
	config *SpecIndexConfig // configuration for the index
	httpClient *http.Client
	componentIndexChan chan bool
	polyComponentIndexChan chan bool
}

// ExternalLookupFunction is for lookup functions that take a JSONSchema reference and tries to find that node in the
// URI based document. Decides if the reference is local, remote or in a file.
type ExternalLookupFunction func(id string) (foundNode *yaml.Node, rootNode *yaml.Node, lookupError error)

// IndexingError holds data about something that went wrong during indexing.
type IndexingError struct {
	Err error
	Node *yaml.Node
	Path string
}

func (i *IndexingError) Error() string {
	return i.Err.Error()
}

// DescriptionReference holds data about a description that was found and where it was found.
type DescriptionReference struct {
	Content string
	Path string
	Node *yaml.Node
	IsSummary bool
}

type EnumReference struct {
	Node *yaml.Node
	Type *yaml.Node
	Path string
	SchemaNode *yaml.Node
	ParentNode *yaml.Node
}

type ObjectReference struct {
	Node *yaml.Node
	Path string
	ParentNode *yaml.Node
}

var methodTypes = []string{"get", "post", "put", "patch", "options", "head", "delete"}
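A hedged sketch of wiring SpecIndexConfig into the new constructor, written from a caller outside the index package (rootNode is assumed to be an already parsed yaml.Node, and the URL is the same one the DigitalOcean tests later in this diff use):

	baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")
	idx := index.NewSpecIndexWithConfig(&rootNode, &index.SpecIndexConfig{
		BaseURL:           baseURL, // where relative file refs are fetched from when not found locally
		AllowRemoteLookup: true,
		AllowFileLookup:   true,
	})
	_ = idx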
index/index_utils.go (new file, 90 lines)
@@ -0,0 +1,90 @@
// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
// SPDX-License-Identifier: MIT

package index

import (
	"gopkg.in/yaml.v3"
	"net/http"
	"strings"
	"time"
)

func isHttpMethod(val string) bool {
	switch strings.ToLower(val) {
	case methodTypes[0]:
		return true
	case methodTypes[1]:
		return true
	case methodTypes[2]:
		return true
	case methodTypes[3]:
		return true
	case methodTypes[4]:
		return true
	case methodTypes[5]:
		return true
	case methodTypes[6]:
		return true
	}
	return false
}

func DetermineReferenceResolveType(ref string) int {
	if ref != "" && ref[0] == '#' {
		return LocalResolve
	}
	if ref != "" && len(ref) >= 5 && (ref[:5] == "https" || ref[:5] == "http:") {
		return HttpResolve
	}
	if strings.Contains(ref, ".json") ||
		strings.Contains(ref, ".yaml") ||
		strings.Contains(ref, ".yml") {
		return FileResolve
	}
	return -1
}
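For clarity, a short sketch of how the classifier above treats typical reference strings (the inputs are illustrative only):

	fmt.Println(DetermineReferenceResolveType("#/components/schemas/Pet"))        // LocalResolve
	fmt.Println(DetermineReferenceResolveType("https://example.com/api.yaml#/x")) // HttpResolve
	fmt.Println(DetermineReferenceResolveType("schemas/pet.yaml#/Pet"))           // FileResolve
	fmt.Println(DetermineReferenceResolveType(""))                                // -1 (unknown)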
func boostrapIndexCollections(rootNode *yaml.Node, index *SpecIndex) {
	index.root = rootNode
	index.allRefs = make(map[string]*Reference)
	index.allMappedRefs = make(map[string]*Reference)
	index.refsByLine = make(map[string]map[int]bool)
	index.linesWithRefs = make(map[int]bool)
	index.pathRefs = make(map[string]map[string]*Reference)
	index.paramOpRefs = make(map[string]map[string]map[string]*Reference)
	index.operationTagsRefs = make(map[string]map[string][]*Reference)
	index.operationDescriptionRefs = make(map[string]map[string]*Reference)
	index.operationSummaryRefs = make(map[string]map[string]*Reference)
	index.paramCompRefs = make(map[string]*Reference)
	index.paramAllRefs = make(map[string]*Reference)
	index.paramInlineDuplicates = make(map[string][]*Reference)
	index.globalTagRefs = make(map[string]*Reference)
	index.securitySchemeRefs = make(map[string]*Reference)
	index.requestBodiesRefs = make(map[string]*Reference)
	index.responsesRefs = make(map[string]*Reference)
	index.headersRefs = make(map[string]*Reference)
	index.examplesRefs = make(map[string]*Reference)
	index.callbacksRefs = make(map[string]map[string][]*Reference)
	index.linksRefs = make(map[string]map[string][]*Reference)
	index.callbackRefs = make(map[string]*Reference)
	index.externalSpecIndex = make(map[string]*SpecIndex)
	index.allComponentSchemaDefinitions = make(map[string]*Reference)
	index.allParameters = make(map[string]*Reference)
	index.allSecuritySchemes = make(map[string]*Reference)
	index.allRequestBodies = make(map[string]*Reference)
	index.allResponses = make(map[string]*Reference)
	index.allHeaders = make(map[string]*Reference)
	index.allExamples = make(map[string]*Reference)
	index.allLinks = make(map[string]*Reference)
	index.allCallbacks = make(map[string]*Reference)
	index.allExternalDocuments = make(map[string]*Reference)
	index.securityRequirementRefs = make(map[string]map[string][]*Reference)
	index.polymorphicRefs = make(map[string]*Reference)
	index.refsWithSiblings = make(map[string]Reference)
	index.seenRemoteSources = make(map[string]*yaml.Node)
	index.opServersRefs = make(map[string]map[string][]*Reference)
	index.httpClient = &http.Client{Timeout: time.Duration(5) * time.Second}
	index.componentIndexChan = make(chan bool)
	index.polyComponentIndexChan = make(chan bool)
}
1213
index/spec_index.go
1213
index/spec_index.go
File diff suppressed because it is too large
Load Diff
@@ -6,6 +6,7 @@ package index
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/url"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
@@ -81,6 +82,55 @@ func TestSpecIndex_Asana(t *testing.T) {
|
||||
assert.Equal(t, 69, index.componentsInlineParamUniqueCount)
|
||||
}
|
||||
|
||||
func TestSpecIndex_DigitalOcean(t *testing.T) {
|
||||
asana, _ := ioutil.ReadFile("../test_specs/digitalocean.yaml")
|
||||
var rootNode yaml.Node
|
||||
yaml.Unmarshal(asana, &rootNode)
|
||||
|
||||
baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")
|
||||
index := NewSpecIndexWithConfig(&rootNode, &SpecIndexConfig{
|
||||
BaseURL: baseURL,
|
||||
AllowRemoteLookup: true,
|
||||
AllowFileLookup: true,
|
||||
})
|
||||
|
||||
assert.Len(t, index.GetAllExternalIndexes(), 291)
|
||||
assert.NotNil(t, index)
|
||||
}
|
||||
|
||||
func TestSpecIndex_DigitalOcean_LookupsNotAllowed(t *testing.T) {
|
||||
asana, _ := ioutil.ReadFile("../test_specs/digitalocean.yaml")
|
||||
var rootNode yaml.Node
|
||||
yaml.Unmarshal(asana, &rootNode)
|
||||
|
||||
baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")
|
||||
index := NewSpecIndexWithConfig(&rootNode, &SpecIndexConfig{
|
||||
BaseURL: baseURL,
|
||||
})
|
||||
|
||||
// no lookups allowed, bits have not been set, so there should just be a bunch of errors.
|
||||
assert.Len(t, index.GetAllExternalIndexes(), 0)
|
||||
assert.True(t, len(index.GetReferenceIndexErrors()) > 0)
|
||||
}
|
||||
|
||||
func TestSpecIndex_BaseURLError(t *testing.T) {
	asana, _ := ioutil.ReadFile("../test_specs/digitalocean.yaml")
	var rootNode yaml.Node
	yaml.Unmarshal(asana, &rootNode)

	// this should fail because the base url does not point at the DigitalOcean spec repository,
	// so none of the remote references can be resolved.
	baseURL, _ := url.Parse("https://githerbs.com/fresh/herbs/for/you")
	index := NewSpecIndexWithConfig(&rootNode, &SpecIndexConfig{
		BaseURL: baseURL,
		AllowRemoteLookup: true,
		AllowFileLookup: true,
	})

	assert.Len(t, index.GetAllExternalIndexes(), 0)
	assert.Len(t, index.GetReferenceIndexErrors(), 582)
}

func TestSpecIndex_k8s(t *testing.T) {
	asana, _ := ioutil.ReadFile("../test_specs/k8s.json")
	var rootNode yaml.Node
@@ -591,7 +641,9 @@ paths:
}

func TestSpecIndex_lookupRemoteReference_SeenSourceSimulation_BadJSON(t *testing.T) {
	index := new(SpecIndex)
	index := NewSpecIndexWithConfig(nil, &SpecIndexConfig{
		AllowRemoteLookup: true,
	})
	index.seenRemoteSources = make(map[string]*yaml.Node)
	a, b, err := index.lookupRemoteReference("https://google.com//logos/doodles/2022/labor-day-2022-6753651837109490.3-l.png#/hey")
	assert.Error(t, err)

372
index/utility_methods.go
Normal file
@@ -0,0 +1,372 @@
// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
// SPDX-License-Identifier: MIT

package index

import (
	"fmt"
	"github.com/pb33f/libopenapi/utils"
	"gopkg.in/yaml.v3"
	"sync"
)

func (index *SpecIndex) extractDefinitionsAndSchemas(schemasNode *yaml.Node, pathPrefix string) {
	var name string
	for i, schema := range schemasNode.Content {
		if i%2 == 0 {
			name = schema.Value
			continue
		}

		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: schema,
			Path: fmt.Sprintf("$.components.schemas.%s", name),
			ParentNode: schemasNode,
			RequiredRefProperties: index.extractDefinitionRequiredRefProperties(schemasNode, map[string][]string{}),
		}
		index.allComponentSchemaDefinitions[def] = ref
	}
}

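As a quick illustration of the keys this walk produces: the definition string and JSONPath for a schema entry are built with the same fmt.Sprintf calls as above. The schema name "Burger" and the "#/components/schemas/" prefix are assumptions made for the example, not values taken from the commit.

package main

import "fmt"

func main() {
	// "Burger" is an invented schema name; the prefix is assumed to be the components path.
	pathPrefix := "#/components/schemas/"
	name := "Burger"

	def := fmt.Sprintf("%s%s", pathPrefix, name)         // "#/components/schemas/Burger"
	path := fmt.Sprintf("$.components.schemas.%s", name) // "$.components.schemas.Burger"

	fmt.Println(def, path)
}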
// extractDefinitionRequiredRefProperties goes through the direct properties of a schema and extracts the map of required definitions from within it
func (index *SpecIndex) extractDefinitionRequiredRefProperties(schemaNode *yaml.Node, reqRefProps map[string][]string) map[string][]string {
	if schemaNode == nil {
		return reqRefProps
	}

	// If the node we're looking at is a direct ref to another model without any properties, mark it as required, but still continue to look for required properties
	isRef, _, defPath := utils.IsNodeRefValue(schemaNode)
	if isRef {
		if _, ok := reqRefProps[defPath]; !ok {
			reqRefProps[defPath] = []string{}
		}
	}

	// Check for a required properties list, and return if none exists, as any properties will be optional
	_, requiredSeqNode := utils.FindKeyNodeTop("required", schemaNode.Content)
	if requiredSeqNode == nil {
		return reqRefProps
	}

	_, propertiesMapNode := utils.FindKeyNodeTop("properties", schemaNode.Content)
	if propertiesMapNode == nil {
		// TODO: Log a warning on the resolver, because if you have required properties, but no actual properties, something is wrong
		return reqRefProps
	}

	name := ""
	for i, param := range propertiesMapNode.Content {
		if i%2 == 0 {
			name = param.Value
			continue
		}

		// Check to see if the current property is directly embedded within the current schema, and handle its properties if so
		_, paramPropertiesMapNode := utils.FindKeyNodeTop("properties", param.Content)
		if paramPropertiesMapNode != nil {
			reqRefProps = index.extractDefinitionRequiredRefProperties(param, reqRefProps)
		}

		// Check to see if the current property is polymorphic, and dive into that model if so
		for _, key := range []string{"allOf", "oneOf", "anyOf"} {
			_, ofNode := utils.FindKeyNodeTop(key, param.Content)
			if ofNode != nil {
				for _, ofNodeItem := range ofNode.Content {
					reqRefProps = index.extractRequiredReferenceProperties(ofNodeItem, name, reqRefProps)
				}
			}
		}
	}

	// Run through each of the required properties and extract _their_ required references
	for _, requiredPropertyNode := range requiredSeqNode.Content {
		_, requiredPropDefNode := utils.FindKeyNodeTop(requiredPropertyNode.Value, propertiesMapNode.Content)
		if requiredPropDefNode == nil {
			continue
		}

		reqRefProps = index.extractRequiredReferenceProperties(requiredPropDefNode, requiredPropertyNode.Value, reqRefProps)
	}

	return reqRefProps
}

// extractRequiredReferenceProperties returns a map of definition names to the property or properties which reference it within a node
func (index *SpecIndex) extractRequiredReferenceProperties(requiredPropDefNode *yaml.Node, propName string, reqRefProps map[string][]string) map[string][]string {
	isRef, _, defPath := utils.IsNodeRefValue(requiredPropDefNode)
	if !isRef {
		_, defItems := utils.FindKeyNodeTop("items", requiredPropDefNode.Content)
		if defItems != nil {
			isRef, _, defPath = utils.IsNodeRefValue(defItems)
		}
	}

	if /* still */ !isRef {
		return reqRefProps
	}

	if _, ok := reqRefProps[defPath]; !ok {
		reqRefProps[defPath] = []string{}
	}
	reqRefProps[defPath] = append(reqRefProps[defPath], propName)

	return reqRefProps
}

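To make the return value of the two helpers above concrete, an illustrative sketch (not from the commit) of the map shape for a schema whose required property points at another definition. The definition and property names are invented.

package main

import "fmt"

func main() {
	// key: the referenced definition; value: the required properties that point at it.
	// "#/components/schemas/Fries" and "fries" are invented names for this illustration.
	requiredRefProps := map[string][]string{
		"#/components/schemas/Fries": {"fries"},
	}
	fmt.Println(requiredRefProps)
}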
func (index *SpecIndex) extractComponentParameters(paramsNode *yaml.Node, pathPrefix string) {
	var name string
	for i, param := range paramsNode.Content {
		if i%2 == 0 {
			name = param.Value
			continue
		}
		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: param,
		}
		index.allParameters[def] = ref
	}
}

func (index *SpecIndex) extractComponentRequestBodies(requestBodiesNode *yaml.Node, pathPrefix string) {
	var name string
	for i, reqBod := range requestBodiesNode.Content {
		if i%2 == 0 {
			name = reqBod.Value
			continue
		}
		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: reqBod,
		}
		index.allRequestBodies[def] = ref
	}
}

func (index *SpecIndex) extractComponentResponses(responsesNode *yaml.Node, pathPrefix string) {
	var name string
	for i, response := range responsesNode.Content {
		if i%2 == 0 {
			name = response.Value
			continue
		}
		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: response,
		}
		index.allResponses[def] = ref
	}
}

func (index *SpecIndex) extractComponentHeaders(headersNode *yaml.Node, pathPrefix string) {
	var name string
	for i, header := range headersNode.Content {
		if i%2 == 0 {
			name = header.Value
			continue
		}
		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: header,
		}
		index.allHeaders[def] = ref
	}
}

func (index *SpecIndex) extractComponentCallbacks(callbacksNode *yaml.Node, pathPrefix string) {
	var name string
	for i, callback := range callbacksNode.Content {
		if i%2 == 0 {
			name = callback.Value
			continue
		}
		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: callback,
		}
		index.allCallbacks[def] = ref
	}
}

func (index *SpecIndex) extractComponentLinks(linksNode *yaml.Node, pathPrefix string) {
	var name string
	for i, link := range linksNode.Content {
		if i%2 == 0 {
			name = link.Value
			continue
		}
		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: link,
		}
		index.allLinks[def] = ref
	}
}

func (index *SpecIndex) extractComponentExamples(examplesNode *yaml.Node, pathPrefix string) {
	var name string
	for i, example := range examplesNode.Content {
		if i%2 == 0 {
			name = example.Value
			continue
		}
		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: example,
		}
		index.allExamples[def] = ref
	}
}

func (index *SpecIndex) extractComponentSecuritySchemes(securitySchemesNode *yaml.Node, pathPrefix string) {
	var name string
	for i, secScheme := range securitySchemesNode.Content {
		if i%2 == 0 {
			name = secScheme.Value
			continue
		}
		def := fmt.Sprintf("%s%s", pathPrefix, name)
		ref := &Reference{
			Definition: def,
			Name: name,
			Node: secScheme,
			ParentNode: securitySchemesNode,
			Path: fmt.Sprintf("$.components.securitySchemes.%s", name),
		}
		index.allSecuritySchemes[def] = ref
	}
}

func (index *SpecIndex) countUniqueInlineDuplicates() int {
	if index.componentsInlineParamUniqueCount > 0 {
		return index.componentsInlineParamUniqueCount
	}
	unique := 0
	for _, p := range index.paramInlineDuplicates {
		if len(p) == 1 {
			unique++
		}
	}
	index.componentsInlineParamUniqueCount = unique
	return unique
}

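A small illustrative sketch (not from the commit) of the counting rule above: an inline parameter counts as unique when it appears exactly once. The map below is a simplified stand-in for paramInlineDuplicates, which really holds []*Reference values, and the parameter and path names are invented.

package main

import "fmt"

func main() {
	// simplified stand-in for index.paramInlineDuplicates (name -> places it was seen).
	paramInlineDuplicates := map[string][]string{
		"limit":  {"GET /burgers"},               // seen once: counts as unique
		"fields": {"GET /burgers", "GET /fries"}, // seen twice: not unique
	}

	unique := 0
	for _, p := range paramInlineDuplicates {
		if len(p) == 1 {
			unique++
		}
	}
	fmt.Println(unique) // prints 1
}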
func (index *SpecIndex) scanOperationParams(params []*yaml.Node, pathItemNode *yaml.Node, method string) {
	for i, param := range params {
		// param is ref
		if len(param.Content) > 0 && param.Content[0].Value == "$ref" {

			paramRefName := param.Content[1].Value
			paramRef := index.allMappedRefs[paramRefName]

			if index.paramOpRefs[pathItemNode.Value] == nil {
				index.paramOpRefs[pathItemNode.Value] = make(map[string]map[string]*Reference)
				index.paramOpRefs[pathItemNode.Value][method] = make(map[string]*Reference)

			}
			// if we know the path, but it's a new method
			if index.paramOpRefs[pathItemNode.Value][method] == nil {
				index.paramOpRefs[pathItemNode.Value][method] = make(map[string]*Reference)
			}

			// if this is a duplicate, add an error and ignore it
			if index.paramOpRefs[pathItemNode.Value][method][paramRefName] != nil {
				path := fmt.Sprintf("$.paths.%s.%s.parameters[%d]", pathItemNode.Value, method, i)
				if method == "top" {
					path = fmt.Sprintf("$.paths.%s.parameters[%d]", pathItemNode.Value, i)
				}

				index.operationParamErrors = append(index.operationParamErrors, &IndexingError{
					Err: fmt.Errorf("the `%s` operation parameter at path `%s`, "+
						"index %d has a duplicate ref `%s`", method, pathItemNode.Value, i, paramRefName),
					Node: param,
					Path: path,
				})
			} else {
				index.paramOpRefs[pathItemNode.Value][method][paramRefName] = paramRef
			}

			continue

		} else {

			// param is inline.
			_, vn := utils.FindKeyNode("name", param.Content)

			path := fmt.Sprintf("$.paths.%s.%s.parameters[%d]", pathItemNode.Value, method, i)
			if method == "top" {
				path = fmt.Sprintf("$.paths.%s.parameters[%d]", pathItemNode.Value, i)
			}

			if vn == nil {
				index.operationParamErrors = append(index.operationParamErrors, &IndexingError{
					Err: fmt.Errorf("the '%s' operation parameter at path '%s', index %d has no 'name' value",
						method, pathItemNode.Value, i),
					Node: param,
					Path: path,
				})
				continue
			}

			ref := &Reference{
				Definition: vn.Value,
				Name: vn.Value,
				Node: param,
				Path: path,
			}
			if index.paramOpRefs[pathItemNode.Value] == nil {
				index.paramOpRefs[pathItemNode.Value] = make(map[string]map[string]*Reference)
				index.paramOpRefs[pathItemNode.Value][method] = make(map[string]*Reference)
			}

			// if we know the path but this is a new method.
			if index.paramOpRefs[pathItemNode.Value][method] == nil {
				index.paramOpRefs[pathItemNode.Value][method] = make(map[string]*Reference)
			}

			// if this is a duplicate, add an error and ignore it
			if index.paramOpRefs[pathItemNode.Value][method][ref.Name] != nil {
				path := fmt.Sprintf("$.paths.%s.%s.parameters[%d]", pathItemNode.Value, method, i)
				if method == "top" {
					path = fmt.Sprintf("$.paths.%s.parameters[%d]", pathItemNode.Value, i)
				}

				index.operationParamErrors = append(index.operationParamErrors, &IndexingError{
					Err: fmt.Errorf("the `%s` operation parameter at path `%s`, "+
						"index %d has a duplicate name `%s`", method, pathItemNode.Value, i, vn.Value),
					Node: param,
					Path: path,
				})
			} else {
				index.paramOpRefs[pathItemNode.Value][method][ref.Name] = ref
			}
			continue
		}
	}
}

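To help follow the bookkeeping above, this sketch (not from the commit) shows the nesting used by paramOpRefs: path, then method, then parameter name or $ref, with the "top" method key holding path-level parameters. The path and parameter names are invented, and Reference is reduced to a stub so the snippet stands alone.

package main

import "fmt"

// Reference is a reduced stub of the index Reference type, just for illustration.
type Reference struct {
	Name string
}

func main() {
	// path -> method -> parameter name (or $ref) -> reference.
	// "top" holds path-level parameters; all names below are invented.
	paramOpRefs := map[string]map[string]map[string]*Reference{
		"/burgers/{burgerId}": {
			"get": {"burgerId": {Name: "burgerId"}},
			"top": {"#/components/parameters/Page": {Name: "Page"}},
		},
	}
	fmt.Println(len(paramOpRefs["/burgers/{burgerId}"]["get"]), "parameter on the get operation")
}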
func runIndexFunction(funcs []func() int, wg *sync.WaitGroup) {
	for _, cFunc := range funcs {
		go func(wg *sync.WaitGroup, cf func() int) {
			cf()
			wg.Done()
		}(wg, cFunc)
	}
}
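A minimal usage sketch (not from the commit) for the concurrent runner above: the caller is expected to Add the number of functions to the WaitGroup before calling, then Wait. The helper is copied locally and the counting functions are invented so the example compiles on its own.

package main

import (
	"fmt"
	"sync"
)

// local copy of the helper above so the sketch stands alone.
func runIndexFunction(funcs []func() int, wg *sync.WaitGroup) {
	for _, cFunc := range funcs {
		go func(wg *sync.WaitGroup, cf func() int) {
			cf()
			wg.Done()
		}(wg, cFunc)
	}
}

func main() {
	countA := func() int { return 1 }
	countB := func() int { return 2 }
	funcs := []func() int{countA, countB}

	var wg sync.WaitGroup
	wg.Add(len(funcs)) // one Done per function
	runIndexFunction(funcs, &wg)
	wg.Wait()

	fmt.Println("all index functions completed")
}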
@@ -104,8 +104,10 @@ func (resolver *Resolver) Resolve() []*ResolvingError {
	for _, ref := range mapped {
		seenReferences := make(map[string]bool)
		var journey []*index.Reference
		if ref != nil && ref.Reference != nil {
			ref.Reference.Node.Content = resolver.VisitReference(ref.Reference, seenReferences, journey, true)
		}
	}

	schemas := resolver.specIndex.GetAllComponentSchemas()
	for s, schemaRef := range schemas {

1587
test_specs/digitalocean.yaml
Normal file
File diff suppressed because it is too large