diff --git a/.github/sponsors/scalar-dark.png b/.github/sponsors/scalar-dark.png
new file mode 100644
index 0000000..fe5b953
Binary files /dev/null and b/.github/sponsors/scalar-dark.png differ
diff --git a/.github/sponsors/scalar-light.png b/.github/sponsors/scalar-light.png
new file mode 100644
index 0000000..1e757de
Binary files /dev/null and b/.github/sponsors/scalar-light.png differ
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index d8b4754..a3532a5 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -16,7 +16,7 @@ jobs:
- name: Set up Go 1.x
uses: actions/setup-go@v3
with:
- go-version: 1.19
+ go-version: 1.21
id: go
- name: Checkout code
diff --git a/README.md b/README.md
index d507b4f..9249024 100644
--- a/README.md
+++ b/README.md
@@ -29,6 +29,17 @@ like our _very kind_ sponsors:
[Speakeasy](https://speakeasyapi.dev/?utm_source=libopenapi+repo&utm_medium=github+sponsorship)
+
+
+
+
+
+
+
+[scalar](https://scalar.com)
+
+
+
---
`libopenapi` is pretty new, so our list of notable projects that depend on `libopenapi` is small (let me know if you'd like to add your project)
@@ -39,7 +50,7 @@ like our _very kind_ sponsors:
- [github.com/danielgtaylor/restish](https://github.com/danielgtaylor/restish) - "Restish is a CLI for interacting with REST-ish HTTP APIs"
- [github.com/speakeasy-api/speakeasy](https://github.com/speakeasy-api/speakeasy) - "Speakeasy CLI makes validating OpenAPI docs and generating idiomatic SDKs easy!"
- [github.com/apicat/apicat](https://github.com/apicat/apicat) - "AI-powered API development tool"
-- [github.com/mattermost/mattermost](https://github.com/mattermost/mattermost) = "Software development lifecycle platform"
+- [github.com/mattermost/mattermost](https://github.com/mattermost/mattermost) - "Software development lifecycle platform"
- Your project here?
---
@@ -67,6 +78,7 @@ See all the documentation at https://pb33f.io/libopenapi/
- [Using Vendor Extensions](https://pb33f.io/libopenapi/extensions/)
- [The Index](https://pb33f.io/libopenapi/index/)
- [The Resolver](https://pb33f.io/libopenapi/resolver/)
+- [The Rolodex](https://pb33f.io/libopenapi/rolodex/)
- [Circular References](https://pb33f.io/libopenapi/circular-references/)
- [What Changed / Diff Engine](https://pb33f.io/libopenapi/what-changed/)
- [FAQ](https://pb33f.io/libopenapi/faq/)
diff --git a/datamodel/document_config.go b/datamodel/document_config.go
index 7b7d113..289db3f 100644
--- a/datamodel/document_config.go
+++ b/datamodel/document_config.go
@@ -4,8 +4,11 @@
package datamodel
import (
- "net/http"
+ "github.com/pb33f/libopenapi/utils"
+ "io/fs"
+ "log/slog"
"net/url"
+ "os"
)
// DocumentConfiguration is used to configure the document creation process. It was added in v0.6.0 to allow
@@ -20,17 +23,54 @@ type DocumentConfiguration struct {
// RemoteURLHandler is a function that will be used to retrieve remote documents. If not set, the default
// remote document getter will be used.
+ //
+ // The remote handler is only used if the BaseURL is set. If the BaseURL is not set, then the remote handler
+ // will not be used, as there will be nothing to use it against.
+ //
// Resolves [#132]: https://github.com/pb33f/libopenapi/issues/132
- RemoteURLHandler func(url string) (*http.Response, error)
+ RemoteURLHandler utils.RemoteURLHandler
// If resolving locally, the BasePath will be the root from which relative references will be resolved from.
// It's usually the location of the root specification.
+ //
+ // Be warned, setting this value will instruct the rolodex to index EVERY YAML and JSON file it finds from the
+ // base path. The rolodex will recurse into every directory and pick up everything from this location down.
+ //
+ // To avoid sucking in all the files, set the FileFilter to a list of specific files to be included.
BasePath string // set the Base Path for resolving relative references if the spec is exploded.
+ // FileFilter is a list of specific files to be included by the rolodex when looking up references. If this value
+ // is set, then only these specific files will be included. If this value is not set, then all files will be included.
+ FileFilter []string
+
+ // RemoteFS is a filesystem that will be used to retrieve remote documents. If not set, the rolodex will use its
+ // own internal remote filesystem implementation (the default). The RemoteURLHandler will be used to retrieve
+ // remote documents if it has been set.
+ RemoteFS fs.FS
+
+ // LocalFS is a filesystem that will be used to retrieve local documents. If not set, the rolodex will use its
+ // own internal local filesystem implementation (the default).
+ LocalFS fs.FS
+
// AllowFileReferences will allow the index to locate relative file references. This is disabled by default.
+ //
+ // This behavior is now driven by the inclusion of a BasePath. If a BasePath is set, then the
+ // rolodex will look for relative file references. If no BasePath is set, then the rolodex will not look for
+ // relative file references.
+ //
+ // This value, when set, will force the creation of a local file system even when the BasePath has not been set.
+ // It will suck in and index everything from the current working directory, down... so be warned.
+ // FileFilter should be used to limit the scope of the rolodex.
AllowFileReferences bool
// AllowRemoteReferences will allow the index to lookup remote references. This is disabled by default.
+ //
+ // This behavior is now driven by the inclusion of a BaseURL. If a BaseURL is set, then the
+ // rolodex will look for remote references. If no BaseURL is set, then the rolodex will not look for
+ // remote references. This value has no effect as of version 0.13.0 and will be removed in a future release.
+ //
+ // This value, when set, will force the creation of a remote file system even when the BaseURL has not been set.
+ // It will suck in every HTTP link it finds, and recurse through all references located in each document.
AllowRemoteReferences bool
// AvoidIndexBuild will avoid building the index. This is disabled by default, only use if you are sure you don't need it.
@@ -52,18 +92,21 @@ type DocumentConfiguration struct {
// So if libopenapi is returning circular references for this use case, then this option should be enabled.
// this is disabled by default, which means array circular references will be checked.
IgnoreArrayCircularReferences bool
+
+ // SkipCircularReferenceCheck will skip over checking for circular references. This is disabled by default, which
+ // means circular references will be checked. This is useful for developers building out models that should be
+ // indexed later on.
+ SkipCircularReferenceCheck bool
+
+ // Logger is a structured logger that will be used for logging errors and warnings. If not set, a default logger
+ // will be used, set to the Error level.
+ Logger *slog.Logger
}
-func NewOpenDocumentConfiguration() *DocumentConfiguration {
+func NewDocumentConfiguration() *DocumentConfiguration {
return &DocumentConfiguration{
- AllowFileReferences: true,
- AllowRemoteReferences: true,
- }
-}
-
-func NewClosedDocumentConfiguration() *DocumentConfiguration {
- return &DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
+ Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ })),
}
}
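
The new configuration surface above (BasePath, FileFilter, RemoteFS/LocalFS, Logger) replaces the old open/closed constructors. A minimal sketch of wiring it up follows — the spec paths and file names are hypothetical, and only fields added or changed in this diff are shown:

```go
package main

import (
	"log/slog"
	"os"

	"github.com/pb33f/libopenapi/datamodel"
	lowv3 "github.com/pb33f/libopenapi/datamodel/low/v3"
	"github.com/pb33f/libopenapi/utils"
)

func main() {
	// read a root spec that uses relative file references (hypothetical path).
	data, _ := os.ReadFile("spec/openapi.yaml")
	info, _ := datamodel.ExtractSpecInfo(data)

	config := datamodel.DocumentConfiguration{
		// root for resolving relative references; the rolodex indexes from here down.
		BasePath: "spec",
		// limit the rolodex to these files instead of everything under BasePath.
		FileFilter: []string{"openapi.yaml", "components.yaml"},
		// optional structured logger; defaults to an error-level JSON logger when omitted.
		Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
			Level: slog.LevelWarn,
		})),
	}

	// CreateDocumentFromConfig now returns a single (joined) error instead of []error.
	lowDoc, err := lowv3.CreateDocumentFromConfig(info, &config)
	if err != nil {
		for _, e := range utils.UnwrapErrors(err) {
			_ = e // handle or log each underlying error
		}
	}
	_ = lowDoc
}
```
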
diff --git a/datamodel/document_config_test.go b/datamodel/document_config_test.go
index 8d18f38..334932c 100644
--- a/datamodel/document_config_test.go
+++ b/datamodel/document_config_test.go
@@ -9,13 +9,6 @@ import (
)
func TestNewClosedDocumentConfiguration(t *testing.T) {
- cfg := NewClosedDocumentConfiguration()
- assert.False(t, cfg.AllowRemoteReferences)
- assert.False(t, cfg.AllowFileReferences)
-}
-
-func TestNewOpenDocumentConfiguration(t *testing.T) {
- cfg := NewOpenDocumentConfiguration()
- assert.True(t, cfg.AllowRemoteReferences)
- assert.True(t, cfg.AllowFileReferences)
+ cfg := NewDocumentConfiguration()
+ assert.NotNil(t, cfg)
}
diff --git a/datamodel/high/base/contact_test.go b/datamodel/high/base/contact_test.go
index b854eb5..a35be27 100644
--- a/datamodel/high/base/contact_test.go
+++ b/datamodel/high/base/contact_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"fmt"
lowmodel "github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
@@ -70,7 +71,7 @@ email: buckaroo@pb33f.io
// build low
var lowContact lowbase.Contact
_ = lowmodel.BuildModel(cNode.Content[0], &lowContact)
- _ = lowContact.Build(nil, cNode.Content[0], nil)
+ _ = lowContact.Build(context.Background(), nil, cNode.Content[0], nil)
// build high
highContact := NewContact(&lowContact)
diff --git a/datamodel/high/base/dynamic_value_test.go b/datamodel/high/base/dynamic_value_test.go
index bbd05e3..799be06 100644
--- a/datamodel/high/base/dynamic_value_test.go
+++ b/datamodel/high/base/dynamic_value_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
@@ -116,7 +117,7 @@ func TestDynamicValue_MarshalYAMLInline(t *testing.T) {
_ = yaml.Unmarshal([]byte(ymlSchema), &node)
lowProxy := new(lowbase.SchemaProxy)
- err := lowProxy.Build(nil, node.Content[0], idx)
+ err := lowProxy.Build(context.Background(), nil, node.Content[0], idx)
assert.NoError(t, err)
lowRef := low.NodeReference[*lowbase.SchemaProxy]{
@@ -160,7 +161,7 @@ func TestDynamicValue_MarshalYAMLInline_Error(t *testing.T) {
_ = yaml.Unmarshal([]byte(ymlSchema), &node)
lowProxy := new(lowbase.SchemaProxy)
- err := lowProxy.Build(nil, node.Content[0], idx)
+ err := lowProxy.Build(context.Background(), nil, node.Content[0], idx)
assert.NoError(t, err)
lowRef := low.NodeReference[*lowbase.SchemaProxy]{
diff --git a/datamodel/high/base/example_test.go b/datamodel/high/base/example_test.go
index 204f319..c7149bd 100644
--- a/datamodel/high/base/example_test.go
+++ b/datamodel/high/base/example_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"fmt"
"strings"
"testing"
@@ -31,7 +32,7 @@ x-hack: code`
var lowExample lowbase.Example
_ = lowmodel.BuildModel(cNode.Content[0], &lowExample)
- _ = lowExample.Build(&cNode, cNode.Content[0], nil)
+ _ = lowExample.Build(context.Background(), &cNode, cNode.Content[0], nil)
// build high
highExample := NewExample(&lowExample)
@@ -61,7 +62,7 @@ func TestExtractExamples(t *testing.T) {
var lowExample lowbase.Example
_ = lowmodel.BuildModel(cNode.Content[0], &lowExample)
- _ = lowExample.Build(nil, cNode.Content[0], nil)
+ _ = lowExample.Build(context.Background(), nil, cNode.Content[0], nil)
examplesMap := orderedmap.New[lowmodel.KeyReference[string], lowmodel.ValueReference[*lowbase.Example]]()
examplesMap.Set(
@@ -90,7 +91,7 @@ x-hack: code`
_ = lowmodel.BuildModel(node.Content[0], &lowExample)
// build out low-level example
- _ = lowExample.Build(nil, node.Content[0], nil)
+ _ = lowExample.Build(context.Background(), nil, node.Content[0], nil)
// create a new high-level example
highExample := NewExample(&lowExample)
diff --git a/datamodel/high/base/external_doc_test.go b/datamodel/high/base/external_doc_test.go
index e4fd204..224e2a9 100644
--- a/datamodel/high/base/external_doc_test.go
+++ b/datamodel/high/base/external_doc_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"fmt"
lowmodel "github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
@@ -26,7 +27,7 @@ x-hack: code`
var lowExt lowbase.ExternalDoc
_ = lowmodel.BuildModel(cNode.Content[0], &lowExt)
- _ = lowExt.Build(nil, cNode.Content[0], nil)
+ _ = lowExt.Build(context.Background(), nil, cNode.Content[0], nil)
highExt := NewExternalDoc(&lowExt)
@@ -61,7 +62,7 @@ x-hack: code`
_ = lowmodel.BuildModel(node.Content[0], &lowExt)
// build out low-level properties (like extensions)
- _ = lowExt.Build(nil, node.Content[0], nil)
+ _ = lowExt.Build(context.Background(), nil, node.Content[0], nil)
// create new high-level ExternalDoc
highExt := NewExternalDoc(&lowExt)
diff --git a/datamodel/high/base/info_test.go b/datamodel/high/base/info_test.go
index 33e31b9..c510fd3 100644
--- a/datamodel/high/base/info_test.go
+++ b/datamodel/high/base/info_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"fmt"
"testing"
@@ -32,7 +33,7 @@ x-cli-name: chicken cli`
var lowInfo lowbase.Info
_ = lowmodel.BuildModel(cNode.Content[0], &lowInfo)
- _ = lowInfo.Build(nil, cNode.Content[0], nil)
+ _ = lowInfo.Build(context.Background(), nil, cNode.Content[0], nil)
highInfo := NewInfo(&lowInfo)
@@ -74,7 +75,7 @@ version: 1.2.3`
// build out the low-level model
var lowInfo lowbase.Info
_ = lowmodel.BuildModel(&node, &lowInfo)
- _ = lowInfo.Build(nil, node.Content[0], nil)
+ _ = lowInfo.Build(context.Background(), nil, node.Content[0], nil)
// build the high level model
highInfo := NewInfo(&lowInfo)
@@ -97,7 +98,7 @@ url: https://opensource.org/licenses/MIT`
// build out the low-level model
var lowLicense lowbase.License
_ = lowmodel.BuildModel(node.Content[0], &lowLicense)
- _ = lowLicense.Build(nil, node.Content[0], nil)
+ _ = lowLicense.Build(context.Background(), nil, node.Content[0], nil)
// build the high level model
highLicense := NewLicense(&lowLicense)
@@ -140,7 +141,7 @@ func TestInfo_Render(t *testing.T) {
// build low
var lowInfo lowbase.Info
_ = lowmodel.BuildModel(cNode.Content[0], &lowInfo)
- _ = lowInfo.Build(nil, cNode.Content[0], nil)
+ _ = lowInfo.Build(context.Background(), nil, cNode.Content[0], nil)
// build high
highInfo := NewInfo(&lowInfo)
@@ -181,7 +182,7 @@ x-cake:
// build low
var lowInfo lowbase.Info
_ = lowmodel.BuildModel(cNode.Content[0], &lowInfo)
- _ = lowInfo.Build(nil, cNode.Content[0], nil)
+ _ = lowInfo.Build(context.Background(), nil, cNode.Content[0], nil)
// build high
highInfo := NewInfo(&lowInfo)
diff --git a/datamodel/high/base/licence_test.go b/datamodel/high/base/licence_test.go
index 1658298..152ca3b 100644
--- a/datamodel/high/base/licence_test.go
+++ b/datamodel/high/base/licence_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
lowmodel "github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/stretchr/testify/assert"
@@ -44,7 +45,7 @@ url: https://pb33f.io/not-real
// build low
var lowLicense lowbase.License
_ = lowmodel.BuildModel(cNode.Content[0], &lowLicense)
- _ = lowLicense.Build(nil, cNode.Content[0], nil)
+ _ = lowLicense.Build(context.Background(), nil, cNode.Content[0], nil)
// build high
highLicense := NewLicense(&lowLicense)
@@ -92,7 +93,7 @@ func TestLicense_Render_IdentifierAndURL_Error(t *testing.T) {
// build low
var lowLicense lowbase.License
_ = lowmodel.BuildModel(cNode.Content[0], &lowLicense)
- err := lowLicense.Build(nil, cNode.Content[0], nil)
+ err := lowLicense.Build(context.Background(), nil, cNode.Content[0], nil)
assert.Error(t, err)
}
diff --git a/datamodel/high/base/schema.go b/datamodel/high/base/schema.go
index adb5ad6..7c6e137 100644
--- a/datamodel/high/base/schema.go
+++ b/datamodel/high/base/schema.go
@@ -64,44 +64,44 @@ type Schema struct {
// in 3.1 UnevaluatedProperties can be a Schema or a boolean
// https://github.com/pb33f/libopenapi/issues/118
- UnevaluatedProperties *DynamicValue[*SchemaProxy, *bool] `json:"unevaluatedProperties,omitempty" yaml:"unevaluatedProperties,omitempty"`
+ UnevaluatedProperties *DynamicValue[*SchemaProxy, bool] `json:"unevaluatedProperties,omitempty" yaml:"unevaluatedProperties,omitempty"`
// in 3.1 Items can be a Schema or a boolean
Items *DynamicValue[*SchemaProxy, bool] `json:"items,omitempty" yaml:"items,omitempty"`
- // 3.1 only, part of the JSON Schema spec provides a way to identify a subschema
+ // 3.1 only, part of the JSON Schema spec provides a way to identify a sub-schema
Anchor string `json:"$anchor,omitempty" yaml:"$anchor,omitempty"`
// Compatible with all versions
- Not *SchemaProxy `json:"not,omitempty" yaml:"not,omitempty"`
+ Not *SchemaProxy `json:"not,omitempty" yaml:"not,omitempty"`
Properties orderedmap.Map[string, *SchemaProxy] `json:"properties,omitempty" yaml:"properties,omitempty"`
- Title string `json:"title,omitempty" yaml:"title,omitempty"`
- MultipleOf *float64 `json:"multipleOf,omitempty" yaml:"multipleOf,omitempty"`
- Maximum *float64 `json:"maximum,omitempty" yaml:"maximum,omitempty"`
- Minimum *float64 `json:"minimum,omitempty" yaml:"minimum,omitempty"`
- MaxLength *int64 `json:"maxLength,omitempty" yaml:"maxLength,omitempty"`
- MinLength *int64 `json:"minLength,omitempty" yaml:"minLength,omitempty"`
- Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
- Format string `json:"format,omitempty" yaml:"format,omitempty"`
- MaxItems *int64 `json:"maxItems,omitempty" yaml:"maxItems,omitempty"`
- MinItems *int64 `json:"minItems,omitempty" yaml:"minItems,omitempty"`
- UniqueItems *bool `json:"uniqueItems,omitempty" yaml:"uniqueItems,omitempty"`
- MaxProperties *int64 `json:"maxProperties,omitempty" yaml:"maxProperties,omitempty"`
- MinProperties *int64 `json:"minProperties,omitempty" yaml:"minProperties,omitempty"`
- Required []string `json:"required,omitempty" yaml:"required,omitempty"`
- Enum []any `json:"enum,omitempty" yaml:"enum,omitempty"`
- AdditionalProperties any `json:"additionalProperties,omitempty" yaml:"additionalProperties,renderZero,omitempty"`
- Description string `json:"description,omitempty" yaml:"description,omitempty"`
- Default any `json:"default,omitempty" yaml:"default,renderZero,omitempty"`
- Const any `json:"const,omitempty" yaml:"const,renderZero,omitempty"`
- Nullable *bool `json:"nullable,omitempty" yaml:"nullable,omitempty"`
- ReadOnly bool `json:"readOnly,omitempty" yaml:"readOnly,omitempty"` // https://github.com/pb33f/libopenapi/issues/30
- WriteOnly bool `json:"writeOnly,omitempty" yaml:"writeOnly,omitempty"` // https://github.com/pb33f/libopenapi/issues/30
- XML *XML `json:"xml,omitempty" yaml:"xml,omitempty"`
- ExternalDocs *ExternalDoc `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
- Example any `json:"example,omitempty" yaml:"example,omitempty"`
- Deprecated *bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"`
- Extensions map[string]any `json:"-" yaml:"-"`
+ Title string `json:"title,omitempty" yaml:"title,omitempty"`
+ MultipleOf *float64 `json:"multipleOf,omitempty" yaml:"multipleOf,omitempty"`
+ Maximum *float64 `json:"maximum,renderZero,omitempty" yaml:"maximum,renderZero,omitempty"`
+ Minimum *float64 `json:"minimum,renderZero,omitempty" yaml:"minimum,renderZero,omitempty"`
+ MaxLength *int64 `json:"maxLength,omitempty" yaml:"maxLength,omitempty"`
+ MinLength *int64 `json:"minLength,omitempty" yaml:"minLength,omitempty"`
+ Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ MaxItems *int64 `json:"maxItems,omitempty" yaml:"maxItems,omitempty"`
+ MinItems *int64 `json:"minItems,omitempty" yaml:"minItems,omitempty"`
+ UniqueItems *bool `json:"uniqueItems,omitempty" yaml:"uniqueItems,omitempty"`
+ MaxProperties *int64 `json:"maxProperties,omitempty" yaml:"maxProperties,omitempty"`
+ MinProperties *int64 `json:"minProperties,omitempty" yaml:"minProperties,omitempty"`
+ Required []string `json:"required,omitempty" yaml:"required,omitempty"`
+ Enum []any `json:"enum,omitempty" yaml:"enum,omitempty"`
+ AdditionalProperties *DynamicValue[*SchemaProxy, bool] `json:"additionalProperties,renderZero,omitempty" yaml:"additionalProperties,renderZero,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Default any `json:"default,omitempty" yaml:"default,renderZero,omitempty"`
+ Const any `json:"const,omitempty" yaml:"const,renderZero,omitempty"`
+ Nullable *bool `json:"nullable,omitempty" yaml:"nullable,omitempty"`
+ ReadOnly bool `json:"readOnly,omitempty" yaml:"readOnly,omitempty"` // https://github.com/pb33f/libopenapi/issues/30
+ WriteOnly bool `json:"writeOnly,omitempty" yaml:"writeOnly,omitempty"` // https://github.com/pb33f/libopenapi/issues/30
+ XML *XML `json:"xml,omitempty" yaml:"xml,omitempty"`
+ ExternalDocs *ExternalDoc `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+ Example any `json:"example,omitempty" yaml:"example,omitempty"`
+ Deprecated *bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"`
+ Extensions map[string]any `json:"-" yaml:"-"`
low *base.Schema
// Parent Proxy refers back to the low level SchemaProxy that is proxying this schema.
@@ -212,29 +212,22 @@ func NewSchema(schema *base.Schema) *Schema {
Value: schema.UnevaluatedItems.Value,
})
}
- // check if unevaluated properties is a schema
- if !schema.UnevaluatedProperties.IsEmpty() && schema.UnevaluatedProperties.Value.IsA() {
- s.UnevaluatedProperties = &DynamicValue[*SchemaProxy, *bool]{
- A: NewSchemaProxy(
- &lowmodel.NodeReference[*base.SchemaProxy]{
+
+ var unevaluatedProperties *DynamicValue[*SchemaProxy, bool]
+ if !schema.UnevaluatedProperties.IsEmpty() {
+ if schema.UnevaluatedProperties.Value.IsA() {
+ unevaluatedProperties = &DynamicValue[*SchemaProxy, bool]{
+ A: NewSchemaProxy(&lowmodel.NodeReference[*base.SchemaProxy]{
ValueNode: schema.UnevaluatedProperties.ValueNode,
Value: schema.UnevaluatedProperties.Value.A,
- },
- ),
- N: 0,
+ KeyNode: schema.UnevaluatedProperties.KeyNode,
+ }),
+ }
+ } else {
+ unevaluatedProperties = &DynamicValue[*SchemaProxy, bool]{N: 1, B: schema.UnevaluatedProperties.Value.B}
}
}
-
- // check if unevaluated properties is a bool
- if !schema.UnevaluatedProperties.IsEmpty() && schema.UnevaluatedProperties.Value.IsB() {
- s.UnevaluatedProperties = &DynamicValue[*SchemaProxy, *bool]{
- B: schema.UnevaluatedProperties.Value.B,
- N: 1,
- }
- }
-
- if !schema.UnevaluatedProperties.IsEmpty() {
- }
+ s.UnevaluatedProperties = unevaluatedProperties
s.Pattern = schema.Pattern.Value
s.Format = schema.Format.Value
@@ -249,19 +242,23 @@ func NewSchema(schema *base.Schema) *Schema {
s.Type = append(s.Type, schema.Type.Value.B[i].Value)
}
}
- if schema.AdditionalProperties.Value != nil {
- if addPropSchema, ok := schema.AdditionalProperties.Value.(*base.SchemaProxy); ok {
- s.AdditionalProperties = NewSchemaProxy(&lowmodel.NodeReference[*base.SchemaProxy]{
- KeyNode: schema.AdditionalProperties.KeyNode,
- ValueNode: schema.AdditionalProperties.ValueNode,
- Value: addPropSchema,
- })
- } else {
- // TODO: check for slice and map types and unpack correctly.
- s.AdditionalProperties = schema.AdditionalProperties.Value
+ var additionalProperties *DynamicValue[*SchemaProxy, bool]
+ if !schema.AdditionalProperties.IsEmpty() {
+ if schema.AdditionalProperties.Value.IsA() {
+ additionalProperties = &DynamicValue[*SchemaProxy, bool]{
+ A: NewSchemaProxy(&lowmodel.NodeReference[*base.SchemaProxy]{
+ ValueNode: schema.AdditionalProperties.ValueNode,
+ Value: schema.AdditionalProperties.Value.A,
+ KeyNode: schema.AdditionalProperties.KeyNode,
+ }),
+ }
+ } else {
+ additionalProperties = &DynamicValue[*SchemaProxy, bool]{N: 1, B: schema.AdditionalProperties.Value.B}
}
}
+ s.AdditionalProperties = additionalProperties
+
s.Description = schema.Description.Value
s.Default = schema.Default.Value
s.Const = schema.Const.Value
@@ -306,7 +303,6 @@ func NewSchema(schema *base.Schema) *Schema {
s.Anchor = schema.Anchor.Value
}
- // TODO: check this behavior.
for i := range schema.Enum.Value {
enum = append(enum, schema.Enum.Value[i].Value)
}
@@ -423,7 +419,8 @@ func NewSchema(schema *base.Schema) *Schema {
Value: schema.Items.Value.A,
KeyNode: schema.Items.KeyNode,
},
- )}
+ ),
+ }
} else {
items = &DynamicValue[*SchemaProxy, bool]{N: 1, B: schema.Items.Value.B}
}
@@ -437,7 +434,7 @@ func NewSchema(schema *base.Schema) *Schema {
completeChildren := 0
if children > 0 {
allDone:
- for true {
+ for {
select {
case <-polyCompletedChan:
completeChildren++
@@ -471,8 +468,8 @@ func (s *Schema) Render() ([]byte, error) {
return yaml.Marshal(s)
}
-// RenderInline will return a YAML representation of the Schema object as a byte slice. All of the
-// $ref values will be inlined, as in resolved in place.
+// RenderInline will return a YAML representation of the Schema object as a byte slice.
+// All the $ref values will be inlined, as in resolved in place.
//
// Make sure you don't have any circular references!
func (s *Schema) RenderInline() ([]byte, error) {
@@ -483,11 +480,26 @@ func (s *Schema) RenderInline() ([]byte, error) {
// MarshalYAML will create a ready to render YAML representation of the ExternalDoc object.
func (s *Schema) MarshalYAML() (interface{}, error) {
nb := high.NewNodeBuilder(s, s.low)
+
+ // determine index version
+ idx := s.GoLow().Index
+ if idx != nil {
+ if idx.GetConfig().SpecInfo != nil {
+ nb.Version = idx.GetConfig().SpecInfo.VersionNumeric
+ }
+ }
return nb.Render(), nil
}
func (s *Schema) MarshalYAMLInline() (interface{}, error) {
nb := high.NewNodeBuilder(s, s.low)
nb.Resolve = true
+ // determine index version
+ idx := s.GoLow().Index
+ if idx != nil {
+ if idx.GetConfig().SpecInfo != nil {
+ nb.Version = idx.GetConfig().SpecInfo.VersionNumeric
+ }
+ }
return nb.Render(), nil
}
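
With AdditionalProperties and UnevaluatedProperties now typed as *DynamicValue[*SchemaProxy, bool], callers that previously type-asserted the raw `any` value (`.(*SchemaProxy)` or `.(bool)`) read the wrapper instead. A rough sketch of the new access pattern, assuming `schema` is a high-level *Schema from `datamodel/high/base` built as in the tests:

```go
// inspectAdditionalProperties shows how to read the new DynamicValue wrapper.
func inspectAdditionalProperties(schema *base.Schema) {
	ap := schema.AdditionalProperties
	if ap == nil {
		return // the keyword was not set at all
	}
	if ap.A != nil {
		// additionalProperties held a schema
		_ = ap.A.Schema()
		return
	}
	// additionalProperties held a boolean
	_ = ap.B
}
```
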
diff --git a/datamodel/high/base/schema_proxy.go b/datamodel/high/base/schema_proxy.go
index 49b7e0f..78a7c9c 100644
--- a/datamodel/high/base/schema_proxy.go
+++ b/datamodel/high/base/schema_proxy.go
@@ -7,6 +7,7 @@ import (
"github.com/pb33f/libopenapi/datamodel/high"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
+ "github.com/pb33f/libopenapi/index"
"github.com/pb33f/libopenapi/utils"
"gopkg.in/yaml.v3"
"sync"
@@ -114,6 +115,15 @@ func (sp *SchemaProxy) GetReference() string {
return sp.schema.Value.GetSchemaReference()
}
+// GetReferenceOrigin returns a pointer to the index.NodeOrigin of the $ref if this SchemaProxy is a reference to another Schema.
+// Returns nil if the origin cannot be found (which means there is a bug, and we need to fix it).
+func (sp *SchemaProxy) GetReferenceOrigin() *index.NodeOrigin {
+ if sp.schema != nil {
+ return sp.schema.Value.GetSchemaReferenceLocation()
+ }
+ return nil
+}
+
// BuildSchema operates the same way as Schema, except it will return any error along with the *Schema
func (sp *SchemaProxy) BuildSchema() (*Schema, error) {
if sp.rendered != nil {
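
A short usage sketch for the new GetReferenceOrigin helper — `proxy` is assumed to be a high-level *SchemaProxy taken from a built document, and the fields of index.NodeOrigin are not enumerated here:

```go
// printReferenceOrigin shows where a $ref schema proxy was resolved from.
func printReferenceOrigin(proxy *base.SchemaProxy) {
	if !proxy.IsReference() {
		return // inline schemas have no reference origin
	}
	if origin := proxy.GetReferenceOrigin(); origin != nil {
		fmt.Printf("ref %s resolved at: %+v\n", proxy.GetReference(), origin)
	}
}
```
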
diff --git a/datamodel/high/base/schema_proxy_test.go b/datamodel/high/base/schema_proxy_test.go
index 38e98fd..ca6abdf 100644
--- a/datamodel/high/base/schema_proxy_test.go
+++ b/datamodel/high/base/schema_proxy_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
@@ -40,7 +41,7 @@ func TestSchemaProxy_MarshalYAML(t *testing.T) {
_ = yaml.Unmarshal([]byte(ymlSchema), &node)
lowProxy := new(lowbase.SchemaProxy)
- err := lowProxy.Build(nil, node.Content[0], idx)
+ err := lowProxy.Build(context.Background(), nil, node.Content[0], idx)
assert.NoError(t, err)
lowRef := low.NodeReference[*lowbase.SchemaProxy]{
@@ -49,6 +50,9 @@ func TestSchemaProxy_MarshalYAML(t *testing.T) {
sp := NewSchemaProxy(&lowRef)
+ origin := sp.GetReferenceOrigin()
+ assert.Nil(t, origin)
+
rend, _ := sp.Render()
assert.Equal(t, "$ref: '#/components/schemas/nice'", strings.TrimSpace(string(rend)))
@@ -65,3 +69,8 @@ func TestCreateSchemaProxyRef(t *testing.T) {
assert.Equal(t, "#/components/schemas/MySchema", sp.GetReference())
assert.True(t, sp.IsReference())
}
+
+func TestSchemaProxy_NoSchema_GetOrigin(t *testing.T) {
+ sp := &SchemaProxy{}
+ assert.Nil(t, sp.GetReferenceOrigin())
+}
diff --git a/datamodel/high/base/schema_test.go b/datamodel/high/base/schema_test.go
index 0e4df24..f11b0e3 100644
--- a/datamodel/high/base/schema_test.go
+++ b/datamodel/high/base/schema_test.go
@@ -4,10 +4,13 @@
package base
import (
+ "context"
"fmt"
"strings"
"testing"
+ "github.com/pb33f/libopenapi/datamodel"
+
"github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
@@ -49,7 +52,7 @@ func TestNewSchemaProxy(t *testing.T) {
_ = yaml.Unmarshal([]byte(yml), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], idx)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], idx)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -90,7 +93,7 @@ func TestNewSchemaProxyRender(t *testing.T) {
_ = yaml.Unmarshal([]byte(yml), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], idx)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], idx)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -111,7 +114,6 @@ func TestNewSchemaProxyRender(t *testing.T) {
rice:
$ref: '#/components/schemas/rice'`
assert.Equal(t, desired, strings.TrimSpace(string(rend)))
-
}
func TestNewSchemaProxy_WithObject(t *testing.T) {
@@ -217,10 +219,7 @@ properties:
type: number
description: a number
example: "2"
- additionalProperties:
- - chicken
- - nugget
- - soup
+ additionalProperties: false
somethingB:
type: object
exclusiveMinimum: true
@@ -241,8 +240,7 @@ properties:
attribute: true
x-pizza: love
additionalProperties:
- why: yes
- thatIs: true
+ type: string
additionalProperties: true
required:
- them
@@ -274,7 +272,7 @@ $anchor: anchor`
_ = yaml.Unmarshal([]byte(testSpec), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -315,12 +313,12 @@ $anchor: anchor`
assert.Equal(t, "anchor", compiled.Anchor)
wentLow := compiled.GoLow()
- assert.Equal(t, 129, wentLow.AdditionalProperties.ValueNode.Line)
+ assert.Equal(t, 125, wentLow.AdditionalProperties.ValueNode.Line)
assert.NotNil(t, compiled.GoLowUntyped())
// now render it out!
schemaBytes, _ := compiled.Render()
- assert.Len(t, schemaBytes, 3494)
+ assert.Len(t, schemaBytes, 3417)
}
func TestSchemaObjectWithAllOfSequenceOrder(t *testing.T) {
@@ -348,7 +346,7 @@ func TestSchemaObjectWithAllOfSequenceOrder(t *testing.T) {
}
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -485,7 +483,7 @@ required: [cake, fish]`
_ = yaml.Unmarshal([]byte(testSpec), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -504,7 +502,7 @@ required: [cake, fish]`
assert.Equal(t, float64(334), compiled.Properties.GetOrZero("somethingB").Schema().ExclusiveMaximum.B)
assert.Len(t, compiled.Properties.GetOrZero("somethingB").Schema().Properties.GetOrZero("somethingBProp").Schema().Type, 2)
- assert.Equal(t, "nice", compiled.AdditionalProperties.(*SchemaProxy).Schema().Description)
+ assert.Equal(t, "nice", compiled.AdditionalProperties.A.Schema().Description)
wentLow := compiled.GoLow()
assert.Equal(t, 97, wentLow.AdditionalProperties.ValueNode.Line)
@@ -541,7 +539,7 @@ func TestSchemaProxy_GoLow(t *testing.T) {
_ = yaml.Unmarshal([]byte(ymlSchema), &node)
lowProxy := new(lowbase.SchemaProxy)
- err := lowProxy.Build(nil, node.Content[0], idx)
+ err := lowProxy.Build(context.Background(), nil, node.Content[0], idx)
assert.NoError(t, err)
lowRef := low.NodeReference[*lowbase.SchemaProxy]{
@@ -556,7 +554,6 @@ func TestSchemaProxy_GoLow(t *testing.T) {
spNil := NewSchemaProxy(nil)
assert.Nil(t, spNil.GoLow())
assert.Nil(t, spNil.GoLowUntyped())
-
}
func getHighSchema(t *testing.T, yml string) *Schema {
@@ -567,7 +564,7 @@ func getHighSchema(t *testing.T, yml string) *Schema {
// build out the low-level model
var lowSchema lowbase.Schema
assert.NoError(t, low.BuildModel(node.Content[0], &lowSchema))
- assert.NoError(t, lowSchema.Build(node.Content[0], nil))
+ assert.NoError(t, lowSchema.Build(context.Background(), node.Content[0], nil))
// build the high level model
return NewSchema(&lowSchema)
@@ -728,7 +725,7 @@ properties:
// build out the low-level model
var lowSchema lowbase.Schema
_ = low.BuildModel(node.Content[0], &lowSchema)
- _ = lowSchema.Build(node.Content[0], nil)
+ _ = lowSchema.Build(context.Background(), node.Content[0], nil)
// build the high level model
highSchema := NewSchema(&lowSchema)
@@ -757,7 +754,7 @@ properties:
// build out the low-level model
var lowSchema lowbase.SchemaProxy
_ = low.BuildModel(node.Content[0], &lowSchema)
- _ = lowSchema.Build(nil, node.Content[0], nil)
+ _ = lowSchema.Build(context.Background(), nil, node.Content[0], nil)
// build the high level schema proxy
highSchema := NewSchemaProxy(&low.NodeReference[*lowbase.SchemaProxy]{
@@ -817,7 +814,7 @@ allOf:
_ = yaml.Unmarshal([]byte(testSpec), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -836,7 +833,6 @@ allOf:
// now render it out, it should be identical.
schemaBytes, _ := compiled.Render()
assert.Equal(t, testSpec, string(schemaBytes))
-
}
func TestNewSchemaProxy_RenderSchemaWithMultipleObjectTypes(t *testing.T) {
@@ -881,7 +877,7 @@ items:
_ = yaml.Unmarshal([]byte(testSpec), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -934,8 +930,7 @@ func TestNewSchemaProxy_RenderSchemaEnsurePropertyOrdering(t *testing.T) {
attribute: true
x-pizza: love
additionalProperties:
- why: yes
- thatIs: true
+ type: string
additionalProperties: true
xml:
name: XML Thing`
@@ -944,7 +939,7 @@ xml:
_ = yaml.Unmarshal([]byte(testSpec), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -973,7 +968,7 @@ func TestNewSchemaProxy_RenderSchemaCheckDiscriminatorMappingOrder(t *testing.T)
_ = yaml.Unmarshal([]byte(testSpec), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -989,60 +984,6 @@ func TestNewSchemaProxy_RenderSchemaCheckDiscriminatorMappingOrder(t *testing.T)
assert.Equal(t, testSpec, strings.TrimSpace(string(schemaBytes)))
}
-func TestNewSchemaProxy_RenderSchemaCheckAdditionalPropertiesSlice(t *testing.T) {
- testSpec := `additionalProperties:
- - one
- - two
- - miss a few
- - ninety nine
- - hundred`
-
- var compNode yaml.Node
- _ = yaml.Unmarshal([]byte(testSpec), &compNode)
-
- sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
- assert.NoError(t, err)
-
- lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
- Value: sp,
- ValueNode: compNode.Content[0],
- }
-
- schemaProxy := NewSchemaProxy(&lowproxy)
- compiled := schemaProxy.Schema()
-
- // now render it out, it should be identical.
- schemaBytes, _ := compiled.Render()
- assert.Len(t, schemaBytes, 91)
-}
-
-func TestNewSchemaProxy_RenderSchemaCheckAdditionalPropertiesSliceMap(t *testing.T) {
- testSpec := `additionalProperties:
- - nice: cake
- - yummy: beer
- - hot: coffee`
-
- var compNode yaml.Node
- _ = yaml.Unmarshal([]byte(testSpec), &compNode)
-
- sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
- assert.NoError(t, err)
-
- lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
- Value: sp,
- ValueNode: compNode.Content[0],
- }
-
- schemaProxy := NewSchemaProxy(&lowproxy)
- compiled := schemaProxy.Schema()
-
- // now render it out, it should be identical.
- schemaBytes, _ := compiled.Render()
- assert.Len(t, schemaBytes, 75)
-}
-
func TestNewSchemaProxy_CheckDefaultBooleanFalse(t *testing.T) {
testSpec := `default: false`
@@ -1050,7 +991,7 @@ func TestNewSchemaProxy_CheckDefaultBooleanFalse(t *testing.T) {
_ = yaml.Unmarshal([]byte(testSpec), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -1073,7 +1014,7 @@ func TestNewSchemaProxy_RenderAdditionalPropertiesFalse(t *testing.T) {
_ = yaml.Unmarshal([]byte(testSpec), &compNode)
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], nil)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -1117,7 +1058,7 @@ components:
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], idx)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], idx)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -1169,7 +1110,7 @@ components:
sp := new(lowbase.SchemaProxy)
- err := sp.Build(nil, compNode.Content[0], idx)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], idx)
assert.NoError(t, err)
lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
@@ -1192,8 +1133,7 @@ unevaluatedProperties: true
`
highSchema := getHighSchema(t, yml)
- value := true
- assert.EqualValues(t, &value, highSchema.UnevaluatedProperties.B)
+ assert.True(t, highSchema.UnevaluatedProperties.B)
}
func TestUnevaluatedPropertiesBoolean_False(t *testing.T) {
@@ -1203,6 +1143,147 @@ unevaluatedProperties: false
`
highSchema := getHighSchema(t, yml)
- value := false
- assert.EqualValues(t, &value, highSchema.UnevaluatedProperties.B)
+ assert.False(t, highSchema.UnevaluatedProperties.B)
+}
+
+func TestUnevaluatedPropertiesBoolean_Unset(t *testing.T) {
+ yml := `
+type: number
+`
+ highSchema := getHighSchema(t, yml)
+
+ assert.Nil(t, highSchema.UnevaluatedProperties)
+}
+
+func TestAdditionalProperties(t *testing.T) {
+ testSpec := `type: object
+properties:
+ additionalPropertiesSimpleSchema:
+ type: object
+ additionalProperties:
+ type: string
+ additionalPropertiesBool:
+ type: object
+ additionalProperties: true
+ additionalPropertiesAnyOf:
+ type: object
+ additionalProperties:
+ anyOf:
+ - type: string
+ - type: array
+ items:
+ type: string
+`
+
+ var compNode yaml.Node
+ _ = yaml.Unmarshal([]byte(testSpec), &compNode)
+
+ sp := new(lowbase.SchemaProxy)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
+ assert.NoError(t, err)
+
+ lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
+ Value: sp,
+ ValueNode: compNode.Content[0],
+ }
+
+ schemaProxy := NewSchemaProxy(&lowproxy)
+ compiled := schemaProxy.Schema()
+
+ assert.Equal(t, []string{"string"}, compiled.Properties.GetOrZero("additionalPropertiesSimpleSchema").Schema().AdditionalProperties.A.Schema().Type)
+ assert.Equal(t, true, compiled.Properties.GetOrZero("additionalPropertiesBool").Schema().AdditionalProperties.B)
+ assert.Equal(t, []string{"string"}, compiled.Properties.GetOrZero("additionalPropertiesAnyOf").Schema().AdditionalProperties.A.Schema().AnyOf[0].Schema().Type)
+}
+
+func TestSchema_RenderProxyWithConfig_3(t *testing.T) {
+ testSpec := `exclusiveMinimum: true`
+
+ var compNode yaml.Node
+ _ = yaml.Unmarshal([]byte(testSpec), &compNode)
+
+ sp := new(lowbase.SchemaProxy)
+ err := sp.Build(context.Background(), nil, compNode.Content[0], nil)
+ assert.NoError(t, err)
+
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.0,
+ }
+ lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
+ Value: sp,
+ ValueNode: compNode.Content[0],
+ }
+
+ schemaProxy := NewSchemaProxy(&lowproxy)
+ compiled := schemaProxy.Schema()
+
+ // now render it out, it should be identical.
+ schemaBytes, _ := compiled.Render()
+ assert.Equal(t, testSpec, strings.TrimSpace(string(schemaBytes)))
+}
+
+func TestSchema_RenderProxyWithConfig_Corrected_31(t *testing.T) {
+ testSpec := `exclusiveMinimum: true`
+ testSpecCorrect := `exclusiveMinimum: 0`
+
+ var compNode yaml.Node
+ _ = yaml.Unmarshal([]byte(testSpec), &compNode)
+
+ sp := new(lowbase.SchemaProxy)
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.1,
+ }
+ idx := index.NewSpecIndexWithConfig(compNode.Content[0], config)
+
+ err := sp.Build(context.Background(), nil, compNode.Content[0], idx)
+ assert.NoError(t, err)
+
+ lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
+ Value: sp,
+ ValueNode: compNode.Content[0],
+ }
+
+ schemaProxy := NewSchemaProxy(&lowproxy)
+ compiled := schemaProxy.Schema()
+
+ // now render it out, it should be identical.
+ schemaBytes, _ := compiled.Render()
+ assert.Equal(t, testSpecCorrect, strings.TrimSpace(string(schemaBytes)))
+
+ schemaBytes, _ = compiled.RenderInline()
+ assert.Equal(t, testSpecCorrect, strings.TrimSpace(string(schemaBytes)))
+}
+
+func TestSchema_RenderProxyWithConfig_Corrected_3(t *testing.T) {
+ testSpec := `exclusiveMinimum: 0`
+ testSpecCorrect := `exclusiveMinimum: false`
+
+ var compNode yaml.Node
+ _ = yaml.Unmarshal([]byte(testSpec), &compNode)
+
+ sp := new(lowbase.SchemaProxy)
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.0,
+ }
+ idx := index.NewSpecIndexWithConfig(compNode.Content[0], config)
+
+ err := sp.Build(context.Background(), nil, compNode.Content[0], idx)
+ assert.NoError(t, err)
+
+ lowproxy := low.NodeReference[*lowbase.SchemaProxy]{
+ Value: sp,
+ ValueNode: compNode.Content[0],
+ }
+
+ schemaProxy := NewSchemaProxy(&lowproxy)
+ compiled := schemaProxy.Schema()
+
+ // now render it out, it should be identical.
+ schemaBytes, _ := compiled.Render()
+ assert.Equal(t, testSpecCorrect, strings.TrimSpace(string(schemaBytes)))
+
+ schemaBytes, _ = compiled.RenderInline()
+ assert.Equal(t, testSpecCorrect, strings.TrimSpace(string(schemaBytes)))
}
diff --git a/datamodel/high/base/security_requirement_test.go b/datamodel/high/base/security_requirement_test.go
index b6f8b71..c5acfac 100644
--- a/datamodel/high/base/security_requirement_test.go
+++ b/datamodel/high/base/security_requirement_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"strings"
"testing"
@@ -30,7 +31,7 @@ cake:
var lowExt lowbase.SecurityRequirement
_ = lowmodel.BuildModel(cNode.Content[0], &lowExt)
- _ = lowExt.Build(nil, cNode.Content[0], nil)
+ _ = lowExt.Build(context.Background(), nil, cNode.Content[0], nil)
highExt := NewSecurityRequirement(&lowExt)
diff --git a/datamodel/high/base/tag_test.go b/datamodel/high/base/tag_test.go
index bf3b1b3..44ee50a 100644
--- a/datamodel/high/base/tag_test.go
+++ b/datamodel/high/base/tag_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"fmt"
"strings"
"testing"
@@ -28,7 +29,7 @@ x-hack: code`
var lowTag lowbase.Tag
_ = lowmodel.BuildModel(cNode.Content[0], &lowTag)
- _ = lowTag.Build(nil, cNode.Content[0], nil)
+ _ = lowTag.Build(context.Background(), nil, cNode.Content[0], nil)
highTag := NewTag(&lowTag)
@@ -75,7 +76,7 @@ x-hack: code`
// build out the low-level model
var lowTag lowbase.Tag
_ = lowmodel.BuildModel(node.Content[0], &lowTag)
- _ = lowTag.Build(nil, node.Content[0], nil)
+ _ = lowTag.Build(context.Background(), nil, node.Content[0], nil)
// build the high level tag
highTag := NewTag(&lowTag)
diff --git a/datamodel/high/node_builder.go b/datamodel/high/node_builder.go
index a7bc980..6479c33 100644
--- a/datamodel/high/node_builder.go
+++ b/datamodel/high/node_builder.go
@@ -30,6 +30,7 @@ type NodeEntry struct {
// NodeBuilder is a structure used by libopenapi high-level objects, to render themselves back to YAML.
// this allows high-level objects to be 'mutable' because all changes will be rendered out.
type NodeBuilder struct {
+ Version float32
Nodes []*NodeEntry
High any
Low any
@@ -587,8 +588,12 @@ func (n *NodeBuilder) AddYAMLNode(parent *yaml.Node, entry *NodeEntry) *yaml.Nod
}
if b, bok := value.(*float64); bok {
encodeSkip = true
- if *b > 0 {
- valueNode = utils.CreateFloatNode(strconv.FormatFloat(*b, 'f', -1, 64))
+ if *b > 0 || (entry.RenderZero && entry.Line > 0) {
+ if *b > 0 {
+ valueNode = utils.CreateFloatNode(strconv.FormatFloat(*b, 'f', -1, 64))
+ } else {
+ valueNode = utils.CreateIntNode(strconv.FormatFloat(*b, 'f', -1, 64))
+ }
valueNode.Line = line
}
}
@@ -642,7 +647,7 @@ func (n *NodeBuilder) extractLowMapKeysWrapped(iu reflect.Value, x string, order
}
func (n *NodeBuilder) extractLowMapKeys(fg reflect.Value, x string, found bool, orderedCollection []*NodeEntry, m reflect.Value, k reflect.Value) (bool, []*NodeEntry) {
- if !fg.IsZero() {
+ if fg.IsValid() && !fg.IsZero() {
for j, ky := range fg.MapKeys() {
hu := ky.Interface()
if we, wok := hu.(low.HasKeyNode); wok {
diff --git a/datamodel/high/node_builder_test.go b/datamodel/high/node_builder_test.go
index 80589ca..9383e72 100644
--- a/datamodel/high/node_builder_test.go
+++ b/datamodel/high/node_builder_test.go
@@ -90,6 +90,7 @@ type test1 struct {
Thugg *bool `yaml:"thugg,renderZero"`
Thurr *int64 `yaml:"thurr,omitempty"`
Thral *float64 `yaml:"thral,omitempty"`
+ Throo *float64 `yaml:"throo,renderZero,omitempty"`
Tharg []string `yaml:"tharg,omitempty"`
Type []string `yaml:"type,omitempty"`
Throg []*key `yaml:"throg,omitempty"`
@@ -421,8 +422,9 @@ func TestNewNodeBuilder_MapKeyHasValue(t *testing.T) {
}
type test1low struct {
- Thrug key `yaml:"thrug"`
- Thugg *bool `yaml:"thugg"`
+ Thrug key `yaml:"thrug"`
+ Thugg *bool `yaml:"thugg"`
+ Throo *float32 `yaml:"throo"`
}
t2 := test1low{
@@ -454,8 +456,9 @@ func TestNewNodeBuilder_MapKeyHasValueThatHasValue(t *testing.T) {
}
type test1low struct {
- Thomp key `yaml:"thomp"`
- Thugg *bool `yaml:"thugg"`
+ Thomp key `yaml:"thomp"`
+ Thugg *bool `yaml:"thugg"`
+ Throo *float32 `yaml:"throo"`
}
t2 := test1low{
@@ -495,6 +498,7 @@ func TestNewNodeBuilder_MapKeyHasValueThatHasValueMatch(t *testing.T) {
type test1low struct {
Thomp low.NodeReference[map[key]string] `yaml:"thomp"`
Thugg *bool `yaml:"thugg"`
+ Throo *float32 `yaml:"throo"`
}
g := low.NodeReference[map[key]string]{
@@ -529,6 +533,7 @@ func TestNewNodeBuilder_MapKeyHasValueThatHasValueMatchKeyNode(t *testing.T) {
type test1low struct {
Thomp low.NodeReference[map[key]string] `yaml:"thomp"`
Thugg *bool `yaml:"thugg"`
+ Throo *float32 `yaml:"throo"`
}
g := low.NodeReference[map[key]string]{
@@ -563,6 +568,7 @@ func TestNewNodeBuilder_MapKeyHasValueThatHasValueMatch_NoWrap(t *testing.T) {
type test1low struct {
Thomp map[key]string `yaml:"thomp"`
Thugg *bool `yaml:"thugg"`
+ Throo *float32 `yaml:"throo"`
}
t2 := test1low{
@@ -922,6 +928,40 @@ func TestNewNodeBuilder_TestRenderZero(t *testing.T) {
assert.Equal(t, desired, strings.TrimSpace(string(data)))
}
+func TestNewNodeBuilder_TestRenderZero_Float(t *testing.T) {
+
+ f := 0.0
+ t1 := test1{
+ Throo: &f,
+ }
+
+ nb := NewNodeBuilder(&t1, &t1)
+ node := nb.Render()
+
+ data, _ := yaml.Marshal(node)
+
+ desired := `throo: 0`
+
+ assert.Equal(t, desired, strings.TrimSpace(string(data)))
+}
+
+func TestNewNodeBuilder_TestRenderZero_Float_NotZero(t *testing.T) {
+
+ f := 0.12
+ t1 := test1{
+ Throo: &f,
+ }
+
+ nb := NewNodeBuilder(&t1, &t1)
+ node := nb.Render()
+
+ data, _ := yaml.Marshal(node)
+
+ desired := `throo: 0.12`
+
+ assert.Equal(t, desired, strings.TrimSpace(string(data)))
+}
+
func TestNewNodeBuilder_TestRenderServerVariableSimulation(t *testing.T) {
t1 := test1{
@@ -961,7 +1001,8 @@ func TestNewNodeBuilder_ShouldHaveNotDoneTestsLikeThisOhWell(t *testing.T) {
type t1low struct {
Thril low.NodeReference[map[low.KeyReference[string]]low.ValueReference[*key]]
- Thugg *bool `yaml:"thugg"`
+ Thugg *bool `yaml:"thugg"`
+ Throo *float32 `yaml:"throo"`
}
t1 := test1{
diff --git a/datamodel/high/v2/path_item_test.go b/datamodel/high/v2/path_item_test.go
index 752d0b8..acb53c2 100644
--- a/datamodel/high/v2/path_item_test.go
+++ b/datamodel/high/v2/path_item_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/index"
@@ -36,7 +37,7 @@ options:
var n v2.PathItem
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewPathItem(&n)
diff --git a/datamodel/high/v2/paths.go b/datamodel/high/v2/paths.go
index 462076a..ad9d68b 100644
--- a/datamodel/high/v2/paths.go
+++ b/datamodel/high/v2/paths.go
@@ -4,6 +4,7 @@
package v2
import (
+ "github.com/pb33f/libopenapi/datamodel"
"github.com/pb33f/libopenapi/datamodel/high"
"github.com/pb33f/libopenapi/datamodel/low"
v2low "github.com/pb33f/libopenapi/datamodel/low/v2"
@@ -34,7 +35,7 @@ func NewPaths(paths *v2low.Paths) *Paths {
pathItems.Set(result.key, result.result)
return nil
}
- _ = orderedmap.TranslateMapParallel[low.KeyReference[string], low.ValueReference[*v2low.PathItem], asyncResult[*PathItem]](
+ _ = datamodel.TranslateMapParallel[low.KeyReference[string], low.ValueReference[*v2low.PathItem], asyncResult[*PathItem]](
paths.PathItems, translateFunc, resultFunc,
)
p.PathItems = pathItems
diff --git a/datamodel/high/v2/swagger_test.go b/datamodel/high/v2/swagger_test.go
index 75aefc9..935a8aa 100644
--- a/datamodel/high/v2/swagger_test.go
+++ b/datamodel/high/v2/swagger_test.go
@@ -4,22 +4,23 @@
package v2
import (
+ "os"
+
"github.com/pb33f/libopenapi/datamodel"
v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/orderedmap"
"github.com/stretchr/testify/assert"
- "io/ioutil"
"testing"
)
var doc *v2.Swagger
func initTest() {
- data, _ := ioutil.ReadFile("../../../test_specs/petstorev2-complete.yaml")
+ data, _ := os.ReadFile("../../../test_specs/petstorev2-complete.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
- doc, err = v2.CreateDocument(info)
+ var err error
+ doc, err = v2.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
if err != nil {
panic("broken something")
}
diff --git a/datamodel/high/v3/callback_test.go b/datamodel/high/v3/callback_test.go
index e00f1bc..64976e3 100644
--- a/datamodel/high/v3/callback_test.go
+++ b/datamodel/high/v3/callback_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"strings"
"testing"
@@ -67,7 +68,7 @@ func TestCallback_MarshalYAML(t *testing.T) {
var n v3.Callback
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewCallback(&n)
diff --git a/datamodel/high/v3/components.go b/datamodel/high/v3/components.go
index 9836334..044e443 100644
--- a/datamodel/high/v3/components.go
+++ b/datamodel/high/v3/components.go
@@ -6,6 +6,7 @@ package v3
import (
"sync"
+ "github.com/pb33f/libopenapi/datamodel"
"github.com/pb33f/libopenapi/datamodel/high"
highbase "github.com/pb33f/libopenapi/datamodel/high/base"
lowmodel "github.com/pb33f/libopenapi/datamodel/low"
@@ -121,7 +122,7 @@ func buildComponent[IN any, OUT any](inMap orderedmap.Map[lowmodel.KeyReference[
outMap.Set(value.key, value.res)
return nil
}
- _ = orderedmap.TranslateMapParallel(inMap, translateFunc, resultFunc)
+ _ = datamodel.TranslateMapParallel(inMap, translateFunc, resultFunc)
}
// buildSchema builds a schema from low level structs.
@@ -139,7 +140,7 @@ func buildSchema(inMap orderedmap.Map[lowmodel.KeyReference[string], lowmodel.Va
outMap.Set(value.key, value.res)
return nil
}
- _ = orderedmap.TranslateMapParallel(inMap, translateFunc, resultFunc)
+ _ = datamodel.TranslateMapParallel(inMap, translateFunc, resultFunc)
}
// GoLow returns the low-level Components instance used to create the high-level one.
diff --git a/datamodel/high/v3/components_test.go b/datamodel/high/v3/components_test.go
index b925c41..2f15822 100644
--- a/datamodel/high/v3/components_test.go
+++ b/datamodel/high/v3/components_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"strings"
"testing"
@@ -48,7 +49,7 @@ func TestComponents_MarshalYAML(t *testing.T) {
var n v3.Components
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), idxNode.Content[0], idx)
r := NewComponents(&n)
diff --git a/datamodel/high/v3/document.go b/datamodel/high/v3/document.go
index d3806d5..30a0e42 100644
--- a/datamodel/high/v3/document.go
+++ b/datamodel/high/v3/document.go
@@ -91,7 +91,11 @@ type Document struct {
//
// This property is not a part of the OpenAPI schema, this is custom to libopenapi.
Index *index.SpecIndex `json:"-" yaml:"-"`
- low *low.Document
+
+ // Rolodex is the low-level rolodex used when creating this document.
+ // This is an internal structure and not part of the OpenAPI schema.
+ Rolodex *index.Rolodex `json:"-" yaml:"-"`
+ low *low.Document
}
// NewDocument will create a new high-level Document from a low-level one.
diff --git a/datamodel/high/v3/document_test.go b/datamodel/high/v3/document_test.go
index 43eb027..6113faf 100644
--- a/datamodel/high/v3/document_test.go
+++ b/datamodel/high/v3/document_test.go
@@ -5,17 +5,25 @@ package v3
import (
"fmt"
+ "log"
+ "log/slog"
+ "net/http"
"net/url"
"os"
+ "os/exec"
+ "path/filepath"
"strings"
"testing"
+ "time"
"github.com/pb33f/libopenapi/datamodel"
v2 "github.com/pb33f/libopenapi/datamodel/high/v2"
lowv2 "github.com/pb33f/libopenapi/datamodel/low/v2"
lowv3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/pb33f/libopenapi/orderedmap"
+ "github.com/pb33f/libopenapi/utils"
"github.com/stretchr/testify/assert"
+ "gopkg.in/yaml.v3"
)
var lowDoc *lowv3.Document
@@ -23,7 +31,7 @@ var lowDoc *lowv3.Document
func initTest() {
data, _ := os.ReadFile("../../../test_specs/burgershop.openapi.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
+ var err error
lowDoc, err = lowv3.CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
AllowFileReferences: true,
AllowRemoteReferences: true,
@@ -221,7 +229,7 @@ func TestNewDocument_Components_Schemas(t *testing.T) {
d := h.Components.Schemas.GetOrZero("Drink")
assert.Len(t, d.Schema().Required, 2)
- assert.True(t, d.Schema().AdditionalProperties.(bool))
+ assert.True(t, d.Schema().AdditionalProperties.B)
assert.Equal(t, "drinkType", d.Schema().Discriminator.PropertyName)
assert.Equal(t, "some value", d.Schema().Discriminator.Mapping["drink"])
assert.Equal(t, 516, d.Schema().Discriminator.GoLow().PropertyName.ValueNode.Line)
@@ -378,15 +386,14 @@ func testBurgerShop(t *testing.T, h *Document, checkLines bool) {
assert.Equal(t, 310, okResp.Links.GetOrZero("LocateBurger").GoLow().OperationId.ValueNode.Line)
assert.Equal(t, 118, burgersOp.Post.Security[0].GoLow().Requirements.ValueNode.Line)
}
-
}
func TestStripeAsDoc(t *testing.T) {
data, _ := os.ReadFile("../../../test_specs/stripe.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
- lowDoc, err = lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
- assert.Len(t, err, 3)
+ var err error
+ lowDoc, err = lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
+ assert.Len(t, utils.UnwrapErrors(err), 3)
d := NewDocument(lowDoc)
assert.NotNil(t, d)
}
@@ -394,18 +401,18 @@ func TestStripeAsDoc(t *testing.T) {
func TestK8sAsDoc(t *testing.T) {
data, _ := os.ReadFile("../../../test_specs/k8s.json")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
- lowSwag, err := lowv2.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ var err error
+ lowSwag, err := lowv2.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
d := v2.NewSwaggerDocument(lowSwag)
- assert.Len(t, err, 0)
+ assert.Len(t, utils.UnwrapErrors(err), 0)
assert.NotNil(t, d)
}
func TestAsanaAsDoc(t *testing.T) {
data, _ := os.ReadFile("../../../test_specs/asana.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
- lowDoc, err = lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ var err error
+ lowDoc, err = lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
if err != nil {
panic("broken something")
}
@@ -414,10 +421,53 @@ func TestAsanaAsDoc(t *testing.T) {
assert.Equal(t, 118, orderedmap.Len(d.Paths.PathItems))
}
+func TestDigitalOceanAsDocViaCheckout(t *testing.T) {
+ // this is a full checkout of the digitalocean API repo.
+ tmp, _ := os.MkdirTemp("", "openapi")
+ cmd := exec.Command("git", "clone", "https://github.com/digitalocean/openapi", tmp)
+ defer os.RemoveAll(tmp)
+
+ err := cmd.Run()
+ if err != nil {
+ log.Fatalf("cmd.Run() failed with %s\n", err)
+ }
+
+ spec, _ := filepath.Abs(filepath.Join(tmp, "specification", "DigitalOcean-public.v2.yaml"))
+ doLocal, _ := os.ReadFile(spec)
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(doLocal, &rootNode)
+
+ basePath := filepath.Join(tmp, "specification")
+
+ data, _ := os.ReadFile("../../../test_specs/digitalocean.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ config := datamodel.DocumentConfiguration{
+ AllowFileReferences: true,
+ AllowRemoteReferences: true,
+ BasePath: basePath,
+ Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelDebug,
+ })),
+ }
+
+ lowDoc, err = lowv3.CreateDocumentFromConfig(info, &config)
+ if err != nil {
+ er := utils.UnwrapErrors(err)
+ for e := range er {
+ fmt.Println(er[e])
+ }
+ }
+ d := NewDocument(lowDoc)
+ assert.NotNil(t, d)
+ assert.Equal(t, 183, d.Paths.PathItems.Len())
+}
+
func TestDigitalOceanAsDocFromSHA(t *testing.T) {
data, _ := os.ReadFile("../../../test_specs/digitalocean.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
+ var err error
baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/82e1d558e15a59edc1d47d2c5544e7138f5b3cbf/specification")
config := datamodel.DocumentConfiguration{
@@ -426,24 +476,68 @@ func TestDigitalOceanAsDocFromSHA(t *testing.T) {
BaseURL: baseURL,
}
+ if os.Getenv("GH_PAT") != "" {
+ client := &http.Client{
+ Timeout: time.Second * 60,
+ }
+ config.RemoteURLHandler = func(url string) (*http.Response, error) {
+ request, _ := http.NewRequest(http.MethodGet, url, nil)
+ request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("GH_PAT")))
+ return client.Do(request)
+ }
+ }
+
+ lowDoc, err = lowv3.CreateDocumentFromConfig(info, &config)
+ assert.Len(t, utils.UnwrapErrors(err), 3) // there are 3 404s in this release of the API.
+ d := NewDocument(lowDoc)
+ assert.NotNil(t, d)
+ assert.Equal(t, 183, d.Paths.PathItems.Len())
+}
+
+func TestDigitalOceanAsDocFromMain(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/digitalocean.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+ var err error
+
+ baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")
+ config := datamodel.DocumentConfiguration{
+ AllowFileReferences: true,
+ AllowRemoteReferences: true,
+ BaseURL: baseURL,
+ }
+
+ config.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+
+ if os.Getenv("GH_PAT") != "" {
+ client := &http.Client{
+ Timeout: time.Second * 60,
+ }
+ config.RemoteURLHandler = func(url string) (*http.Response, error) {
+ request, _ := http.NewRequest(http.MethodGet, url, nil)
+ request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("GITHUB_TOKEN")))
+ return client.Do(request)
+ }
+ }
+
lowDoc, err = lowv3.CreateDocumentFromConfig(info, &config)
if err != nil {
- for e := range err {
- fmt.Println(err[e])
+ er := utils.UnwrapErrors(err)
+ for e := range er {
+ fmt.Printf("Reported Error: %s\n", er[e])
}
- panic("broken something")
}
d := NewDocument(lowDoc)
assert.NotNil(t, d)
assert.Equal(t, 183, orderedmap.Len(d.Paths.PathItems))
-
}
func TestPetstoreAsDoc(t *testing.T) {
data, _ := os.ReadFile("../../../test_specs/petstorev3.json")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
- lowDoc, err = lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ var err error
+ lowDoc, err = lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
if err != nil {
panic("broken something")
}
@@ -455,16 +549,15 @@ func TestPetstoreAsDoc(t *testing.T) {
func TestCircularReferencesDoc(t *testing.T) {
data, _ := os.ReadFile("../../../test_specs/circular-tests.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
- lowDoc, err = lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
- assert.Len(t, err, 3)
- d := NewDocument(lowDoc)
+
+ lDoc, err := lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
+ assert.Len(t, utils.UnwrapErrors(err), 3)
+ d := NewDocument(lDoc)
assert.Len(t, d.Components.Schemas, 9)
assert.Len(t, d.Index.GetCircularReferences(), 3)
}
func TestDocument_MarshalYAML(t *testing.T) {
-
// create a new document
initTest()
h := NewDocument(lowDoc)
@@ -473,20 +566,18 @@ func TestDocument_MarshalYAML(t *testing.T) {
r, _ := h.Render()
info, _ := datamodel.ExtractSpecInfo(r)
- lDoc, e := lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ lDoc, e := lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
assert.Nil(t, e)
highDoc := NewDocument(lDoc)
testBurgerShop(t, highDoc, false)
-
}
func TestDocument_MarshalIndention(t *testing.T) {
-
data, _ := os.ReadFile("../../../test_specs/single-definition.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- lowDoc, _ = lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ lowDoc, _ = lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
highDoc := NewDocument(lowDoc)
rendered := highDoc.RenderWithIndention(2)
@@ -496,15 +587,13 @@ func TestDocument_MarshalIndention(t *testing.T) {
rendered = highDoc.RenderWithIndention(4)
assert.NotEqual(t, string(data), strings.TrimSpace(string(rendered)))
-
}
func TestDocument_MarshalIndention_Error(t *testing.T) {
-
data, _ := os.ReadFile("../../../test_specs/single-definition.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- lowDoc, _ = lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ lowDoc, _ = lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
highDoc := NewDocument(lowDoc)
rendered := highDoc.RenderWithIndention(2)
@@ -514,15 +603,13 @@ func TestDocument_MarshalIndention_Error(t *testing.T) {
rendered = highDoc.RenderWithIndention(4)
assert.NotEqual(t, string(data), strings.TrimSpace(string(rendered)))
-
}
func TestDocument_MarshalJSON(t *testing.T) {
-
data, _ := os.ReadFile("../../../test_specs/petstorev3.json")
info, _ := datamodel.ExtractSpecInfo(data)
- lowDoc, _ = lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ lowDoc, _ = lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
highDoc := NewDocument(lowDoc)
@@ -530,7 +617,7 @@ func TestDocument_MarshalJSON(t *testing.T) {
// now read back in the JSON
info, _ = datamodel.ExtractSpecInfo(rendered)
- lowDoc, _ = lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ lowDoc, _ = lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
newDoc := NewDocument(lowDoc)
assert.Equal(t, orderedmap.Len(newDoc.Paths.PathItems), orderedmap.Len(highDoc.Paths.PathItems))
@@ -538,7 +625,6 @@ func TestDocument_MarshalJSON(t *testing.T) {
}
func TestDocument_MarshalYAMLInline(t *testing.T) {
-
// create a new document
initTest()
h := NewDocument(lowDoc)
@@ -547,16 +633,14 @@ func TestDocument_MarshalYAMLInline(t *testing.T) {
r, _ := h.RenderInline()
info, _ := datamodel.ExtractSpecInfo(r)
- lDoc, e := lowv3.CreateDocumentFromConfig(info, datamodel.NewOpenDocumentConfiguration())
+ lDoc, e := lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
assert.Nil(t, e)
highDoc := NewDocument(lDoc)
testBurgerShop(t, highDoc, false)
-
}
func TestDocument_MarshalYAML_TestRefs(t *testing.T) {
-
// create a new document
yml := `openapi: 3.1.0
paths:
@@ -617,7 +701,7 @@ components:
numPatties: 1`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
+ var err error
lowDoc, err = lowv3.CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
AllowFileReferences: true,
AllowRemoteReferences: true,
@@ -634,7 +718,6 @@ components:
}
func TestDocument_MarshalYAML_TestParamRefs(t *testing.T) {
-
// create a new document
yml := `openapi: 3.1.0
paths:
@@ -671,7 +754,7 @@ components:
required: true`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
+ var err error
lowDoc, err = lowv3.CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
AllowFileReferences: true,
AllowRemoteReferences: true,
@@ -687,7 +770,6 @@ components:
}
func TestDocument_MarshalYAML_TestModifySchemas(t *testing.T) {
-
// create a new document
yml := `openapi: 3.1.0
components:
@@ -700,7 +782,7 @@ components:
`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
+ var err error
lowDoc, err = lowv3.CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
AllowFileReferences: true,
AllowRemoteReferences: true,
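
> The two DigitalOcean tests above exercise the new way remote references are resolved: a `BaseURL` for the rolodex, an optional authenticated `RemoteURLHandler`, and an `slog` logger on the `DocumentConfiguration`. Below is a minimal sketch of that pattern outside a test; the root file name is hypothetical and the `GH_PAT` environment variable simply mirrors what the tests use.

```go
package main

import (
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"time"

	"github.com/pb33f/libopenapi/datamodel"
	lowv3 "github.com/pb33f/libopenapi/datamodel/low/v3"
)

func main() {
	data, _ := os.ReadFile("openapi.yaml") // hypothetical root specification
	info, _ := datamodel.ExtractSpecInfo(data)

	baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")
	config := datamodel.DocumentConfiguration{
		AllowFileReferences:   true,
		AllowRemoteReferences: true,
		BaseURL:               baseURL,
		Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
			Level: slog.LevelError,
		})),
	}

	// optionally authenticate remote lookups, exactly as the tests do.
	if token := os.Getenv("GH_PAT"); token != "" {
		client := &http.Client{Timeout: 60 * time.Second}
		config.RemoteURLHandler = func(u string) (*http.Response, error) {
			req, _ := http.NewRequest(http.MethodGet, u, nil)
			req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
			return client.Do(req)
		}
	}

	lowDoc, err := lowv3.CreateDocumentFromConfig(info, &config)
	if err != nil {
		// err is now a single value; see the note after package_test.go below.
		fmt.Println(err)
	}
	_ = lowDoc
}
```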
diff --git a/datamodel/high/v3/media_type_test.go b/datamodel/high/v3/media_type_test.go
index 21f2e5f..3e8f91c 100644
--- a/datamodel/high/v3/media_type_test.go
+++ b/datamodel/high/v3/media_type_test.go
@@ -4,7 +4,8 @@
package v3
import (
- "io/ioutil"
+ "context"
+ "os"
"strings"
"testing"
@@ -18,9 +19,9 @@ import (
func TestMediaType_MarshalYAMLInline(t *testing.T) {
// load the petstore spec
- data, _ := ioutil.ReadFile("../../../test_specs/petstorev3.json")
+ data, _ := os.ReadFile("../../../test_specs/petstorev3.json")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
+ var err error
lowDoc, err = v3.CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
if err != nil {
panic("broken something")
@@ -108,9 +109,9 @@ example: testing a nice mutation`
func TestMediaType_MarshalYAML(t *testing.T) {
// load the petstore spec
- data, _ := ioutil.ReadFile("../../../test_specs/petstorev3.json")
+ data, _ := os.ReadFile("../../../test_specs/petstorev3.json")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
+ var err error
lowDoc, err = v3.CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
if err != nil {
panic("broken something")
@@ -161,7 +162,7 @@ func TestMediaType_Examples(t *testing.T) {
var n v3.MediaType
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewMediaType(&n)
diff --git a/datamodel/high/v3/oauth_flows_test.go b/datamodel/high/v3/oauth_flows_test.go
index 399909e..94e8369 100644
--- a/datamodel/high/v3/oauth_flows_test.go
+++ b/datamodel/high/v3/oauth_flows_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"strings"
"testing"
@@ -44,7 +45,7 @@ clientCredentials:
var n v3.OAuthFlows
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewOAuthFlows(&n)
diff --git a/datamodel/high/v3/operation_test.go b/datamodel/high/v3/operation_test.go
index d1982e5..13c98db 100644
--- a/datamodel/high/v3/operation_test.go
+++ b/datamodel/high/v3/operation_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"strings"
"testing"
@@ -43,7 +44,7 @@ callbacks:
var n v3.Operation
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewOperation(&n)
@@ -140,7 +141,7 @@ security: []`
var n v3.Operation
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewOperation(&n)
@@ -158,7 +159,7 @@ func TestOperation_NoSecurity(t *testing.T) {
var n v3.Operation
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewOperation(&n)
diff --git a/datamodel/high/v3/package_test.go b/datamodel/high/v3/package_test.go
index 264c6ab..5b2769c 100644
--- a/datamodel/high/v3/package_test.go
+++ b/datamodel/high/v3/package_test.go
@@ -5,7 +5,8 @@ package v3
import (
"fmt"
- "io/ioutil"
+ "github.com/pb33f/libopenapi/utils"
+ "os"
"github.com/pb33f/libopenapi/datamodel"
lowv3 "github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -15,22 +16,19 @@ import (
// An example of how to create a new high-level OpenAPI 3+ document from an OpenAPI specification.
func Example_createHighLevelOpenAPIDocument() {
// Load in an OpenAPI 3+ specification as a byte slice.
- data, _ := ioutil.ReadFile("../../../test_specs/petstorev3.json")
+ data, _ := os.ReadFile("../../../test_specs/petstorev3.json")
// Create a new *datamodel.SpecInfo from bytes.
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
+ var err error
// Create a new low-level Document, capture any errors thrown during creation.
- lowDoc, err = lowv3.CreateDocument(info)
+ lowDoc, err = lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
// Get upset if any errors were thrown.
- if len(err) > 0 {
- for i := range err {
- fmt.Printf("error: %e", err[i])
- }
- panic("something went wrong")
+ for _, e := range utils.UnwrapErrors(err) {
+ fmt.Printf("error: %v", e)
}
// Create a high-level Document from the low-level one.
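
> The example above also shows the new error contract: `CreateDocumentFromConfig` now returns a single `error` instead of `[]error`, and `utils.UnwrapErrors` splits it back into its individual parts. A minimal sketch, assuming the returned error is a combined value that `UnwrapErrors` can expand; the spec path is hypothetical.

```go
package main

import (
	"fmt"
	"os"

	"github.com/pb33f/libopenapi/datamodel"
	lowv3 "github.com/pb33f/libopenapi/datamodel/low/v3"
	"github.com/pb33f/libopenapi/utils"
)

func main() {
	data, _ := os.ReadFile("petstorev3.json") // hypothetical path to a spec
	info, _ := datamodel.ExtractSpecInfo(data)

	// a single error value replaces the old []error slice.
	lowDoc, err := lowv3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
	if err != nil {
		// unwrap the combined error and report each underlying problem separately.
		for _, e := range utils.UnwrapErrors(err) {
			fmt.Printf("error: %v\n", e)
		}
	}
	_ = lowDoc
}
```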
diff --git a/datamodel/high/v3/path_item_test.go b/datamodel/high/v3/path_item_test.go
index db03254..d70a3ba 100644
--- a/datamodel/high/v3/path_item_test.go
+++ b/datamodel/high/v3/path_item_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"strings"
"testing"
@@ -28,7 +29,7 @@ func TestPathItem(t *testing.T) {
var n v3.PathItem
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewPathItem(&n)
@@ -62,7 +63,7 @@ trace:
var n v3.PathItem
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewPathItem(&n)
diff --git a/datamodel/high/v3/paths.go b/datamodel/high/v3/paths.go
index 696ba17..08156cc 100644
--- a/datamodel/high/v3/paths.go
+++ b/datamodel/high/v3/paths.go
@@ -6,6 +6,7 @@ package v3
import (
"sort"
+ "github.com/pb33f/libopenapi/datamodel"
"github.com/pb33f/libopenapi/datamodel/high"
"github.com/pb33f/libopenapi/datamodel/low"
v3low "github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -45,7 +46,7 @@ func NewPaths(paths *v3low.Paths) *Paths {
items.Set(value.key, value.value)
return nil
}
- _ = orderedmap.TranslateMapParallel[low.KeyReference[string], low.ValueReference[*v3low.PathItem], pathItemResult](
+ _ = datamodel.TranslateMapParallel[low.KeyReference[string], low.ValueReference[*v3low.PathItem], pathItemResult](
paths.PathItems, translateFunc, resultFunc,
)
p.PathItems = items
diff --git a/datamodel/high/v3/paths_test.go b/datamodel/high/v3/paths_test.go
index 0f47bd7..eb3cd20 100644
--- a/datamodel/high/v3/paths_test.go
+++ b/datamodel/high/v3/paths_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"strings"
"testing"
@@ -37,7 +38,7 @@ func TestPaths_MarshalYAML(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
high := NewPaths(&n)
@@ -89,7 +90,7 @@ func TestPaths_MarshalYAMLInline(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
high := NewPaths(&n)
diff --git a/datamodel/high/v3/response_test.go b/datamodel/high/v3/response_test.go
index 738dc56..42e9950 100644
--- a/datamodel/high/v3/response_test.go
+++ b/datamodel/high/v3/response_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"strings"
"testing"
@@ -39,7 +40,7 @@ links:
var n v3.Response
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewResponse(&n)
@@ -70,7 +71,7 @@ links:
var n v3.Response
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewResponse(&n)
@@ -98,7 +99,7 @@ links:
var n v3.Response
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewResponse(&n)
diff --git a/datamodel/high/v3/responses.go b/datamodel/high/v3/responses.go
index 5a0fafb..e6bc695 100644
--- a/datamodel/high/v3/responses.go
+++ b/datamodel/high/v3/responses.go
@@ -7,6 +7,7 @@ import (
"fmt"
"sort"
+ "github.com/pb33f/libopenapi/datamodel"
"github.com/pb33f/libopenapi/datamodel/high"
lowbase "github.com/pb33f/libopenapi/datamodel/low"
low "github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -47,14 +48,9 @@ func NewResponses(responses *low.Responses) *Responses {
}
codes := orderedmap.New[string, *Response]()
- type respRes struct {
- code string
- resp *Response
- }
-
translateFunc := func(pair orderedmap.Pair[lowbase.KeyReference[string], lowbase.ValueReference[*low.Response]]) (asyncResult[*Response], error) {
return asyncResult[*Response]{
- key: pair.Key().Value,
+ key: pair.Key().Value,
result: NewResponse(pair.Value().Value),
}, nil
}
@@ -62,7 +58,7 @@ func NewResponses(responses *low.Responses) *Responses {
codes.Set(value.key, value.result)
return nil
}
- _ = orderedmap.TranslateMapParallel[lowbase.KeyReference[string], lowbase.ValueReference[*low.Response], asyncResult[*Response]](responses.Codes, translateFunc, resultFunc)
+ _ = datamodel.TranslateMapParallel[lowbase.KeyReference[string], lowbase.ValueReference[*low.Response]](responses.Codes, translateFunc, resultFunc)
r.Codes = codes
return r
}
@@ -126,8 +122,10 @@ func (r *Responses) MarshalYAML() (interface{}, error) {
label = extNode.Content[u].Value
continue
}
- mapped = append(mapped, &responseItem{nil, label,
- extNode.Content[u].Line, extNode.Content[u]})
+ mapped = append(mapped, &responseItem{
+ nil, label,
+ extNode.Content[u].Line, extNode.Content[u],
+ })
}
}
@@ -183,8 +181,10 @@ func (r *Responses) MarshalYAMLInline() (interface{}, error) {
label = extNode.Content[u].Value
continue
}
- mapped = append(mapped, &responseItem{nil, label,
- extNode.Content[u].Line, extNode.Content[u]})
+ mapped = append(mapped, &responseItem{
+ nil, label,
+ extNode.Content[u].Line, extNode.Content[u],
+ })
}
}
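
> Both paths.go and responses.go now call `datamodel.TranslateMapParallel` rather than the `orderedmap` variant. Its exact generic signature is not shown in this diff, so the sketch below infers it from the call sites (an ordered map, a translate function that receives each pair, and a result callback that collects the output) and should be read as an assumption, not a verified API.

```go
package main

import (
	"fmt"
	"strings"

	"github.com/pb33f/libopenapi/datamodel"
	"github.com/pb33f/libopenapi/orderedmap"
)

// kv bundles a key with its translated value, mirroring how responses.go
// carries both through its asyncResult type.
type kv struct {
	key string
	val string
}

func main() {
	in := orderedmap.New[string, string]()
	in.Set("/burgers", "post")
	in.Set("/fries", "get")

	out := orderedmap.New[string, string]()

	// translate may run concurrently per pair; result collects the output.
	translate := func(pair orderedmap.Pair[string, string]) (kv, error) {
		return kv{key: pair.Key(), val: strings.ToUpper(pair.Value())}, nil
	}
	result := func(v kv) error {
		out.Set(v.key, v.val)
		return nil
	}

	_ = datamodel.TranslateMapParallel[string, string, kv](in, translate, result)
	fmt.Println(out.GetOrZero("/burgers")) // POST
}
```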
diff --git a/datamodel/high/v3/responses_test.go b/datamodel/high/v3/responses_test.go
index 8bee83c..09042a5 100644
--- a/datamodel/high/v3/responses_test.go
+++ b/datamodel/high/v3/responses_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"strings"
"testing"
@@ -30,7 +31,7 @@ func TestNewResponses(t *testing.T) {
var n v3.Responses
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewResponses(&n)
@@ -60,7 +61,7 @@ func TestResponses_MarshalYAML(t *testing.T) {
var n v3.Responses
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewResponses(&n)
@@ -90,7 +91,7 @@ func TestResponses_MarshalYAMLInline(t *testing.T) {
var n v3.Responses
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewResponses(&n)
diff --git a/datamodel/high/v3/security_scheme_test.go b/datamodel/high/v3/security_scheme_test.go
index 18b35d4..c26addf 100644
--- a/datamodel/high/v3/security_scheme_test.go
+++ b/datamodel/high/v3/security_scheme_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/pb33f/libopenapi/index"
@@ -31,7 +32,7 @@ func TestSecurityScheme_MarshalYAML(t *testing.T) {
var n v3.SecurityScheme
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
r := NewSecurityScheme(&n)
diff --git a/datamodel/low/base/contact.go b/datamodel/low/base/contact.go
index d612305..c895820 100644
--- a/datamodel/low/base/contact.go
+++ b/datamodel/low/base/contact.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"crypto/sha256"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
@@ -23,7 +24,7 @@ type Contact struct {
}
// Build is not implemented for Contact (there is nothing to build).
-func (c *Contact) Build(_, _ *yaml.Node, _ *index.SpecIndex) error {
+func (c *Contact) Build(_ context.Context, _, _ *yaml.Node, _ *index.SpecIndex) error {
c.Reference = new(low.Reference)
// not implemented.
return nil
diff --git a/datamodel/low/base/example.go b/datamodel/low/base/example.go
index 7abef0c..4adfcd6 100644
--- a/datamodel/low/base/example.go
+++ b/datamodel/low/base/example.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -61,7 +62,7 @@ func (ex *Example) Hash() [32]byte {
}
// Build extracts extensions and example value
-func (ex *Example) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (ex *Example) Build(_ context.Context, _, root *yaml.Node, _ *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
ex.Reference = new(low.Reference)
diff --git a/datamodel/low/base/example_test.go b/datamodel/low/base/example_test.go
index d19bee1..912172f 100644
--- a/datamodel/low/base/example_test.go
+++ b/datamodel/low/base/example_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -26,7 +27,7 @@ x-cake: hot`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "hot", n.Summary.Value)
assert.Equal(t, "cakes", n.Description.Value)
@@ -52,7 +53,7 @@ x-cake: hot`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "hot", n.Summary.Value)
assert.Equal(t, "cakes", n.Description.Value)
@@ -79,7 +80,7 @@ value:
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "hot", n.Summary.Value)
assert.Equal(t, "cakes", n.Description.Value)
@@ -110,7 +111,7 @@ value:
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "hot", n.Summary.Value)
assert.Equal(t, "cakes", n.Description.Value)
@@ -142,7 +143,7 @@ func TestExample_Build_Success_MergeNode(t *testing.T) {
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "hot", n.Summary.Value)
assert.Equal(t, "cakes", n.Description.Value)
@@ -237,8 +238,8 @@ x-burger: nice`
var rDoc Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
assert.Equal(t, lDoc.Hash(), rDoc.Hash())
assert.Len(t, lDoc.GetExtensions(), 1)
diff --git a/datamodel/low/base/external_doc.go b/datamodel/low/base/external_doc.go
index 652145b..a4056a7 100644
--- a/datamodel/low/base/external_doc.go
+++ b/datamodel/low/base/external_doc.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -33,7 +34,7 @@ func (ex *ExternalDoc) FindExtension(ext string) *low.ValueReference[any] {
}
// Build will extract extensions from the ExternalDoc instance.
-func (ex *ExternalDoc) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (ex *ExternalDoc) Build(_ context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
ex.Reference = new(low.Reference)
diff --git a/datamodel/low/base/external_doc_test.go b/datamodel/low/base/external_doc_test.go
index fadd21a..1f48896 100644
--- a/datamodel/low/base/external_doc_test.go
+++ b/datamodel/low/base/external_doc_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -23,7 +24,7 @@ func TestExternalDoc_FindExtension(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "cake", n.FindExtension("x-fish").Value)
@@ -44,7 +45,7 @@ x-b33f: princess`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "https://pb33f.io", n.URL.Value)
assert.Equal(t, "the ranch", n.Description.Value)
@@ -73,8 +74,8 @@ description: the ranch`
var rDoc ExternalDoc
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
assert.Equal(t, lDoc.Hash(), rDoc.Hash())
assert.Len(t, lDoc.GetExtensions(), 1)
diff --git a/datamodel/low/base/info.go b/datamodel/low/base/info.go
index caf1c75..f7440d6 100644
--- a/datamodel/low/base/info.go
+++ b/datamodel/low/base/info.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/utils"
@@ -45,18 +46,18 @@ func (i *Info) GetExtensions() map[low.KeyReference[string]]low.ValueReference[a
}
// Build will extract out the Contact and Info objects from the supplied root node.
-func (i *Info) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (i *Info) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
i.Reference = new(low.Reference)
i.Extensions = low.ExtractExtensions(root)
// extract contact
- contact, _ := low.ExtractObject[*Contact](ContactLabel, root, idx)
+ contact, _ := low.ExtractObject[*Contact](ctx, ContactLabel, root, idx)
i.Contact = contact
// extract license
- lic, _ := low.ExtractObject[*License](LicenseLabel, root, idx)
+ lic, _ := low.ExtractObject[*License](ctx, LicenseLabel, root, idx)
i.License = lic
return nil
}
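
> Every low-level `Build` method now takes a `context.Context` as its first argument, and `Info.Build` threads that context into `low.ExtractObject` when pulling out the Contact and License. A minimal sketch of calling the new signature directly, assuming `context.Background()` and a nil index are acceptable for a self-contained node (the tests below do the same).

```go
package main

import (
	"context"
	"fmt"

	"github.com/pb33f/libopenapi/datamodel/low"
	"github.com/pb33f/libopenapi/datamodel/low/base"
	"gopkg.in/yaml.v3"
)

func main() {
	yml := `title: pizza
version: '1.0'
contact:
  name: buckaroo
license:
  name: MIT`

	var node yaml.Node
	_ = yaml.Unmarshal([]byte(yml), &node)

	var info base.Info
	_ = low.BuildModel(node.Content[0], &info)

	// the context now comes first; the key node and index can still be nil here.
	_ = info.Build(context.Background(), nil, node.Content[0], nil)

	fmt.Println(info.Title.Value)          // pizza
	fmt.Println(info.Contact.Value != nil) // true: extracted via the context-aware ExtractObject
	fmt.Println(info.License.Value != nil) // true
}
```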
diff --git a/datamodel/low/base/info_test.go b/datamodel/low/base/info_test.go
index adc7378..c469a9f 100644
--- a/datamodel/low/base/info_test.go
+++ b/datamodel/low/base/info_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"testing"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -34,7 +35,7 @@ x-cli-name: pizza cli`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "pizza", n.Title.Value)
@@ -61,13 +62,13 @@ x-cli-name: pizza cli`
func TestContact_Build(t *testing.T) {
n := &Contact{}
- k := n.Build(nil, nil, nil)
+ k := n.Build(context.Background(), nil, nil, nil)
assert.Nil(t, k)
}
func TestLicense_Build(t *testing.T) {
n := &License{}
- k := n.Build(nil, nil, nil)
+ k := n.Build(context.Background(), nil, nil, nil)
assert.Nil(t, k)
}
@@ -107,8 +108,8 @@ x-b33f: princess`
var rDoc Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
assert.Equal(t, lDoc.Hash(), rDoc.Hash())
}
diff --git a/datamodel/low/base/license.go b/datamodel/low/base/license.go
index c543875..aa5903b 100644
--- a/datamodel/low/base/license.go
+++ b/datamodel/low/base/license.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -25,7 +26,7 @@ type License struct {
}
// Build out a license, complain if both a URL and identifier are present as they are mutually exclusive
-func (l *License) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (l *License) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
l.Reference = new(low.Reference)
diff --git a/datamodel/low/base/license_test.go b/datamodel/low/base/license_test.go
index 9d29326..173825b 100644
--- a/datamodel/low/base/license_test.go
+++ b/datamodel/low/base/license_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
@@ -70,7 +71,7 @@ description: the ranch`
var lDoc License
err := low.BuildModel(lNode.Content[0], &lDoc)
- err = lDoc.Build(nil, lNode.Content[0], nil)
+ err = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
assert.Error(t, err)
assert.Equal(t, "license cannot have both a URL and an identifier, they are mutually exclusive", err.Error())
diff --git a/datamodel/low/base/schema.go b/datamodel/low/base/schema.go
index f2ee94d..418d78e 100644
--- a/datamodel/low/base/schema.go
+++ b/datamodel/low/base/schema.go
@@ -1,9 +1,9 @@
package base
import (
+ "context"
"crypto/sha256"
"fmt"
- "reflect"
"sort"
"strconv"
"strings"
@@ -101,7 +101,7 @@ type Schema struct {
PatternProperties low.NodeReference[orderedmap.Map[low.KeyReference[string], low.ValueReference[*SchemaProxy]]]
PropertyNames low.NodeReference[*SchemaProxy]
UnevaluatedItems low.NodeReference[*SchemaProxy]
- UnevaluatedProperties low.NodeReference[*SchemaDynamicValue[*SchemaProxy, *bool]]
+ UnevaluatedProperties low.NodeReference[*SchemaDynamicValue[*SchemaProxy, bool]]
Anchor low.NodeReference[string]
// Compatible with all versions
@@ -122,7 +122,7 @@ type Schema struct {
Enum low.NodeReference[[]low.ValueReference[any]]
Not low.NodeReference[*SchemaProxy]
Properties low.NodeReference[orderedmap.Map[low.KeyReference[string], low.ValueReference[*SchemaProxy]]]
- AdditionalProperties low.NodeReference[any]
+ AdditionalProperties low.NodeReference[*SchemaDynamicValue[*SchemaProxy, bool]]
Description low.NodeReference[string]
ContentEncoding low.NodeReference[string]
ContentMediaType low.NodeReference[string]
@@ -139,6 +139,9 @@ type Schema struct {
// Parent Proxy refers back to the low level SchemaProxy that is proxying this schema.
ParentProxy *SchemaProxy
+
+ // Index is a reference to the SpecIndex that was used to build this schema.
+ Index *index.SpecIndex
*low.Reference
}
@@ -190,53 +193,7 @@ func (s *Schema) Hash() [32]byte {
d = append(d, fmt.Sprint(s.MinProperties.Value))
}
if !s.AdditionalProperties.IsEmpty() {
-
- // check type of properties, if we have a low level map, we need to hash the values in a repeatable
- // order.
- to := reflect.TypeOf(s.AdditionalProperties.Value)
- vo := reflect.ValueOf(s.AdditionalProperties.Value)
- var values []string
- switch to.Kind() {
- case reflect.Slice:
- for i := 0; i < vo.Len(); i++ {
- vn := vo.Index(i).Interface()
-
- if jh, ok := vn.(low.HasValueUnTyped); ok {
- vn = jh.GetValueUntyped()
- fg := reflect.TypeOf(vn)
- gf := reflect.ValueOf(vn)
-
- if fg.Kind() == reflect.Map {
- for _, ky := range gf.MapKeys() {
- hu := ky.Interface()
- values = append(values, fmt.Sprintf("%s:%s", hu, low.GenerateHashString(gf.MapIndex(ky).Interface())))
- }
- continue
- }
- values = append(values, fmt.Sprintf("%d:%s", i, low.GenerateHashString(vn)))
- }
- }
- sort.Strings(values)
- d = append(d, strings.Join(values, "||"))
-
- case reflect.Map:
- for _, k := range vo.MapKeys() {
- var x string
- var l int
- var v any
- // extract key
- if o, ok := k.Interface().(low.HasKeyNode); ok {
- x = o.GetKeyNode().Value
- l = o.GetKeyNode().Line
- v = vo.MapIndex(k).Interface().(low.HasValueNodeUntyped).GetValueNode().Value
- }
- values = append(values, fmt.Sprintf("%d:%s:%s", l, x, low.GenerateHashString(v)))
- }
- sort.Strings(values)
- d = append(d, strings.Join(values, "||"))
- default:
- d = append(d, low.GenerateHashString(s.AdditionalProperties.Value))
- }
+ d = append(d, low.GenerateHashString(s.AdditionalProperties.Value))
}
if !s.Description.IsEmpty() {
d = append(d, fmt.Sprint(s.Description.Value))
@@ -535,12 +492,13 @@ func (s *Schema) GetExtensions() map[low.KeyReference[string]]low.ValueReference
// - UnevaluatedItems
// - UnevaluatedProperties
// - Anchor
-func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
+func (s *Schema) Build(ctx context.Context, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
s.Reference = new(low.Reference)
+ s.Index = idx
if h, _, _ := utils.IsNodeRefValue(root); h {
- ref, err := low.LocateRefNode(root, idx)
+ ref, _, err := low.LocateRefNode(root, idx)
if ref != nil {
root = ref
if err != nil {
@@ -591,20 +549,43 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
// determine exclusive minimum type, bool (3.0) or int (3.1)
_, exMinLabel, exMinValue := utils.FindKeyNodeFullTop(ExclusiveMinimumLabel, root.Content)
if exMinValue != nil {
- if utils.IsNodeBoolValue(exMinValue) {
- val, _ := strconv.ParseBool(exMinValue.Value)
- s.ExclusiveMinimum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
- KeyNode: exMinLabel,
- ValueNode: exMinValue,
- Value: &SchemaDynamicValue[bool, float64]{N: 0, A: val},
+
+ // if there is an index, determine if this is a 3.0 or 3.1 schema
+ if idx != nil {
+ if idx.GetConfig().SpecInfo.VersionNumeric == 3.1 {
+ val, _ := strconv.ParseFloat(exMinValue.Value, 64)
+ s.ExclusiveMinimum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
+ KeyNode: exMinLabel,
+ ValueNode: exMinValue,
+ Value: &SchemaDynamicValue[bool, float64]{N: 1, B: val},
+ }
}
- }
- if utils.IsNodeIntValue(exMinValue) {
- val, _ := strconv.ParseFloat(exMinValue.Value, 64)
- s.ExclusiveMinimum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
- KeyNode: exMinLabel,
- ValueNode: exMinValue,
- Value: &SchemaDynamicValue[bool, float64]{N: 1, B: val},
+ if idx.GetConfig().SpecInfo.VersionNumeric <= 3.0 {
+ val, _ := strconv.ParseBool(exMinValue.Value)
+ s.ExclusiveMinimum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
+ KeyNode: exMinLabel,
+ ValueNode: exMinValue,
+ Value: &SchemaDynamicValue[bool, float64]{N: 0, A: val},
+ }
+ }
+ } else {
+
+ // there is no index, so we have to determine the type based on the value
+ if utils.IsNodeBoolValue(exMinValue) {
+ val, _ := strconv.ParseBool(exMinValue.Value)
+ s.ExclusiveMinimum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
+ KeyNode: exMinLabel,
+ ValueNode: exMinValue,
+ Value: &SchemaDynamicValue[bool, float64]{N: 0, A: val},
+ }
+ }
+ if utils.IsNodeIntValue(exMinValue) {
+ val, _ := strconv.ParseFloat(exMinValue.Value, 64)
+ s.ExclusiveMinimum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
+ KeyNode: exMinLabel,
+ ValueNode: exMinValue,
+ Value: &SchemaDynamicValue[bool, float64]{N: 1, B: val},
+ }
}
}
}
@@ -612,20 +593,43 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
// determine exclusive maximum type, bool (3.0) or int (3.1)
_, exMaxLabel, exMaxValue := utils.FindKeyNodeFullTop(ExclusiveMaximumLabel, root.Content)
if exMaxValue != nil {
- if utils.IsNodeBoolValue(exMaxValue) {
- val, _ := strconv.ParseBool(exMaxValue.Value)
- s.ExclusiveMaximum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
- KeyNode: exMaxLabel,
- ValueNode: exMaxValue,
- Value: &SchemaDynamicValue[bool, float64]{N: 0, A: val},
+
+ // if there is an index, determine if this is a 3.0 or 3.1 schema
+ if idx != nil {
+ if idx.GetConfig().SpecInfo.VersionNumeric == 3.1 {
+ val, _ := strconv.ParseFloat(exMaxValue.Value, 64)
+ s.ExclusiveMaximum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
+ KeyNode: exMaxLabel,
+ ValueNode: exMaxValue,
+ Value: &SchemaDynamicValue[bool, float64]{N: 1, B: val},
+ }
}
- }
- if utils.IsNodeIntValue(exMaxValue) {
- val, _ := strconv.ParseFloat(exMaxValue.Value, 64)
- s.ExclusiveMaximum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
- KeyNode: exMaxLabel,
- ValueNode: exMaxValue,
- Value: &SchemaDynamicValue[bool, float64]{N: 1, B: val},
+ if idx.GetConfig().SpecInfo.VersionNumeric <= 3.0 {
+ val, _ := strconv.ParseBool(exMaxValue.Value)
+ s.ExclusiveMaximum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
+ KeyNode: exMaxLabel,
+ ValueNode: exMaxValue,
+ Value: &SchemaDynamicValue[bool, float64]{N: 0, A: val},
+ }
+ }
+ } else {
+
+ // there is no index, so we have to determine the type based on the value
+ if utils.IsNodeBoolValue(exMaxValue) {
+ val, _ := strconv.ParseBool(exMaxValue.Value)
+ s.ExclusiveMaximum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
+ KeyNode: exMaxLabel,
+ ValueNode: exMaxValue,
+ Value: &SchemaDynamicValue[bool, float64]{N: 0, A: val},
+ }
+ }
+ if utils.IsNodeIntValue(exMaxValue) {
+ val, _ := strconv.ParseFloat(exMaxValue.Value, 64)
+ s.ExclusiveMaximum = low.NodeReference[*SchemaDynamicValue[bool, float64]]{
+ KeyNode: exMaxLabel,
+ ValueNode: exMaxValue,
+ Value: &SchemaDynamicValue[bool, float64]{N: 1, B: val},
+ }
}
}
}
@@ -668,77 +672,24 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
}
}
- _, addPLabel, addPNode := utils.FindKeyNodeFullTop(AdditionalPropertiesLabel, root.Content)
- if addPNode != nil {
- if utils.IsNodeMap(addPNode) || utils.IsNodeArray(addPNode) {
- // check if this is a reference, or an inline schema.
- isRef, _, _ := utils.IsNodeRefValue(addPNode)
- var sp *SchemaProxy
- // now check if this object has a 'type' if so, it's a schema, if not... it's a random
- // object, and we should treat it as a raw map.
- if _, v := utils.FindKeyNodeTop(TypeLabel, addPNode.Content); v != nil {
- sp = &SchemaProxy{
- kn: addPLabel,
- vn: addPNode,
- idx: idx,
- }
- }
- if isRef {
- _, vn := utils.FindKeyNodeTop("$ref", addPNode.Content)
- sp = &SchemaProxy{
- kn: addPLabel,
- vn: addPNode,
- idx: idx,
- isReference: true,
- referenceLookup: vn.Value,
- }
- }
-
- // if this is a reference, or a schema, we're done.
- if sp != nil {
- s.AdditionalProperties = low.NodeReference[any]{Value: sp, KeyNode: addPLabel, ValueNode: addPNode}
- } else {
-
- // if this is a map, collect all the keys and values.
- if utils.IsNodeMap(addPNode) {
-
- addProps := make(map[low.KeyReference[string]]low.ValueReference[any])
- var label string
- for g := range addPNode.Content {
- if g%2 == 0 {
- label = addPNode.Content[g].Value
- continue
- } else {
- addProps[low.KeyReference[string]{Value: label, KeyNode: addPNode.Content[g-1]}] = low.ValueReference[any]{Value: addPNode.Content[g].Value, ValueNode: addPNode.Content[g]}
- }
- }
- s.AdditionalProperties = low.NodeReference[any]{Value: addProps, KeyNode: addPLabel, ValueNode: addPNode}
- }
-
- // if the node is an array, extract everything into a trackable structure
- if utils.IsNodeArray(addPNode) {
- var addProps []low.ValueReference[any]
-
- // if this is an array or maps, encode the map items correctly.
- for i := range addPNode.Content {
- if utils.IsNodeMap(addPNode.Content[i]) {
- var prop map[string]any
- _ = addPNode.Content[i].Decode(&prop)
- addProps = append(addProps,
- low.ValueReference[any]{Value: prop, ValueNode: addPNode.Content[i]})
- } else {
- addProps = append(addProps,
- low.ValueReference[any]{Value: addPNode.Content[i].Value, ValueNode: addPNode.Content[i]})
- }
- }
-
- s.AdditionalProperties = low.NodeReference[any]{Value: addProps, KeyNode: addPLabel, ValueNode: addPNode}
- }
- }
+ // check whether additionalProperties is a schema or a boolean
+ addPropsIsBool := false
+ addPropsBoolValue := true
+ _, addPLabel, addPValue := utils.FindKeyNodeFullTop(AdditionalPropertiesLabel, root.Content)
+ if addPValue != nil {
+ if utils.IsNodeBoolValue(addPValue) {
+ addPropsIsBool = true
+ addPropsBoolValue, _ = strconv.ParseBool(addPValue.Value)
}
- if utils.IsNodeBoolValue(addPNode) {
- b, _ := strconv.ParseBool(addPNode.Value)
- s.AdditionalProperties = low.NodeReference[any]{Value: b, KeyNode: addPLabel, ValueNode: addPNode}
+ }
+ if addPropsIsBool {
+ s.AdditionalProperties = low.NodeReference[*SchemaDynamicValue[*SchemaProxy, bool]]{
+ Value: &SchemaDynamicValue[*SchemaProxy, bool]{
+ B: addPropsBoolValue,
+ N: 1,
+ },
+ KeyNode: addPLabel,
+ ValueNode: addPValue,
}
}
@@ -755,7 +706,7 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
if extDocNode != nil {
var exDoc ExternalDoc
_ = low.BuildModel(extDocNode, &exDoc)
- _ = exDoc.Build(extDocLabel, extDocNode, idx) // throws no errors, can't check for one.
+ _ = exDoc.Build(ctx, extDocLabel, extDocNode, idx) // throws no errors, can't check for one.
s.ExternalDocs = low.NodeReference[*ExternalDoc]{Value: &exDoc, KeyNode: extDocLabel, ValueNode: extDocNode}
}
@@ -770,7 +721,7 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
}
// handle properties
- props, err := buildPropertyMap(root, idx, PropertiesLabel)
+ props, err := buildPropertyMap(ctx, root, idx, PropertiesLabel)
if err != nil {
return err
}
@@ -779,7 +730,7 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
}
// handle dependent schemas
- props, err = buildPropertyMap(root, idx, DependentSchemasLabel)
+ props, err = buildPropertyMap(ctx, root, idx, DependentSchemasLabel)
if err != nil {
return err
}
@@ -788,7 +739,7 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
}
// handle pattern properties
- props, err = buildPropertyMap(root, idx, PatternPropertiesLabel)
+ props, err = buildPropertyMap(ctx, root, idx, PatternPropertiesLabel)
if err != nil {
return err
}
@@ -828,9 +779,9 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
}
}
if unevalIsBool {
- s.UnevaluatedProperties = low.NodeReference[*SchemaDynamicValue[*SchemaProxy, *bool]]{
- Value: &SchemaDynamicValue[*SchemaProxy, *bool]{
- B: &unevalBoolValue,
+ s.UnevaluatedProperties = low.NodeReference[*SchemaDynamicValue[*SchemaProxy, bool]]{
+ Value: &SchemaDynamicValue[*SchemaProxy, bool]{
+ B: unevalBoolValue,
N: 1,
},
KeyNode: unevalLabel,
@@ -839,7 +790,7 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
}
var allOf, anyOf, oneOf, prefixItems []low.ValueReference[*SchemaProxy]
- var items, not, contains, sif, selse, sthen, propertyNames, unevalItems, unevalProperties low.ValueReference[*SchemaProxy]
+ var items, not, contains, sif, selse, sthen, propertyNames, unevalItems, unevalProperties, addProperties low.ValueReference[*SchemaProxy]
_, allOfLabel, allOfValue := utils.FindKeyNodeFullTop(AllOfLabel, root.Content)
_, anyOfLabel, anyOfValue := utils.FindKeyNodeFullTop(AnyOfLabel, root.Content)
@@ -853,6 +804,7 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
_, propNamesLabel, propNamesValue := utils.FindKeyNodeFullTop(PropertyNamesLabel, root.Content)
_, unevalItemsLabel, unevalItemsValue := utils.FindKeyNodeFullTop(UnevaluatedItemsLabel, root.Content)
_, unevalPropsLabel, unevalPropsValue := utils.FindKeyNodeFullTop(UnevaluatedPropertiesLabel, root.Content)
+ _, addPropsLabel, addPropsValue := utils.FindKeyNodeFullTop(AdditionalPropertiesLabel, root.Content)
errorChan := make(chan error)
allOfChan := make(chan schemaProxyBuildResult)
@@ -868,6 +820,7 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
propNamesChan := make(chan schemaProxyBuildResult)
unevalItemsChan := make(chan schemaProxyBuildResult)
unevalPropsChan := make(chan schemaProxyBuildResult)
+ addPropsChan := make(chan schemaProxyBuildResult)
totalBuilds := countSubSchemaItems(allOfValue) +
countSubSchemaItems(anyOfValue) +
@@ -875,52 +828,56 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
countSubSchemaItems(prefixItemsValue)
if allOfValue != nil {
- go buildSchema(allOfChan, allOfLabel, allOfValue, errorChan, idx)
+ go buildSchema(ctx, allOfChan, allOfLabel, allOfValue, errorChan, idx)
}
if anyOfValue != nil {
- go buildSchema(anyOfChan, anyOfLabel, anyOfValue, errorChan, idx)
+ go buildSchema(ctx, anyOfChan, anyOfLabel, anyOfValue, errorChan, idx)
}
if oneOfValue != nil {
- go buildSchema(oneOfChan, oneOfLabel, oneOfValue, errorChan, idx)
+ go buildSchema(ctx, oneOfChan, oneOfLabel, oneOfValue, errorChan, idx)
}
if prefixItemsValue != nil {
- go buildSchema(prefixItemsChan, prefixItemsLabel, prefixItemsValue, errorChan, idx)
+ go buildSchema(ctx, prefixItemsChan, prefixItemsLabel, prefixItemsValue, errorChan, idx)
}
if notValue != nil {
totalBuilds++
- go buildSchema(notChan, notLabel, notValue, errorChan, idx)
+ go buildSchema(ctx, notChan, notLabel, notValue, errorChan, idx)
}
if containsValue != nil {
totalBuilds++
- go buildSchema(containsChan, containsLabel, containsValue, errorChan, idx)
+ go buildSchema(ctx, containsChan, containsLabel, containsValue, errorChan, idx)
}
if !itemsIsBool && itemsValue != nil {
totalBuilds++
- go buildSchema(itemsChan, itemsLabel, itemsValue, errorChan, idx)
+ go buildSchema(ctx, itemsChan, itemsLabel, itemsValue, errorChan, idx)
}
if sifValue != nil {
totalBuilds++
- go buildSchema(ifChan, sifLabel, sifValue, errorChan, idx)
+ go buildSchema(ctx, ifChan, sifLabel, sifValue, errorChan, idx)
}
if selseValue != nil {
totalBuilds++
- go buildSchema(elseChan, selseLabel, selseValue, errorChan, idx)
+ go buildSchema(ctx, elseChan, selseLabel, selseValue, errorChan, idx)
}
if sthenValue != nil {
totalBuilds++
- go buildSchema(thenChan, sthenLabel, sthenValue, errorChan, idx)
+ go buildSchema(ctx, thenChan, sthenLabel, sthenValue, errorChan, idx)
}
if propNamesValue != nil {
totalBuilds++
- go buildSchema(propNamesChan, propNamesLabel, propNamesValue, errorChan, idx)
+ go buildSchema(ctx, propNamesChan, propNamesLabel, propNamesValue, errorChan, idx)
}
if unevalItemsValue != nil {
totalBuilds++
- go buildSchema(unevalItemsChan, unevalItemsLabel, unevalItemsValue, errorChan, idx)
+ go buildSchema(ctx, unevalItemsChan, unevalItemsLabel, unevalItemsValue, errorChan, idx)
}
if !unevalIsBool && unevalPropsValue != nil {
totalBuilds++
- go buildSchema(unevalPropsChan, unevalPropsLabel, unevalPropsValue, errorChan, idx)
+ go buildSchema(ctx, unevalPropsChan, unevalPropsLabel, unevalPropsValue, errorChan, idx)
+ }
+ if !addPropsIsBool && addPropsValue != nil {
+ totalBuilds++
+ go buildSchema(ctx, addPropsChan, addPropsLabel, addPropsValue, errorChan, idx)
}
completeCount := 0
@@ -967,6 +924,9 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
case r := <-unevalPropsChan:
completeCount++
unevalProperties = r.v
+ case r := <-addPropsChan:
+ completeCount++
+ addProperties = r.v
}
}
@@ -1057,22 +1017,31 @@ func (s *Schema) Build(root *yaml.Node, idx *index.SpecIndex) error {
}
}
if !unevalIsBool && !unevalProperties.IsEmpty() {
- s.UnevaluatedProperties = low.NodeReference[*SchemaDynamicValue[*SchemaProxy, *bool]]{
- Value: &SchemaDynamicValue[*SchemaProxy, *bool]{
+ s.UnevaluatedProperties = low.NodeReference[*SchemaDynamicValue[*SchemaProxy, bool]]{
+ Value: &SchemaDynamicValue[*SchemaProxy, bool]{
A: unevalProperties.Value,
},
KeyNode: unevalPropsLabel,
ValueNode: unevalPropsValue,
}
}
+ if !addPropsIsBool && !addProperties.IsEmpty() {
+ s.AdditionalProperties = low.NodeReference[*SchemaDynamicValue[*SchemaProxy, bool]]{
+ Value: &SchemaDynamicValue[*SchemaProxy, bool]{
+ A: addProperties.Value,
+ },
+ KeyNode: addPropsLabel,
+ ValueNode: addPropsValue,
+ }
+ }
return nil
}
-func buildPropertyMap(root *yaml.Node, idx *index.SpecIndex, label string) (*low.NodeReference[orderedmap.Map[low.KeyReference[string], low.ValueReference[*SchemaProxy]]], error) {
+func buildPropertyMap(ctx context.Context, root *yaml.Node, idx *index.SpecIndex, label string) (*low.NodeReference[orderedmap.Map[low.KeyReference[string], low.ValueReference[*SchemaProxy]]], error) {
// for property, build in a new thread!
bChan := make(chan schemaProxyBuildResult)
- buildProperty := func(label *yaml.Node, value *yaml.Node, c chan schemaProxyBuildResult, isRef bool,
+ buildProperty := func(ctx context.Context, label *yaml.Node, value *yaml.Node, c chan schemaProxyBuildResult, isRef bool,
refString string,
) {
c <- schemaProxyBuildResult{
@@ -1081,7 +1050,7 @@ func buildPropertyMap(root *yaml.Node, idx *index.SpecIndex, label string) (*low
Value: label.Value,
},
v: low.ValueReference[*SchemaProxy]{
- Value: &SchemaProxy{kn: label, vn: value, idx: idx, isReference: isRef, referenceLookup: refString},
+ Value: &SchemaProxy{ctx: ctx, kn: label, vn: value, idx: idx, isReference: isRef, referenceLookup: refString},
ValueNode: value,
},
}
@@ -1098,22 +1067,24 @@ func buildPropertyMap(root *yaml.Node, idx *index.SpecIndex, label string) (*low
continue
}
+ foundCtx := ctx
// check our prop isn't reference
isRef := false
refString := ""
if h, _, l := utils.IsNodeRefValue(prop); h {
- ref, _ := low.LocateRefNode(prop, idx)
+ ref, _, _, fctx := low.LocateRefNodeWithContext(ctx, prop, idx)
if ref != nil {
isRef = true
prop = ref
refString = l
+ foundCtx = fctx
} else {
return nil, fmt.Errorf("schema properties build failed: cannot find reference %s, line %d, col %d",
prop.Content[1].Value, prop.Content[1].Line, prop.Content[1].Column)
}
}
totalProps++
- go buildProperty(currentProp, prop, bChan, isRef, refString)
+ go buildProperty(foundCtx, currentProp, prop, bChan, isRef, refString)
}
completedProps := 0
for completedProps < totalProps {
@@ -1155,7 +1126,7 @@ func (s *Schema) extractExtensions(root *yaml.Node) {
}
// build out a child schema for parent schema.
-func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml.Node, errors chan error, idx *index.SpecIndex) {
+func buildSchema(ctx context.Context, schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml.Node, errors chan error, idx *index.SpecIndex) {
if valueNode != nil {
type buildResult struct {
res *low.ValueReference[*SchemaProxy]
@@ -1165,7 +1136,7 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
syncChan := make(chan buildResult)
// build out a SchemaProxy for every sub-schema.
- build := func(kn *yaml.Node, vn *yaml.Node, schemaIdx int, c chan buildResult,
+ build := func(pctx context.Context, kn *yaml.Node, vn *yaml.Node, schemaIdx int, c chan buildResult,
isRef bool, refLocation string,
) {
// a proxy design works best here. polymorphism, pretty much guarantees that a sub-schema can
@@ -1178,6 +1149,7 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
sp.kn = kn
sp.vn = vn
sp.idx = idx
+ sp.ctx = pctx
if isRef {
sp.referenceLookup = refLocation
sp.isReference = true
@@ -1194,13 +1166,15 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
isRef := false
refLocation := ""
+ foundCtx := ctx
if utils.IsNodeMap(valueNode) {
h := false
if h, _, refLocation = utils.IsNodeRefValue(valueNode); h {
isRef = true
- ref, _ := low.LocateRefNode(valueNode, idx)
+ ref, _, _, fctx := low.LocateRefNodeWithContext(ctx, valueNode, idx)
if ref != nil {
valueNode = ref
+ foundCtx = fctx
} else {
errors <- fmt.Errorf("build schema failed: reference cannot be found: %s, line %d, col %d",
valueNode.Content[1].Value, valueNode.Content[1].Line, valueNode.Content[1].Column)
@@ -1209,7 +1183,7 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
// this only runs once, however to keep things consistent, it makes sense to use the same async method
// that arrays will use.
- go build(labelNode, valueNode, -1, syncChan, isRef, refLocation)
+ go build(foundCtx, labelNode, valueNode, -1, syncChan, isRef, refLocation)
select {
case r := <-syncChan:
schemas <- schemaProxyBuildResult{
@@ -1220,8 +1194,7 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
v: *r.res,
}
}
- }
- if utils.IsNodeArray(valueNode) {
+ } else if utils.IsNodeArray(valueNode) {
refBuilds := 0
results := make([]*low.ValueReference[*SchemaProxy], len(valueNode.Content))
@@ -1230,9 +1203,10 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
h := false
if h, _, refLocation = utils.IsNodeRefValue(vn); h {
isRef = true
- ref, _ := low.LocateRefNode(vn, idx)
+ ref, _, _, fctx := low.LocateRefNodeWithContext(ctx, vn, idx)
if ref != nil {
vn = ref
+ foundCtx = fctx
} else {
err := fmt.Errorf("build schema failed: reference cannot be found: %s, line %d, col %d",
vn.Content[1].Value, vn.Content[1].Line, vn.Content[1].Column)
@@ -1241,7 +1215,7 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
}
}
refBuilds++
- go build(vn, vn, i, syncChan, isRef, refLocation)
+ go build(foundCtx, vn, vn, i, syncChan, isRef, refLocation)
}
completedBuilds := 0
@@ -1262,6 +1236,8 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
v: *r,
}
}
+ } else {
+ errors <- fmt.Errorf("build schema failed: unexpected node type: %s, line %d, col %d", valueNode.Tag, valueNode.Line, valueNode.Column)
}
}
}
@@ -1269,22 +1245,29 @@ func buildSchema(schemas chan schemaProxyBuildResult, labelNode, valueNode *yaml
// ExtractSchema will return a pointer to a NodeReference that contains a *SchemaProxy if successful. The function
// will specifically look for a key node named 'schema' and extract the value mapped to that key. If the operation
// fails then no NodeReference is returned and an error is returned instead.
-func ExtractSchema(root *yaml.Node, idx *index.SpecIndex) (*low.NodeReference[*SchemaProxy], error) {
+func ExtractSchema(ctx context.Context, root *yaml.Node, idx *index.SpecIndex) (*low.NodeReference[*SchemaProxy], error) {
var schLabel, schNode *yaml.Node
errStr := "schema build failed: reference '%s' cannot be found at line %d, col %d"
isRef := false
refLocation := ""
+
if rf, rl, _ := utils.IsNodeRefValue(root); rf {
// locate reference in index.
isRef = true
- ref, _ := low.LocateRefNode(root, idx)
+ ref, fIdx, _, nCtx := low.LocateRefNodeWithContext(ctx, root, idx)
if ref != nil {
schNode = ref
schLabel = rl
+ ctx = nCtx
+ idx = fIdx
} else {
+ v := root.Content[1].Value
+ if root.Content[1].Value == "" {
+ v = "[empty]"
+ }
return nil, fmt.Errorf(errStr,
- root.Content[1].Value, root.Content[1].Line, root.Content[1].Column)
+ v, root.Content[1].Line, root.Content[1].Column)
}
} else {
_, schLabel, schNode = utils.FindKeyNodeFull(SchemaLabel, root.Content)
@@ -1292,12 +1275,21 @@ func ExtractSchema(root *yaml.Node, idx *index.SpecIndex) (*low.NodeReference[*S
h := false
if h, _, refLocation = utils.IsNodeRefValue(schNode); h {
isRef = true
- ref, _ := low.LocateRefNode(schNode, idx)
+ ref, foundIdx, _, nCtx := low.LocateRefNodeWithContext(ctx, schNode, idx)
if ref != nil {
schNode = ref
+ if foundIdx != nil {
+ // TODO: check on this
+ //idx = foundIdx
+ }
+ ctx = nCtx
} else {
+ v := schNode.Content[1].Value
+ if schNode.Content[1].Value == "" {
+ v = "[empty]"
+ }
return nil, fmt.Errorf(errStr,
- schNode.Content[1].Value, schNode.Content[1].Line, schNode.Content[1].Column)
+ v, schNode.Content[1].Line, schNode.Content[1].Column)
}
}
}
@@ -1305,7 +1297,7 @@ func ExtractSchema(root *yaml.Node, idx *index.SpecIndex) (*low.NodeReference[*S
if schNode != nil {
// check if schema has already been built.
- schema := &SchemaProxy{kn: schLabel, vn: schNode, idx: idx, isReference: isRef, referenceLookup: refLocation}
+ schema := &SchemaProxy{kn: schLabel, vn: schNode, idx: idx, ctx: ctx, isReference: isRef, referenceLookup: refLocation}
return &low.NodeReference[*SchemaProxy]{
Value: schema, KeyNode: schLabel, ValueNode: schNode, ReferenceNode: isRef,
Reference: refLocation,
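
> With this change, `additionalProperties` on the low-level Schema is no longer an untyped `any`; like `unevaluatedProperties` it becomes a `SchemaDynamicValue[*SchemaProxy, bool]`. A minimal sketch of reading it back, assuming the N/A/B convention used elsewhere in this diff (A holds the schema form, B the boolean form, and the boolean branch is stored with N set to 1).

```go
package main

import (
	"context"
	"fmt"

	"github.com/pb33f/libopenapi/datamodel/low/base"
	"gopkg.in/yaml.v3"
)

func main() {
	yml := `type: object
additionalProperties: false`

	var node yaml.Node
	_ = yaml.Unmarshal([]byte(yml), &node)

	var s base.Schema
	// Schema.Build also takes a context now; a nil index falls back to
	// value-based type detection for version-dependent keywords.
	_ = s.Build(context.Background(), node.Content[0], nil)

	if ap := s.AdditionalProperties.Value; ap != nil {
		if ap.N == 1 {
			fmt.Println("additionalProperties (boolean form):", ap.B) // false
		} else {
			fmt.Println("additionalProperties is a schema:", ap.A != nil)
		}
	}
}
```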
diff --git a/datamodel/low/base/schema_proxy.go b/datamodel/low/base/schema_proxy.go
index 3e4c727..34737e1 100644
--- a/datamodel/low/base/schema_proxy.go
+++ b/datamodel/low/base/schema_proxy.go
@@ -4,8 +4,8 @@
package base
import (
+ "context"
"crypto/sha256"
-
"github.com/pb33f/libopenapi/index"
"github.com/pb33f/libopenapi/utils"
"gopkg.in/yaml.v3"
@@ -51,14 +51,16 @@ type SchemaProxy struct {
buildError error
isReference bool // Is the schema underneath originally a $ref?
referenceLookup string // If the schema is a $ref, what's its name?
+ ctx context.Context
}
// Build will prepare the SchemaProxy for rendering, it does not build the Schema, only sets up internal state.
// Key maybe nil if absent.
-func (sp *SchemaProxy) Build(key, value *yaml.Node, idx *index.SpecIndex) error {
+func (sp *SchemaProxy) Build(ctx context.Context, key, value *yaml.Node, idx *index.SpecIndex) error {
sp.kn = key
sp.vn = value
sp.idx = idx
+ sp.ctx = ctx
if rf, _, r := utils.IsNodeRefValue(value); rf {
sp.isReference = true
sp.referenceLookup = r
@@ -83,7 +85,7 @@ func (sp *SchemaProxy) Schema() *Schema {
}
schema := new(Schema)
utils.CheckForMergeNodes(sp.vn)
- err := schema.Build(sp.vn, sp.idx)
+ err := schema.Build(sp.ctx, sp.vn, sp.idx)
if err != nil {
sp.buildError = err
return nil
@@ -129,6 +131,20 @@ func (sp *SchemaProxy) GetSchemaReference() string {
return sp.referenceLookup
}
+func (sp *SchemaProxy) GetSchemaReferenceLocation() *index.NodeOrigin {
+ if sp.idx != nil {
+ origin := sp.idx.FindNodeOrigin(sp.vn)
+ if origin != nil {
+ return origin
+ }
+ if sp.idx.GetRolodex() != nil {
+ origin = sp.idx.GetRolodex().FindNodeOrigin(sp.vn)
+ return origin
+ }
+ }
+ return nil
+}
+
// GetKeyNode will return the yaml.Node pointer that is a key for value node.
func (sp *SchemaProxy) GetKeyNode() *yaml.Node {
return sp.kn
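A hedged sketch of how the new GetSchemaReferenceLocation accessor can be used once a proxy has been built against a rolodex-backed index; sp is an illustrative *SchemaProxy, and only AbsoluteLocation is shown because that is the field exercised by the test below.

    // sp is a *base.SchemaProxy built via Build(ctx, key, value, idx) against a rolodex-backed index
    if origin := sp.GetSchemaReferenceLocation(); origin != nil {
        fmt.Println("schema value node was found in:", origin.AbsoluteLocation)
    }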
diff --git a/datamodel/low/base/schema_proxy_test.go b/datamodel/low/base/schema_proxy_test.go
index 1f46046..feb0232 100644
--- a/datamodel/low/base/schema_proxy_test.go
+++ b/datamodel/low/base/schema_proxy_test.go
@@ -4,7 +4,9 @@
package base
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
+ "github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
"testing"
@@ -19,7 +21,7 @@ description: something`
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- err := sch.Build(&idxNode, idxNode.Content[0], nil)
+ err := sch.Build(context.Background(), &idxNode, idxNode.Content[0], nil)
assert.NoError(t, err)
assert.Equal(t, "db2a35dd6fb3d9481d0682571b9d687616bb2a34c1887f7863f0b2e769ca7b23",
@@ -51,7 +53,7 @@ func TestSchemaProxy_Build_CheckRef(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- err := sch.Build(nil, idxNode.Content[0], nil)
+ err := sch.Build(context.Background(), nil, idxNode.Content[0], nil)
assert.NoError(t, err)
assert.True(t, sch.IsSchemaReference())
assert.Equal(t, "wat", sch.GetSchemaReference())
@@ -67,7 +69,7 @@ func TestSchemaProxy_Build_HashInline(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- err := sch.Build(nil, idxNode.Content[0], nil)
+ err := sch.Build(context.Background(), nil, idxNode.Content[0], nil)
assert.NoError(t, err)
assert.False(t, sch.IsSchemaReference())
assert.NotNil(t, sch.Schema())
@@ -89,9 +91,73 @@ x-common-definitions:
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- err := sch.Build(nil, idxNode.Content[0], nil)
+ err := sch.Build(context.Background(), nil, idxNode.Content[0], nil)
assert.NoError(t, err)
assert.Len(t, sch.Schema().Enum.Value, 3)
assert.Equal(t, "The type of life cycle", sch.Schema().Description.Value)
}
+
+func TestSchemaProxy_GetSchemaReferenceLocation(t *testing.T) {
+
+ yml := `type: object
+properties:
+ name:
+ type: string
+ description: thing`
+
+ var idxNodeA yaml.Node
+ e := yaml.Unmarshal([]byte(yml), &idxNodeA)
+ assert.NoError(t, e)
+
+ yml = `
+type: object
+properties:
+ name:
+ type: string
+ description: thang`
+
+ var schA SchemaProxy
+ var schB SchemaProxy
+ var schC SchemaProxy
+ var idxNodeB yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &idxNodeB)
+
+ c := index.CreateOpenAPIIndexConfig()
+ rolo := index.NewRolodex(c)
+ rolo.SetRootNode(&idxNodeA)
+ _ = rolo.IndexTheRolodex()
+
+ err := schA.Build(context.Background(), nil, idxNodeA.Content[0], rolo.GetRootIndex())
+ assert.NoError(t, err)
+ err = schB.Build(context.Background(), nil, idxNodeB.Content[0].Content[3].Content[1], rolo.GetRootIndex())
+ assert.NoError(t, err)
+
+ rolo.GetRootIndex().SetAbsolutePath("/rooty/rootster")
+ origin := schA.GetSchemaReferenceLocation()
+ assert.NotNil(t, origin)
+ assert.Equal(t, "/rooty/rootster", origin.AbsoluteLocation)
+
+ // mess things up so it cannot be found
+ schA.vn = schB.vn
+ origin = schA.GetSchemaReferenceLocation()
+ assert.Nil(t, origin)
+
+ // create a new index
+ idx := index.NewSpecIndexWithConfig(&idxNodeB, c)
+ idx.SetAbsolutePath("/boaty/mcboatface")
+
+ // add the index to the rolodex
+ rolo.AddIndex(idx)
+
+ // can now find the origin
+ origin = schA.GetSchemaReferenceLocation()
+ assert.NotNil(t, origin)
+ assert.Equal(t, "/boaty/mcboatface", origin.AbsoluteLocation)
+
+ // do it again, but with no index
+ err = schC.Build(context.Background(), nil, idxNodeA.Content[0], nil)
+ origin = schC.GetSchemaReferenceLocation()
+ assert.Nil(t, origin)
+
+}
diff --git a/datamodel/low/base/schema_test.go b/datamodel/low/base/schema_test.go
index 12cb132..2945d0d 100644
--- a/datamodel/low/base/schema_test.go
+++ b/datamodel/low/base/schema_test.go
@@ -1,15 +1,15 @@
package base
import (
- "testing"
-
+ "context"
+ "github.com/pb33f/libopenapi/datamodel"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/pb33f/libopenapi/orderedmap"
- "github.com/pb33f/libopenapi/resolver"
"github.com/pb33f/libopenapi/utils"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
+ "testing"
)
func test_get_schema_blob() string {
@@ -167,10 +167,10 @@ func Test_Schema(t *testing.T) {
mbErr := low.BuildModel(rootNode.Content[0], &sch)
assert.NoError(t, mbErr)
- schErr := sch.Build(rootNode.Content[0], nil)
+ schErr := sch.Build(context.Background(), rootNode.Content[0], nil)
assert.NoError(t, schErr)
assert.Equal(t, "something object", sch.Description.Value)
- assert.True(t, sch.AdditionalProperties.Value.(bool))
+ assert.True(t, sch.AdditionalProperties.Value.B)
assert.Equal(t, 2, orderedmap.Len(sch.Properties.Value))
v := sch.FindProperty("somethingB")
@@ -343,7 +343,7 @@ func TestSchemaAllOfSequenceOrder(t *testing.T) {
mbErr := low.BuildModel(rootNode.Content[0], &sch)
assert.NoError(t, mbErr)
- schErr := sch.Build(rootNode.Content[0], nil)
+ schErr := sch.Build(context.Background(), rootNode.Content[0], nil)
assert.NoError(t, schErr)
assert.Equal(t, "allOf sequence check", sch.Description.Value)
@@ -363,13 +363,13 @@ func TestSchema_Hash(t *testing.T) {
_ = yaml.Unmarshal([]byte(testSpec), &sc1n)
sch1 := Schema{}
_ = low.BuildModel(&sc1n, &sch1)
- _ = sch1.Build(sc1n.Content[0], nil)
+ _ = sch1.Build(context.Background(), sc1n.Content[0], nil)
var sc2n yaml.Node
_ = yaml.Unmarshal([]byte(testSpec), &sc2n)
sch2 := Schema{}
_ = low.BuildModel(&sc2n, &sch2)
- _ = sch2.Build(sc2n.Content[0], nil)
+ _ = sch2.Build(context.Background(), sc2n.Content[0], nil)
assert.Equal(t, sch1.Hash(), sch2.Hash())
}
@@ -381,13 +381,13 @@ func BenchmarkSchema_Hash(b *testing.B) {
_ = yaml.Unmarshal([]byte(testSpec), &sc1n)
sch1 := Schema{}
_ = low.BuildModel(&sc1n, &sch1)
- _ = sch1.Build(sc1n.Content[0], nil)
+ _ = sch1.Build(context.Background(), sc1n.Content[0], nil)
var sc2n yaml.Node
_ = yaml.Unmarshal([]byte(testSpec), &sc2n)
sch2 := Schema{}
_ = low.BuildModel(&sc2n, &sch2)
- _ = sch2.Build(sc2n.Content[0], nil)
+ _ = sch2.Build(context.Background(), sc2n.Content[0], nil)
for i := 0; i < b.N; i++ {
assert.Equal(b, sch1.Hash(), sch2.Hash())
@@ -417,7 +417,7 @@ const: tasty`
mbErr := low.BuildModel(rootNode.Content[0], &sch)
assert.NoError(t, mbErr)
- schErr := sch.Build(rootNode.Content[0], nil)
+ schErr := sch.Build(context.Background(), rootNode.Content[0], nil)
assert.NoError(t, schErr)
assert.Equal(t, "something object", sch.Description.Value)
assert.Len(t, sch.Type.Value.B, 2)
@@ -458,7 +458,7 @@ properties:
_ = yaml.Unmarshal([]byte(yml), &idxNode)
var n Schema
- err := n.Build(idxNode.Content[0], idx)
+ err := n.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "this is something", n.FindProperty("aValue").Value.Schema().Description.Value)
}
@@ -484,7 +484,7 @@ properties:
_ = yaml.Unmarshal([]byte(yml), &idxNode)
var n Schema
- err := n.Build(idxNode.Content[0], idx)
+ err := n.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -509,7 +509,7 @@ dependentSchemas:
_ = yaml.Unmarshal([]byte(yml), &idxNode)
var n Schema
- err := n.Build(idxNode.Content[0], idx)
+ err := n.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -534,7 +534,7 @@ patternProperties:
_ = yaml.Unmarshal([]byte(yml), &idxNode)
var n Schema
- err := n.Build(idxNode.Content[0], idx)
+ err := n.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -574,7 +574,7 @@ items:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, schErr)
desc := "poly thing"
@@ -621,7 +621,7 @@ items:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, schErr)
}
@@ -661,7 +661,7 @@ items:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, schErr)
desc := "poly thing"
@@ -708,7 +708,7 @@ items:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, schErr)
}
@@ -734,7 +734,7 @@ allOf:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, schErr)
}
@@ -760,7 +760,7 @@ allOf:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, schErr)
}
@@ -788,7 +788,7 @@ allOf:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, schErr)
assert.Nil(t, sch.AllOf.Value[0].Value.Schema()) // child can't be resolved, so this will be nil.
assert.Error(t, sch.AllOf.Value[0].Value.GetBuildError())
@@ -818,7 +818,7 @@ allOf:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, schErr)
desc := "madness"
@@ -849,7 +849,7 @@ allOf:
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- err = sch.Build(idxNode.Content[0], idx)
+ err = sch.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -877,7 +877,7 @@ func Test_Schema_Polymorphism_RefMadnessIllegal(t *testing.T) {
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, schErr)
}
@@ -902,14 +902,14 @@ func Test_Schema_RefMadnessIllegal_Circular(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, schErr)
}
@@ -934,14 +934,14 @@ func Test_Schema_RefMadnessIllegal_Nonexist(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
err := low.BuildModel(&idxNode, &sch)
assert.NoError(t, err)
- schErr := sch.Build(idxNode.Content[0], idx)
+ schErr := sch.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, schErr)
}
@@ -966,7 +966,7 @@ func TestExtractSchema(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], idx)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.NotNil(t, res.Value)
aValue := res.Value.Schema().FindProperty("aValue")
@@ -982,7 +982,7 @@ schema:
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], nil)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], nil)
assert.NoError(t, err)
assert.NotNil(t, res.Value)
sch := res.Value.Schema()
@@ -998,7 +998,7 @@ schema:
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], nil)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], nil)
assert.NoError(t, err)
assert.NotNil(t, res.Value)
sch := res.Value.Schema()
@@ -1023,7 +1023,7 @@ func TestExtractSchema_Ref(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], idx)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.NotNil(t, res.Value)
assert.Equal(t, "this is something", res.Value.Schema().Description.Value)
@@ -1047,7 +1047,7 @@ func TestExtractSchema_Ref_Fail(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- _, err := ExtractSchema(idxNode.Content[0], idx)
+ _, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -1075,14 +1075,14 @@ func TestExtractSchema_CheckChildPropCircular(t *testing.T) {
yml = `$ref: '#/components/schemas/Something'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], idx)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.NotNil(t, res.Value)
@@ -1107,7 +1107,7 @@ func TestExtractSchema_RefRoot(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], idx)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.NotNil(t, res.Value)
assert.Equal(t, "this is something", res.Value.Schema().Description.Value)
@@ -1130,7 +1130,7 @@ func TestExtractSchema_RefRoot_Fail(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- _, err := ExtractSchema(idxNode.Content[0], idx)
+ _, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -1150,7 +1150,7 @@ func TestExtractSchema_RefRoot_Child_Fail(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- _, err := ExtractSchema(idxNode.Content[0], idx)
+ _, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -1171,32 +1171,9 @@ func TestExtractSchema_AdditionalPropertiesAsSchema(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], idx)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
- assert.NotNil(t, res.Value.Schema().AdditionalProperties.Value.(*SchemaProxy).Schema())
- assert.Nil(t, err)
-}
-
-func TestExtractSchema_AdditionalPropertiesAsSchemaSlice(t *testing.T) {
- yml := `components:
- schemas:
- Something:
- additionalProperties:
- - nice: rice`
-
- var iNode yaml.Node
- mErr := yaml.Unmarshal([]byte(yml), &iNode)
- assert.NoError(t, mErr)
- idx := index.NewSpecIndex(&iNode)
-
- yml = `$ref: '#/components/schemas/Something'`
-
- var idxNode yaml.Node
- _ = yaml.Unmarshal([]byte(yml), &idxNode)
-
- res, err := ExtractSchema(idxNode.Content[0], idx)
-
- assert.NotNil(t, res.Value.Schema().AdditionalProperties.Value.([]low.ValueReference[interface{}]))
+ assert.NotNil(t, res.Value.Schema().AdditionalProperties.Value.A.Schema())
assert.Nil(t, err)
}
@@ -1216,7 +1193,7 @@ func TestExtractSchema_DoNothing(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], idx)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.Nil(t, res)
assert.Nil(t, err)
}
@@ -1244,8 +1221,8 @@ func TestExtractSchema_AdditionalProperties_Ref(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], idx)
- assert.NotNil(t, res.Value.Schema().AdditionalProperties.Value.(*SchemaProxy).Schema())
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
+ assert.NotNil(t, res.Value.Schema().AdditionalProperties.Value.A.Schema())
assert.Nil(t, err)
}
@@ -1358,7 +1335,7 @@ func TestExtractSchema_OneOfRef(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, err := ExtractSchema(idxNode.Content[0], idx)
+ res, err := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "a frosty cold beverage can be coke or sprite",
res.Value.Schema().OneOf.Value[0].Value.Schema().Description.Value)
@@ -1379,7 +1356,7 @@ func TestSchema_Hash_Equal(t *testing.T) {
uniqueItems: 1
maxProperties: 10
minProperties: 1
- additionalProperties: anything
+ additionalProperties: true
description: milky
contentEncoding: rubber shoes
contentMediaType: paper tiger
@@ -1421,7 +1398,7 @@ func TestSchema_Hash_Equal(t *testing.T) {
uniqueItems: 1
maxProperties: 10
minProperties: 1
- additionalProperties: anything
+ additionalProperties: true
description: milky
contentEncoding: rubber shoes
contentMediaType: paper tiger
@@ -1451,8 +1428,8 @@ func TestSchema_Hash_Equal(t *testing.T) {
_ = yaml.Unmarshal([]byte(left), &lNode)
_ = yaml.Unmarshal([]byte(right), &rNode)
- lDoc, _ := ExtractSchema(lNode.Content[0], nil)
- rDoc, _ := ExtractSchema(rNode.Content[0], nil)
+ lDoc, _ := ExtractSchema(context.Background(), lNode.Content[0], nil)
+ rDoc, _ := ExtractSchema(context.Background(), rNode.Content[0], nil)
assert.NotNil(t, lDoc)
assert.NotNil(t, rDoc)
@@ -1476,8 +1453,8 @@ func TestSchema_Hash_AdditionalPropsSlice(t *testing.T) {
_ = yaml.Unmarshal([]byte(left), &lNode)
_ = yaml.Unmarshal([]byte(right), &rNode)
- lDoc, _ := ExtractSchema(lNode.Content[0], nil)
- rDoc, _ := ExtractSchema(rNode.Content[0], nil)
+ lDoc, _ := ExtractSchema(context.Background(), lNode.Content[0], nil)
+ rDoc, _ := ExtractSchema(context.Background(), rNode.Content[0], nil)
assert.NotNil(t, lDoc)
assert.NotNil(t, rDoc)
@@ -1501,8 +1478,8 @@ func TestSchema_Hash_AdditionalPropsSliceNoMap(t *testing.T) {
_ = yaml.Unmarshal([]byte(left), &lNode)
_ = yaml.Unmarshal([]byte(right), &rNode)
- lDoc, _ := ExtractSchema(lNode.Content[0], nil)
- rDoc, _ := ExtractSchema(rNode.Content[0], nil)
+ lDoc, _ := ExtractSchema(context.Background(), lNode.Content[0], nil)
+ rDoc, _ := ExtractSchema(context.Background(), rNode.Content[0], nil)
assert.NotNil(t, lDoc)
assert.NotNil(t, rDoc)
@@ -1538,8 +1515,8 @@ func TestSchema_Hash_NotEqual(t *testing.T) {
_ = yaml.Unmarshal([]byte(left), &lNode)
_ = yaml.Unmarshal([]byte(right), &rNode)
- lDoc, _ := ExtractSchema(lNode.Content[0], nil)
- rDoc, _ := ExtractSchema(rNode.Content[0], nil)
+ lDoc, _ := ExtractSchema(context.Background(), lNode.Content[0], nil)
+ rDoc, _ := ExtractSchema(context.Background(), rNode.Content[0], nil)
assert.False(t, low.AreEqual(lDoc.Value.Schema(), rDoc.Value.Schema()))
}
@@ -1575,8 +1552,8 @@ func TestSchema_Hash_EqualJumbled(t *testing.T) {
_ = yaml.Unmarshal([]byte(left), &lNode)
_ = yaml.Unmarshal([]byte(right), &rNode)
- lDoc, _ := ExtractSchema(lNode.Content[0], nil)
- rDoc, _ := ExtractSchema(rNode.Content[0], nil)
+ lDoc, _ := ExtractSchema(context.Background(), lNode.Content[0], nil)
+ rDoc, _ := ExtractSchema(context.Background(), rNode.Content[0], nil)
assert.True(t, low.AreEqual(lDoc.Value.Schema(), rDoc.Value.Schema()))
}
@@ -1609,10 +1586,10 @@ func TestSchema_UnevaluatedPropertiesAsBool_DefinedAsTrue(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, _ := ExtractSchema(idxNode.Content[0], idx)
+ res, _ := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.True(t, res.Value.Schema().UnevaluatedProperties.Value.IsB())
- assert.True(t, *res.Value.Schema().UnevaluatedProperties.Value.B)
+ assert.True(t, res.Value.Schema().UnevaluatedProperties.Value.B)
assert.Equal(t, "571bd1853c22393131e2dcadce86894da714ec14968895c8b7ed18154b2be8cd",
low.GenerateHashString(res.Value.Schema().UnevaluatedProperties.Value))
@@ -1634,10 +1611,10 @@ func TestSchema_UnevaluatedPropertiesAsBool_DefinedAsFalse(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, _ := ExtractSchema(idxNode.Content[0], idx)
+ res, _ := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.True(t, res.Value.Schema().UnevaluatedProperties.Value.IsB())
- assert.False(t, *res.Value.Schema().UnevaluatedProperties.Value.B)
+ assert.False(t, res.Value.Schema().UnevaluatedProperties.Value.B)
}
func TestSchema_UnevaluatedPropertiesAsBool_Undefined(t *testing.T) {
@@ -1656,7 +1633,186 @@ func TestSchema_UnevaluatedPropertiesAsBool_Undefined(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- res, _ := ExtractSchema(idxNode.Content[0], idx)
+ res, _ := ExtractSchema(context.Background(), idxNode.Content[0], idx)
assert.Nil(t, res.Value.Schema().UnevaluatedProperties.Value)
}
+
+func TestSchema_ExclusiveMinimum_3_with_Config(t *testing.T) {
+ yml := `openapi: 3.0.3
+components:
+ schemas:
+ Something:
+ type: integer
+ minimum: 3
+ exclusiveMinimum: true`
+
+ var iNode yaml.Node
+ mErr := yaml.Unmarshal([]byte(yml), &iNode)
+ assert.NoError(t, mErr)
+
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.0,
+ }
+
+ idx := index.NewSpecIndexWithConfig(&iNode, config)
+
+ yml = `$ref: '#/components/schemas/Something'`
+
+ var idxNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &idxNode)
+
+ res, _ := ExtractSchema(context.Background(), idxNode.Content[0], idx)
+
+ assert.True(t, res.Value.Schema().ExclusiveMinimum.Value.A)
+}
+
+func TestSchema_ExclusiveMinimum_31_with_Config(t *testing.T) {
+ yml := `openapi: 3.1
+components:
+ schemas:
+ Something:
+ type: integer
+ minimum: 3
+ exclusiveMinimum: 3`
+
+ var iNode yaml.Node
+ mErr := yaml.Unmarshal([]byte(yml), &iNode)
+ assert.NoError(t, mErr)
+
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.1,
+ }
+
+ idx := index.NewSpecIndexWithConfig(&iNode, config)
+
+ yml = `$ref: '#/components/schemas/Something'`
+
+ var idxNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &idxNode)
+
+ res, _ := ExtractSchema(context.Background(), idxNode.Content[0], idx)
+
+ assert.Equal(t, 3.0, res.Value.Schema().ExclusiveMinimum.Value.B)
+}
+
+func TestSchema_ExclusiveMaximum_3_with_Config(t *testing.T) {
+ yml := `openapi: 3.0.3
+components:
+ schemas:
+ Something:
+ type: integer
+ maximum: 3
+ exclusiveMaximum: true`
+
+ var iNode yaml.Node
+ mErr := yaml.Unmarshal([]byte(yml), &iNode)
+ assert.NoError(t, mErr)
+
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.0,
+ }
+
+ idx := index.NewSpecIndexWithConfig(&iNode, config)
+
+ yml = `$ref: '#/components/schemas/Something'`
+
+ var idxNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &idxNode)
+
+ res, _ := ExtractSchema(context.Background(), idxNode.Content[0], idx)
+
+ assert.True(t, res.Value.Schema().ExclusiveMaximum.Value.A)
+}
+
+func TestSchema_ExclusiveMaximum_31_with_Config(t *testing.T) {
+ yml := `openapi: 3.1
+components:
+ schemas:
+ Something:
+ type: integer
+ maximum: 3
+ exclusiveMaximum: 3`
+
+ var iNode yaml.Node
+ mErr := yaml.Unmarshal([]byte(yml), &iNode)
+ assert.NoError(t, mErr)
+
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.1,
+ }
+
+ idx := index.NewSpecIndexWithConfig(&iNode, config)
+
+ yml = `$ref: '#/components/schemas/Something'`
+
+ var idxNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &idxNode)
+
+ res, _ := ExtractSchema(context.Background(), idxNode.Content[0], idx)
+
+ assert.Equal(t, 3.0, res.Value.Schema().ExclusiveMaximum.Value.B)
+}
+
+func TestSchema_EmptySchemaRef(t *testing.T) {
+ yml := `openapi: 3.0.3
+components:
+ schemas:
+ Something:
+ $ref: ''`
+
+ var iNode yaml.Node
+ mErr := yaml.Unmarshal([]byte(yml), &iNode)
+ assert.NoError(t, mErr)
+
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.0,
+ }
+
+ idx := index.NewSpecIndexWithConfig(&iNode, config)
+
+ yml = `schema:
+ $ref: ''`
+
+ var idxNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &idxNode)
+
+ res, e := ExtractSchema(context.Background(), idxNode.Content[0], idx)
+ assert.Nil(t, res)
+ assert.Equal(t, "schema build failed: reference '[empty]' cannot be found at line 2, col 9", e.Error())
+
+}
+
+func TestSchema_EmptyRef(t *testing.T) {
+ yml := `openapi: 3.0.3
+components:
+ schemas:
+ Something:
+ $ref: ''`
+
+ var iNode yaml.Node
+ mErr := yaml.Unmarshal([]byte(yml), &iNode)
+ assert.NoError(t, mErr)
+
+ config := index.CreateOpenAPIIndexConfig()
+ config.SpecInfo = &datamodel.SpecInfo{
+ VersionNumeric: 3.0,
+ }
+
+ idx := index.NewSpecIndexWithConfig(&iNode, config)
+
+ yml = `$ref: ''`
+
+ var idxNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &idxNode)
+
+ res, e := ExtractSchema(context.Background(), idxNode.Content[0], idx)
+ assert.Nil(t, res)
+ assert.Equal(t, "schema build failed: reference '[empty]' cannot be found at line 1, col 7", e.Error())
+
+}
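The tests above read the either/or style fields through their A and B sides; the mapping assumed here (A carries the 3.0-flavoured value, B the 3.1-flavoured value) is inferred from those assertions, and res stands in for the result of any ExtractSchema call.

    s := res.Value.Schema()
    if s.ExclusiveMinimum.Value != nil {
        if s.ExclusiveMinimum.Value.IsB() {
            // 3.1 style: exclusiveMinimum is a number
            fmt.Println("exclusiveMinimum:", s.ExclusiveMinimum.Value.B)
        } else {
            // 3.0 style: exclusiveMinimum is a boolean paired with minimum
            fmt.Println("exclusiveMinimum enabled:", s.ExclusiveMinimum.Value.A)
        }
    }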
diff --git a/datamodel/low/base/security_requirement.go b/datamodel/low/base/security_requirement.go
index cb28551..c621a4b 100644
--- a/datamodel/low/base/security_requirement.go
+++ b/datamodel/low/base/security_requirement.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -30,7 +31,7 @@ type SecurityRequirement struct {
}
// Build will extract security requirements from the node (the structure is odd, to be honest)
-func (s *SecurityRequirement) Build(_, root *yaml.Node, _ *index.SpecIndex) error {
+func (s *SecurityRequirement) Build(_ context.Context, _, root *yaml.Node, _ *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
s.Reference = new(low.Reference)
diff --git a/datamodel/low/base/security_requirement_test.go b/datamodel/low/base/security_requirement_test.go
index 396ab4f..cd6253c 100644
--- a/datamodel/low/base/security_requirement_test.go
+++ b/datamodel/low/base/security_requirement_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"testing"
"github.com/pb33f/libopenapi/orderedmap"
@@ -35,8 +36,8 @@ one:
var idxNode2 yaml.Node
_ = yaml.Unmarshal([]byte(yml2), &idxNode2)
- _ = sr.Build(nil, idxNode.Content[0], nil)
- _ = sr2.Build(nil, idxNode2.Content[0], nil)
+ _ = sr.Build(context.Background(), nil, idxNode.Content[0], nil)
+ _ = sr2.Build(context.Background(), nil, idxNode2.Content[0], nil)
assert.Equal(t, 2, orderedmap.Len(sr.Requirements.Value))
assert.Len(t, sr.GetKeys(), 2)
diff --git a/datamodel/low/base/tag.go b/datamodel/low/base/tag.go
index bb702ae..0cdea0d 100644
--- a/datamodel/low/base/tag.go
+++ b/datamodel/low/base/tag.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -34,14 +35,14 @@ func (t *Tag) FindExtension(ext string) *low.ValueReference[any] {
}
// Build will extract extensions and external docs for the Tag.
-func (t *Tag) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (t *Tag) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
t.Reference = new(low.Reference)
t.Extensions = low.ExtractExtensions(root)
// extract externalDocs
- extDocs, err := low.ExtractObject[*ExternalDoc](ExternalDocsLabel, root, idx)
+ extDocs, err := low.ExtractObject[*ExternalDoc](ctx, ExternalDocsLabel, root, idx)
t.ExternalDocs = extDocs
return err
}
@@ -73,25 +74,3 @@ func (t *Tag) Hash() [32]byte {
f = append(f, keys...)
return sha256.Sum256([]byte(strings.Join(f, "|")))
}
-
-// TODO: future mutation API experiment code is here. this snippet is to re-marshal the object.
-//func (t *Tag) MarshalYAML() (interface{}, error) {
-// m := make(map[string]interface{})
-// for i := range t.Extensions {
-// m[i.Value] = t.Extensions[i].Value
-// }
-// if t.Name.Value != "" {
-// m[NameLabel] = t.Name.Value
-// }
-// if t.Description.Value != "" {
-// m[DescriptionLabel] = t.Description.Value
-// }
-// if t.ExternalDocs.Value != nil {
-// m[ExternalDocsLabel] = t.ExternalDocs.Value
-// }
-// return m, nil
-//}
-//
-//func NewTag() *Tag {
-// return new(Tag)
-//}
diff --git a/datamodel/low/base/tag_test.go b/datamodel/low/base/tag_test.go
index f765f00..a53b628 100644
--- a/datamodel/low/base/tag_test.go
+++ b/datamodel/low/base/tag_test.go
@@ -4,6 +4,7 @@
package base
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -27,7 +28,7 @@ x-coffee: tasty`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "a tag", n.Name.Value)
assert.Equal(t, "a description", n.Description.Value)
@@ -52,7 +53,7 @@ externalDocs:
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -79,8 +80,8 @@ x-b33f: princess`
var rDoc Tag
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
assert.Equal(t, lDoc.Hash(), rDoc.Hash())
diff --git a/datamodel/low/extraction_functions.go b/datamodel/low/extraction_functions.go
index f5143f5..0c920f4 100644
--- a/datamodel/low/extraction_functions.go
+++ b/datamodel/low/extraction_functions.go
@@ -4,6 +4,7 @@
package low
import (
+ "context"
"crypto/sha256"
"fmt"
"reflect"
@@ -15,6 +16,8 @@ import (
"github.com/pb33f/libopenapi/utils"
"github.com/vmware-labs/yaml-jsonpath/pkg/yamlpath"
"gopkg.in/yaml.v3"
+ "net/url"
+ "path/filepath"
)
// FindItemInMap accepts a string key and a collection of KeyReference[string] and ValueReference[T]. Every
@@ -64,26 +67,18 @@ func generateIndexCollection(idx *index.SpecIndex) []func() map[string]*index.Re
}
}
-// LocateRefNode will perform a complete lookup for a $ref node. This function searches the entire index for
-// the reference being supplied. If there is a match found, the reference *yaml.Node is returned.
-func LocateRefNode(root *yaml.Node, idx *index.SpecIndex) (*yaml.Node, error) {
+func LocateRefNodeWithContext(ctx context.Context, root *yaml.Node, idx *index.SpecIndex) (*yaml.Node, *index.SpecIndex, error, context.Context) {
+
if rf, _, rv := utils.IsNodeRefValue(root); rf {
+ if rv == "" {
+ return nil, nil, fmt.Errorf("reference at line %d, column %d is empty, it cannot be resolved",
+ root.Line, root.Column), ctx
+ }
+
// run through everything and return as soon as we find a match.
// this operates as fast as possible as ever
collections := generateIndexCollection(idx)
-
- // if there are any external indexes being used by remote
- // documents, then we need to search through them also.
- externalIndexes := idx.GetAllExternalIndexes()
- if len(externalIndexes) > 0 {
- var extCollection []func() map[string]*index.Reference
- for _, extIndex := range externalIndexes {
- extCollection = generateIndexCollection(extIndex)
- collections = append(collections, extCollection...)
- }
- }
-
var found map[string]*index.Reference
for _, collection := range collections {
found = collection()
@@ -94,23 +89,107 @@ func LocateRefNode(root *yaml.Node, idx *index.SpecIndex) (*yaml.Node, error) {
if jh, _, _ := utils.IsNodeRefValue(found[rv].Node); jh {
// if this node is circular, stop drop and roll.
if !IsCircular(found[rv].Node, idx) {
- return LocateRefNode(found[rv].Node, idx)
+ return LocateRefNodeWithContext(ctx, found[rv].Node, idx)
} else {
- return found[rv].Node, fmt.Errorf("circular reference '%s' found during lookup at line "+
+ return found[rv].Node, idx, fmt.Errorf("circular reference '%s' found during lookup at line "+
"%d, column %d, It cannot be resolved",
GetCircularReferenceResult(found[rv].Node, idx).GenerateJourneyPath(),
found[rv].Node.Line,
- found[rv].Node.Column)
+ found[rv].Node.Column), ctx
}
}
- return utils.NodeAlias(found[rv].Node), nil
+ return utils.NodeAlias(found[rv].Node), idx, nil, ctx
}
}
// perform a search for the reference in the index
- foundRefs := idx.SearchIndexForReference(rv)
- if len(foundRefs) > 0 {
- return utils.NodeAlias(foundRefs[0].Node), nil
+ // extract the correct root
+ specPath := idx.GetSpecAbsolutePath()
+ if ctx.Value(index.CurrentPathKey) != nil {
+ specPath = ctx.Value(index.CurrentPathKey).(string)
+ }
+
+ explodedRefValue := strings.Split(rv, "#")
+ if len(explodedRefValue) == 2 {
+ if !strings.HasPrefix(explodedRefValue[0], "http") {
+
+ if !filepath.IsAbs(explodedRefValue[0]) {
+
+ if strings.HasPrefix(specPath, "http") {
+ u, _ := url.Parse(specPath)
+ p := ""
+ if u.Path != "" && explodedRefValue[0] != "" {
+ p = filepath.Dir(u.Path)
+ }
+ if p != "" && explodedRefValue[0] != "" {
+ u.Path = filepath.Join(p, explodedRefValue[0])
+ }
+ u.Fragment = ""
+ rv = fmt.Sprintf("%s#%s", u.String(), explodedRefValue[1])
+
+ } else {
+ if specPath != "" {
+ var abs string
+ if explodedRefValue[0] == "" {
+ abs = specPath
+ } else {
+ abs, _ = filepath.Abs(filepath.Join(filepath.Dir(specPath), explodedRefValue[0]))
+ }
+ rv = fmt.Sprintf("%s#%s", abs, explodedRefValue[1])
+ } else {
+
+ // check for a config baseURL and use that if it exists.
+ if idx.GetConfig().BaseURL != nil {
+
+ u := *idx.GetConfig().BaseURL
+ p := ""
+ if u.Path != "" {
+ p = filepath.Dir(u.Path)
+ }
+ u.Path = filepath.Join(p, explodedRefValue[0])
+ rv = fmt.Sprintf("%s#%s", u.String(), explodedRefValue[1])
+ }
+ }
+ }
+ }
+ }
+ } else {
+
+ if !strings.HasPrefix(explodedRefValue[0], "http") {
+
+ if !filepath.IsAbs(explodedRefValue[0]) {
+
+ if strings.HasPrefix(specPath, "http") {
+ u, _ := url.Parse(specPath)
+ p := filepath.Dir(u.Path)
+ abs, _ := filepath.Abs(filepath.Join(p, rv))
+ u.Path = abs
+ rv = u.String()
+
+ } else {
+ if specPath != "" {
+
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(specPath), rv))
+ rv = abs
+
+ } else {
+
+ // check for a config baseURL and use that if it exists.
+ if idx.GetConfig().BaseURL != nil {
+ u := *idx.GetConfig().BaseURL
+ abs, _ := filepath.Abs(filepath.Join(u.Path, rv))
+ u.Path = abs
+ rv = u.String()
+ }
+ }
+ }
+ }
+ }
+ }
+
+ foundRef, fIdx, newCtx := idx.SearchIndexForReferenceWithContext(ctx, rv)
+ if foundRef != nil {
+ return utils.NodeAlias(foundRef.Node), fIdx, nil, newCtx
}
// let's try something else to find our references.
@@ -123,30 +202,40 @@ func LocateRefNode(root *yaml.Node, idx *index.SpecIndex) (*yaml.Node, error) {
nodes, fErr := path.Find(idx.GetRootNode())
if fErr == nil {
if len(nodes) > 0 {
- return utils.NodeAlias(nodes[0]), nil
+ return utils.NodeAlias(nodes[0]), idx, nil, ctx
}
}
}
}
- return nil, fmt.Errorf("reference '%s' at line %d, column %d was not found",
- rv, root.Line, root.Column)
+ return nil, idx, fmt.Errorf("reference '%s' at line %d, column %d was not found",
+ rv, root.Line, root.Column), ctx
}
- return nil, nil
+ return nil, idx, nil, ctx
+
+}
+
+// LocateRefNode will perform a complete lookup for a $ref node. This function searches the entire index for
+// the reference being supplied. If a match is found, the reference *yaml.Node is returned along with the SpecIndex it was located in.
+func LocateRefNode(root *yaml.Node, idx *index.SpecIndex) (*yaml.Node, *index.SpecIndex, error) {
+ r, i, e, _ := LocateRefNodeWithContext(context.Background(), root, idx)
+ return r, i, e
}
// ExtractObjectRaw will extract a typed Buildable[N] object from a root yaml.Node. The 'raw' aspect is
// that there is no NodeReference wrapper around the result returned, just the raw object.
-func ExtractObjectRaw[T Buildable[N], N any](key, root *yaml.Node, idx *index.SpecIndex) (T, error, bool, string) {
+func ExtractObjectRaw[T Buildable[N], N any](ctx context.Context, key, root *yaml.Node, idx *index.SpecIndex) (T, error, bool, string) {
var circError error
var isReference bool
var referenceValue string
root = utils.NodeAlias(root)
if h, _, rv := utils.IsNodeRefValue(root); h {
- ref, err := LocateRefNode(root, idx)
+ ref, fIdx, err, nCtx := LocateRefNodeWithContext(ctx, root, idx)
if ref != nil {
root = ref
isReference = true
referenceValue = rv
+ idx = fIdx
+ ctx = nCtx
if err != nil {
circError = err
}
@@ -161,7 +250,7 @@ func ExtractObjectRaw[T Buildable[N], N any](key, root *yaml.Node, idx *index.Sp
if err != nil {
return n, err, isReference, referenceValue
}
- err = n.Build(key, root, idx)
+ err = n.Build(ctx, key, root, idx)
if err != nil {
return n, err, isReference, referenceValue
}
@@ -180,19 +269,21 @@ func ExtractObjectRaw[T Buildable[N], N any](key, root *yaml.Node, idx *index.Sp
// ExtractObject will extract a typed Buildable[N] object from a root yaml.Node. The result is wrapped in a
// NodeReference[T] that contains the key node found and value node found when looking up the reference.
-func ExtractObject[T Buildable[N], N any](label string, root *yaml.Node, idx *index.SpecIndex) (NodeReference[T], error) {
+func ExtractObject[T Buildable[N], N any](ctx context.Context, label string, root *yaml.Node, idx *index.SpecIndex) (NodeReference[T], error) {
var ln, vn *yaml.Node
var circError error
var isReference bool
var referenceValue string
root = utils.NodeAlias(root)
if rf, rl, refVal := utils.IsNodeRefValue(root); rf {
- ref, err := LocateRefNode(root, idx)
+ ref, fIdx, err, nCtx := LocateRefNodeWithContext(ctx, root, idx)
if ref != nil {
vn = ref
ln = rl
isReference = true
referenceValue = refVal
+ idx = fIdx
+ ctx = nCtx
if err != nil {
circError = err
}
@@ -205,9 +296,13 @@ func ExtractObject[T Buildable[N], N any](label string, root *yaml.Node, idx *in
_, ln, vn = utils.FindKeyNodeFull(label, root.Content)
if vn != nil {
if h, _, rVal := utils.IsNodeRefValue(vn); h {
- ref, lerr := LocateRefNode(vn, idx)
+ ref, fIdx, lerr, nCtx := LocateRefNodeWithContext(ctx, vn, idx)
if ref != nil {
vn = ref
+ if fIdx != nil {
+ idx = fIdx
+ }
+ ctx = nCtx
isReference = true
referenceValue = rVal
if lerr != nil {
@@ -229,7 +324,7 @@ func ExtractObject[T Buildable[N], N any](label string, root *yaml.Node, idx *in
if ln == nil {
return NodeReference[T]{}, nil
}
- err = n.Build(ln, vn, idx)
+ err = n.Build(ctx, ln, vn, idx)
if err != nil {
return NodeReference[T]{}, err
}
@@ -265,17 +360,21 @@ func SetReference(obj any, ref string) {
// ExtractArray will extract a slice of []ValueReference[T] from a root yaml.Node that is defined as a sequence.
// Used when the value being extracted is an array.
-func ExtractArray[T Buildable[N], N any](label string, root *yaml.Node, idx *index.SpecIndex) ([]ValueReference[T],
+func ExtractArray[T Buildable[N], N any](ctx context.Context, label string, root *yaml.Node, idx *index.SpecIndex) ([]ValueReference[T],
*yaml.Node, *yaml.Node, error,
) {
var ln, vn *yaml.Node
var circError error
root = utils.NodeAlias(root)
+ isRef := false
if rf, rl, _ := utils.IsNodeRefValue(root); rf {
- ref, err := LocateRefNode(root, idx)
+ ref, fIdx, err, nCtx := LocateRefEnd(ctx, root, idx, 0)
if ref != nil {
+ isRef = true
vn = ref
ln = rl
+ idx = fIdx
+ ctx = nCtx
if err != nil {
circError = err
}
@@ -287,17 +386,20 @@ func ExtractArray[T Buildable[N], N any](label string, root *yaml.Node, idx *ind
_, ln, vn = utils.FindKeyNodeFullTop(label, root.Content)
if vn != nil {
if h, _, _ := utils.IsNodeRefValue(vn); h {
- ref, err := LocateRefNode(vn, idx)
+ ref, fIdx, err, nCtx := LocateRefEnd(ctx, vn, idx, 0)
if ref != nil {
+ isRef = true
vn = ref
- //referenceValue = rVal
+ idx = fIdx
+ ctx = nCtx
if err != nil {
circError = err
}
} else {
if err != nil {
- return []ValueReference[T]{}, nil, nil, fmt.Errorf("array build failed: reference cannot be found: %s",
- err.Error())
+ return []ValueReference[T]{}, nil, nil,
+ fmt.Errorf("array build failed: reference cannot be found: %s",
+ err.Error())
}
}
}
@@ -307,18 +409,33 @@ func ExtractArray[T Buildable[N], N any](label string, root *yaml.Node, idx *ind
var items []ValueReference[T]
if vn != nil && ln != nil {
if !utils.IsNodeArray(vn) {
- return []ValueReference[T]{}, nil, nil, fmt.Errorf("array build failed, input is not an array, line %d, column %d", vn.Line, vn.Column)
+
+ if !isRef {
+ return []ValueReference[T]{}, nil, nil,
+ fmt.Errorf("array build failed, input is not an array, line %d, column %d", vn.Line, vn.Column)
+ }
+ // if this was pulled from a ref, but it's not a sequence, check the label and see if anything comes out,
+ // and then check that it is a sequence; if not, fail it.
+ _, _, fvn := utils.FindKeyNodeFullTop(label, vn.Content)
+ if fvn != nil {
+ if !utils.IsNodeArray(vn) {
+ return []ValueReference[T]{}, nil, nil,
+ fmt.Errorf("array build failed, input is not an array, line %d, column %d", vn.Line, vn.Column)
+ }
+ }
}
for _, node := range vn.Content {
localReferenceValue := ""
- //localIsReference := false
+ foundCtx := ctx
+ foundIndex := idx
if rf, _, rv := utils.IsNodeRefValue(node); rf {
- refg, err := LocateRefNode(node, idx)
+ refg, fIdx, err, nCtx := LocateRefEnd(ctx, node, idx, 0)
if refg != nil {
node = refg
- //localIsReference = true
localReferenceValue = rv
+ foundIndex = fIdx
+ foundCtx = nCtx
if err != nil {
circError = err
}
@@ -334,7 +451,7 @@ func ExtractArray[T Buildable[N], N any](label string, root *yaml.Node, idx *ind
if err != nil {
return []ValueReference[T]{}, ln, vn, err
}
- berr := n.Build(ln, node, idx)
+ berr := n.Build(foundCtx, ln, node, foundIndex)
if berr != nil {
return nil, ln, vn, berr
}
@@ -381,6 +498,7 @@ func ExtractExample(expNode, expLabel *yaml.Node) NodeReference[any] {
//
// This is useful when the node to be extracted, is already known and does not require a search.
func ExtractMapNoLookupExtensions[PT Buildable[N], N any](
+ ctx context.Context,
root *yaml.Node,
idx *index.SpecIndex,
includeExtensions bool,
@@ -417,15 +535,22 @@ func ExtractMapNoLookupExtensions[PT Buildable[N], N any](
}
node = utils.NodeAlias(node)
+ foundIndex := idx
+ foundContext := ctx
+
var isReference bool
var referenceValue string
// if value is a reference, we have to look it up in the index!
if h, _, rv := utils.IsNodeRefValue(node); h {
- ref, err := LocateRefNode(node, idx)
+ ref, fIdx, err, nCtx := LocateRefNodeWithContext(ctx, node, idx)
if ref != nil {
node = ref
isReference = true
referenceValue = rv
+ if fIdx != nil {
+ foundIndex = fIdx
+ }
+ foundContext = nCtx
if err != nil {
circError = err
}
@@ -435,13 +560,12 @@ func ExtractMapNoLookupExtensions[PT Buildable[N], N any](
}
}
}
-
var n PT = new(N)
err := BuildModel(node, n)
if err != nil {
return nil, err
}
- berr := n.Build(currentKey, node, idx)
+ berr := n.Build(foundContext, currentKey, node, foundIndex)
if berr != nil {
return nil, berr
}
@@ -457,7 +581,6 @@ func ExtractMapNoLookupExtensions[PT Buildable[N], N any](
ValueReference[PT]{
Value: n,
ValueNode: node,
- //IsReference: isReference,
Reference: referenceValue,
},
)
@@ -477,10 +600,11 @@ func ExtractMapNoLookupExtensions[PT Buildable[N], N any](
//
// This is useful when the node to be extracted, is already known and does not require a search.
func ExtractMapNoLookup[PT Buildable[N], N any](
+ ctx context.Context,
root *yaml.Node,
idx *index.SpecIndex,
) (orderedmap.Map[KeyReference[string], ValueReference[PT]], error) {
- return ExtractMapNoLookupExtensions[PT, N](root, idx, false)
+ return ExtractMapNoLookupExtensions[PT, N](ctx, root, idx, false)
}
type mappingResult[T any] struct {
@@ -495,24 +619,25 @@ type mappingResult[T any] struct {
// The second return value is the yaml.Node found for the 'label' and the third return value is the yaml.Node
// found for the value extracted from the label node.
func ExtractMapExtensions[PT Buildable[N], N any](
+ ctx context.Context,
label string,
root *yaml.Node,
idx *index.SpecIndex,
extensions bool,
) (orderedmap.Map[KeyReference[string], ValueReference[PT]], *yaml.Node, *yaml.Node, error) {
- //var isReference bool
var referenceValue string
var labelNode, valueNode *yaml.Node
var circError error
root = utils.NodeAlias(root)
if rf, rl, rv := utils.IsNodeRefValue(root); rf {
// locate reference in index.
- ref, err := LocateRefNode(root, idx)
+ ref, fIdx, err, fCtx := LocateRefNodeWithContext(ctx, root, idx)
if ref != nil {
valueNode = ref
labelNode = rl
- //isReference = true
referenceValue = rv
+ ctx = fCtx
+ idx = fIdx
if err != nil {
circError = err
}
@@ -522,13 +647,15 @@ func ExtractMapExtensions[PT Buildable[N], N any](
}
} else {
_, labelNode, valueNode = utils.FindKeyNodeFull(label, root.Content)
+ valueNode = utils.NodeAlias(valueNode)
if valueNode != nil {
if h, _, rvt := utils.IsNodeRefValue(valueNode); h {
- ref, err := LocateRefNode(valueNode, idx)
+ ref, fIdx, err, nCtx := LocateRefNodeWithContext(ctx, valueNode, idx)
if ref != nil {
valueNode = ref
- //isReference = true
referenceValue = rvt
+ idx = fIdx
+ ctx = nCtx
if err != nil {
circError = err
}
@@ -549,19 +676,17 @@ func ExtractMapExtensions[PT Buildable[N], N any](
bChan := make(chan mappingResult[PT])
eChan := make(chan error)
- buildMap := func(label *yaml.Node, value *yaml.Node, c chan mappingResult[PT], ec chan<- error, ref string) {
+ buildMap := func(nctx context.Context, label *yaml.Node, value *yaml.Node, c chan mappingResult[PT], ec chan<- error, ref string, fIdx *index.SpecIndex) {
var n PT = new(N)
value = utils.NodeAlias(value)
_ = BuildModel(value, n)
- err := n.Build(label, value, idx)
+ err := n.Build(nctx, label, value, fIdx)
if err != nil {
ec <- err
return
}
- //isRef := false
if ref != "" {
- //isRef = true
SetReference(n, ref)
}
@@ -573,7 +698,6 @@ func ExtractMapExtensions[PT Buildable[N], N any](
v: ValueReference[PT]{
Value: n,
ValueNode: value,
- //IsReference: isRef,
Reference: ref,
},
}
@@ -587,12 +711,20 @@ func ExtractMapExtensions[PT Buildable[N], N any](
currentLabelNode = en
continue
}
+
+ foundIndex := idx
+ foundContext := ctx
+
// check our valueNode isn't a reference still.
if h, _, refVal := utils.IsNodeRefValue(en); h {
- ref, err := LocateRefNode(en, idx)
+ ref, fIdx, err, nCtx := LocateRefNodeWithContext(ctx, en, idx)
if ref != nil {
en = ref
referenceValue = refVal
+ if fIdx != nil {
+ foundIndex = fIdx
+ }
+ foundContext = nCtx
if err != nil {
circError = err
}
@@ -610,7 +742,7 @@ func ExtractMapExtensions[PT Buildable[N], N any](
}
}
totalKeys++
- go buildMap(currentLabelNode, en, bChan, eChan, referenceValue)
+ go buildMap(foundContext, currentLabelNode, en, bChan, eChan, referenceValue, foundIndex)
}
completedKeys := 0
@@ -637,11 +769,12 @@ func ExtractMapExtensions[PT Buildable[N], N any](
// The second return value is the yaml.Node found for the 'label' and the third return value is the yaml.Node
// found for the value extracted from the label node.
func ExtractMap[PT Buildable[N], N any](
+ ctx context.Context,
label string,
root *yaml.Node,
idx *index.SpecIndex,
) (orderedmap.Map[KeyReference[string], ValueReference[PT]], *yaml.Node, *yaml.Node, error) {
- return ExtractMapExtensions[PT, N](label, root, idx, false)
+ return ExtractMapExtensions[PT, N](ctx, label, root, idx, false)
}
// ExtractExtensions will extract any 'x-' prefixed key nodes from a root node into a map. Requirements have been pre-cast:
@@ -714,6 +847,14 @@ func AreEqual(l, r Hashable) bool {
if l == nil || r == nil {
return false
}
+ vol := reflect.ValueOf(l)
+ vor := reflect.ValueOf(r)
+
+ if vol.Kind() != reflect.Struct && vor.Kind() != reflect.Struct {
+ if vol.IsNil() || vor.IsNil() {
+ return false
+ }
+ }
return l.Hash() == r.Hash()
}
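The reflect guard added above exists to cover typed-nil values hidden behind the Hashable interface; a hedged illustration of that case follows, with package qualifiers assumed and Schema standing in for any Hashable implementation.

    var s *base.Schema     // typed nil pointer
    var h low.Hashable = s // the interface value itself is non-nil
    _ = low.AreEqual(h, h) // previously this would call Hash() on a nil receiver and likely panic;
                           // with the guard above it simply returns false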
@@ -734,3 +875,23 @@ func GenerateHashString(v any) string {
}
return fmt.Sprintf(HASH, sha256.Sum256([]byte(fmt.Sprint(v))))
}
+
+// LocateRefEnd will perform a complete lookup for a $ref node. This function searches the entire index for
+// the reference being supplied. If a match is found, the reference *yaml.Node is returned.
+// The function operates recursively and will keep following references until it reaches a non-reference
+// node.
+func LocateRefEnd(ctx context.Context, root *yaml.Node, idx *index.SpecIndex, depth int) (*yaml.Node, *index.SpecIndex, error, context.Context) {
+ depth++
+ if depth > 100 {
+ return nil, nil, fmt.Errorf("reference resolution depth exceeded, possible circular reference"), ctx
+ }
+ ref, fIdx, err, nCtx := LocateRefNodeWithContext(ctx, root, idx)
+ if err != nil {
+ return ref, fIdx, err, nCtx
+ }
+ if rf, _, _ := utils.IsNodeRefValue(ref); rf {
+ return LocateRefEnd(nCtx, ref, fIdx, depth)
+ } else {
+ return ref, fIdx, err, nCtx
+ }
+}
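A rough sketch of how the context-aware lookup above is intended to be driven; index.CurrentPathKey and the return order come from this file, while refNode, idx and the path value are illustrative assumptions.

    ctx := context.WithValue(context.Background(), index.CurrentPathKey, "/specs/root.yaml")
    node, foundIdx, err, newCtx := low.LocateRefNodeWithContext(ctx, refNode, idx)
    if err == nil && node != nil {
        _ = foundIdx // the index the reference was actually resolved against
        _ = newCtx   // carries the path of the file the node came from, for nested lookups
    }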
diff --git a/datamodel/low/extraction_functions_test.go b/datamodel/low/extraction_functions_test.go
index dba721d..344c31e 100644
--- a/datamodel/low/extraction_functions_test.go
+++ b/datamodel/low/extraction_functions_test.go
@@ -4,17 +4,20 @@
package low
import (
+ "context"
"crypto/sha256"
"fmt"
+ "golang.org/x/sync/syncmap"
+ "gopkg.in/yaml.v3"
+ "net/url"
"os"
+ "path/filepath"
"strings"
"testing"
"github.com/pb33f/libopenapi/index"
"github.com/pb33f/libopenapi/orderedmap"
- "github.com/pb33f/libopenapi/resolver"
"github.com/stretchr/testify/assert"
- "gopkg.in/yaml.v3"
)
func TestFindItemInMap(t *testing.T) {
@@ -64,7 +67,7 @@ func TestLocateRefNode(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- located, _ := LocateRefNode(cNode.Content[0], idx)
+ located, _, _ := LocateRefNode(cNode.Content[0], idx)
assert.NotNil(t, located)
}
@@ -86,7 +89,7 @@ func TestLocateRefNode_BadNode(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- located, err := LocateRefNode(cNode.Content[0], idx)
+ located, _, err := LocateRefNode(cNode.Content[0], idx)
// should both be empty.
assert.Nil(t, located)
@@ -110,7 +113,7 @@ func TestLocateRefNode_Path(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- located, _ := LocateRefNode(cNode.Content[0], idx)
+ located, _, _ := LocateRefNode(cNode.Content[0], idx)
assert.NotNil(t, located)
}
@@ -131,7 +134,7 @@ func TestLocateRefNode_Path_NotFound(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- located, err := LocateRefNode(cNode.Content[0], idx)
+ located, _, err := LocateRefNode(cNode.Content[0], idx)
assert.Nil(t, located)
assert.Error(t, err)
@@ -141,7 +144,7 @@ type pizza struct {
Description NodeReference[string]
}
-func (p *pizza) Build(_, _ *yaml.Node, _ *index.SpecIndex) error {
+func (p *pizza) Build(_ context.Context, _, _ *yaml.Node, _ *index.SpecIndex) error {
return nil
}
@@ -163,7 +166,7 @@ func TestExtractObject(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- tag, err := ExtractObject[*pizza]("tags", &cNode, idx)
+ tag, err := ExtractObject[*pizza](context.Background(), "tags", &cNode, idx)
assert.NoError(t, err)
assert.NotNil(t, tag)
assert.Equal(t, "hello pizza", tag.Value.Description.Value)
@@ -187,7 +190,7 @@ func TestExtractObject_Ref(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- tag, err := ExtractObject[*pizza]("tags", &cNode, idx)
+ tag, err := ExtractObject[*pizza](context.Background(), "tags", &cNode, idx)
assert.NoError(t, err)
assert.NotNil(t, tag)
assert.Equal(t, "hello pizza", tag.Value.Description.Value)
@@ -213,7 +216,7 @@ func TestExtractObject_DoubleRef(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- tag, err := ExtractObject[*pizza]("tags", &cNode, idx)
+ tag, err := ExtractObject[*pizza](context.Background(), "tags", &cNode, idx)
assert.NoError(t, err)
assert.NotNil(t, tag)
assert.Equal(t, "cake time!", tag.Value.Description.Value)
@@ -235,7 +238,7 @@ func TestExtractObject_DoubleRef_Circular(t *testing.T) {
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
// circular references are detected by the resolver, so lets run it!
- resolv := resolver.NewResolver(idx)
+ resolv := index.NewResolver(idx)
assert.Len(t, resolv.CheckForCircularReferences(), 1)
yml = `tags:
@@ -244,7 +247,7 @@ func TestExtractObject_DoubleRef_Circular(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- _, err := ExtractObject[*pizza]("tags", &cNode, idx)
+ _, err := ExtractObject[*pizza](context.Background(), "tags", &cNode, idx)
assert.Error(t, err)
assert.Equal(t, "cake -> loopy -> cake", idx.GetCircularReferences()[0].GenerateJourneyPath())
}
@@ -265,7 +268,7 @@ func TestExtractObject_DoubleRef_Circular_Fail(t *testing.T) {
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
// circular references are detected by the resolver, so lets run it!
- resolv := resolver.NewResolver(idx)
+ resolv := index.NewResolver(idx)
assert.Len(t, resolv.CheckForCircularReferences(), 1)
yml = `tags:
@@ -274,7 +277,7 @@ func TestExtractObject_DoubleRef_Circular_Fail(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- _, err := ExtractObject[*pizza]("tags", &cNode, idx)
+ _, err := ExtractObject[*pizza](context.Background(), "tags", &cNode, idx)
assert.Error(t, err)
}
@@ -295,7 +298,7 @@ func TestExtractObject_DoubleRef_Circular_Direct(t *testing.T) {
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
// circular references are detected by the resolver, so lets run it!
- resolv := resolver.NewResolver(idx)
+ resolv := index.NewResolver(idx)
assert.Len(t, resolv.CheckForCircularReferences(), 1)
yml = `$ref: '#/components/schemas/pizza'`
@@ -303,7 +306,7 @@ func TestExtractObject_DoubleRef_Circular_Direct(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- _, err := ExtractObject[*pizza]("tags", cNode.Content[0], idx)
+ _, err := ExtractObject[*pizza](context.Background(), "tags", cNode.Content[0], idx)
assert.Error(t, err)
assert.Equal(t, "cake -> loopy -> cake", idx.GetCircularReferences()[0].GenerateJourneyPath())
}
@@ -325,7 +328,7 @@ func TestExtractObject_DoubleRef_Circular_Direct_Fail(t *testing.T) {
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
// circular references are detected by the resolver, so lets run it!
- resolv := resolver.NewResolver(idx)
+ resolv := index.NewResolver(idx)
assert.Len(t, resolv.CheckForCircularReferences(), 1)
yml = `$ref: '#/components/schemas/why-did-westworld-have-to-end-so-poorly-ffs'`
@@ -333,7 +336,7 @@ func TestExtractObject_DoubleRef_Circular_Direct_Fail(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- _, err := ExtractObject[*pizza]("tags", cNode.Content[0], idx)
+ _, err := ExtractObject[*pizza](context.Background(), "tags", cNode.Content[0], idx)
assert.Error(t, err)
}
@@ -342,7 +345,7 @@ type test_borked struct {
DontWork int
}
-func (t test_borked) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (t test_borked) Build(_ context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
return fmt.Errorf("I am always going to fail, every thing")
}
@@ -350,7 +353,7 @@ type test_noGood struct {
DontWork int
}
-func (t *test_noGood) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (t *test_noGood) Build(_ context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
return fmt.Errorf("I am always going to fail a core build")
}
@@ -358,7 +361,7 @@ type test_almostGood struct {
AlmostWork NodeReference[int]
}
-func (t *test_almostGood) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (t *test_almostGood) Build(_ context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
return fmt.Errorf("I am always going to fail a build out")
}
@@ -366,7 +369,7 @@ type test_Good struct {
AlmostWork NodeReference[int]
}
-func (t *test_Good) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (t *test_Good) Build(_ context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
return nil
}
@@ -386,7 +389,7 @@ func TestExtractObject_BadLowLevelModel(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- _, err := ExtractObject[*test_noGood]("thing", &cNode, idx)
+ _, err := ExtractObject[*test_noGood](context.Background(), "thing", &cNode, idx)
assert.Error(t, err)
}
@@ -407,7 +410,7 @@ func TestExtractObject_BadBuild(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- _, err := ExtractObject[*test_almostGood]("thing", &cNode, idx)
+ _, err := ExtractObject[*test_almostGood](context.Background(), "thing", &cNode, idx)
assert.Error(t, err)
}
@@ -428,7 +431,7 @@ func TestExtractObject_BadLabel(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- res, err := ExtractObject[*test_almostGood]("ding", &cNode, idx)
+ res, err := ExtractObject[*test_almostGood](context.Background(), "ding", &cNode, idx)
assert.Nil(t, res.Value)
assert.NoError(t, err)
@@ -450,7 +453,7 @@ func TestExtractObject_PathIsCircular(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -461,7 +464,7 @@ func TestExtractObject_PathIsCircular(t *testing.T) {
mErr = yaml.Unmarshal([]byte(yml), &rootNode)
assert.NoError(t, mErr)
- res, err := ExtractObject[*test_Good]("thing", &rootNode, idx)
+ res, err := ExtractObject[*test_Good](context.Background(), "thing", &rootNode, idx)
assert.NotNil(t, res.Value)
assert.Error(t, err) // circular error would have been thrown.
@@ -486,7 +489,7 @@ func TestExtractObject_PathIsCircular_IgnoreErrors(t *testing.T) {
// disable circular ref checking.
idx.SetAllowCircularReferenceResolving(true)
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -497,7 +500,7 @@ func TestExtractObject_PathIsCircular_IgnoreErrors(t *testing.T) {
mErr = yaml.Unmarshal([]byte(yml), &rootNode)
assert.NoError(t, mErr)
- res, err := ExtractObject[*test_Good]("thing", &rootNode, idx)
+ res, err := ExtractObject[*test_Good](context.Background(), "thing", &rootNode, idx)
assert.NotNil(t, res.Value)
assert.NoError(t, err) // circular error would have been thrown, but we're ignoring them.
@@ -520,7 +523,7 @@ func TestExtractObjectRaw(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- tag, err, _, _ := ExtractObjectRaw[*pizza](nil, cNode.Content[0], idx)
+ tag, err, _, _ := ExtractObjectRaw[*pizza](context.Background(), nil, cNode.Content[0], idx)
assert.NoError(t, err)
assert.NotNil(t, tag)
assert.Equal(t, "hello pizza", tag.Description.Value)
@@ -543,7 +546,7 @@ func TestExtractObjectRaw_With_Ref(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- tag, err, isRef, rv := ExtractObjectRaw[*pizza](nil, cNode.Content[0], idx)
+ tag, err, isRef, rv := ExtractObjectRaw[*pizza](context.Background(), nil, cNode.Content[0], idx)
assert.NoError(t, err)
assert.NotNil(t, tag)
assert.Equal(t, "hello", tag.Description.Value)
@@ -564,7 +567,7 @@ func TestExtractObjectRaw_Ref_Circular(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -573,7 +576,7 @@ func TestExtractObjectRaw_Ref_Circular(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- tag, err, _, _ := ExtractObjectRaw[*pizza](nil, cNode.Content[0], idx)
+ tag, err, _, _ := ExtractObjectRaw[*pizza](context.Background(), nil, cNode.Content[0], idx)
assert.Error(t, err)
assert.NotNil(t, tag)
@@ -595,7 +598,7 @@ func TestExtractObjectRaw_RefBroken(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- tag, err, _, _ := ExtractObjectRaw[*pizza](nil, cNode.Content[0], idx)
+ tag, err, _, _ := ExtractObjectRaw[*pizza](context.Background(), nil, cNode.Content[0], idx)
assert.Error(t, err)
assert.Nil(t, tag)
@@ -617,7 +620,7 @@ func TestExtractObjectRaw_Ref_NonBuildable(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- _, err, _, _ := ExtractObjectRaw[*test_noGood](nil, cNode.Content[0], idx)
+ _, err, _, _ := ExtractObjectRaw[*test_noGood](context.Background(), nil, cNode.Content[0], idx)
assert.Error(t, err)
}
@@ -638,7 +641,7 @@ func TestExtractObjectRaw_Ref_AlmostBuildable(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- _, err, _, _ := ExtractObjectRaw[*test_almostGood](nil, cNode.Content[0], idx)
+ _, err, _, _ := ExtractObjectRaw[*test_almostGood](context.Background(), nil, cNode.Content[0], idx)
assert.Error(t, err)
}
@@ -663,7 +666,7 @@ func TestExtractArray(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- things, _, _, err := ExtractArray[*pizza]("things", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*pizza](context.Background(), "things", cNode.Content[0], idx)
assert.NoError(t, err)
assert.NotNil(t, things)
assert.Equal(t, "one", things[0].Value.Description.Value)
@@ -690,7 +693,7 @@ func TestExtractArray_Ref(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- things, _, _, err := ExtractArray[*pizza]("things", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*pizza](context.Background(), "things", cNode.Content[0], idx)
assert.NoError(t, err)
assert.NotNil(t, things)
assert.Equal(t, "one", things[0].Value.Description.Value)
@@ -717,7 +720,7 @@ func TestExtractArray_Ref_Unbuildable(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- things, _, _, err := ExtractArray[*test_noGood]("", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*test_noGood](context.Background(), "", cNode.Content[0], idx)
assert.Error(t, err)
assert.Len(t, things, 0)
}
@@ -736,7 +739,7 @@ func TestExtractArray_Ref_Circular(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -745,9 +748,9 @@ func TestExtractArray_Ref_Circular(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- things, _, _, err := ExtractArray[*test_Good]("", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*test_Good](context.Background(), "", cNode.Content[0], idx)
assert.Error(t, err)
- assert.Len(t, things, 0)
+ assert.Len(t, things, 2)
}
func TestExtractArray_Ref_Bad(t *testing.T) {
@@ -764,7 +767,7 @@ func TestExtractArray_Ref_Bad(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -773,7 +776,7 @@ func TestExtractArray_Ref_Bad(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- things, _, _, err := ExtractArray[*test_Good]("", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*test_Good](context.Background(), "", cNode.Content[0], idx)
assert.Error(t, err)
assert.Len(t, things, 0)
}
@@ -792,7 +795,7 @@ func TestExtractArray_Ref_Nested(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -802,7 +805,7 @@ func TestExtractArray_Ref_Nested(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- things, _, _, err := ExtractArray[*test_Good]("limes", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*test_Good](context.Background(), "limes", cNode.Content[0], idx)
assert.Error(t, err)
assert.Len(t, things, 0)
}
@@ -821,7 +824,7 @@ func TestExtractArray_Ref_Nested_Circular(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -831,7 +834,7 @@ func TestExtractArray_Ref_Nested_Circular(t *testing.T) {
var cNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &cNode)
- things, _, _, err := ExtractArray[*test_Good]("limes", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*test_Good](context.Background(), "limes", cNode.Content[0], idx)
assert.Error(t, err)
assert.Len(t, things, 1)
}
@@ -858,7 +861,7 @@ func TestExtractArray_Ref_Nested_BadRef(t *testing.T) {
var cNode yaml.Node
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractArray[*test_Good]("limes", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*test_Good](context.Background(), "limes", cNode.Content[0], idx)
assert.Error(t, err)
assert.Len(t, things, 0)
}
@@ -877,7 +880,7 @@ func TestExtractArray_Ref_Nested_CircularFlat(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -887,9 +890,9 @@ func TestExtractArray_Ref_Nested_CircularFlat(t *testing.T) {
var cNode yaml.Node
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractArray[*test_Good]("limes", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*test_Good](context.Background(), "limes", cNode.Content[0], idx)
assert.Error(t, err)
- assert.Len(t, things, 0)
+ assert.Len(t, things, 2)
}
func TestExtractArray_BadBuild(t *testing.T) {
@@ -909,7 +912,30 @@ func TestExtractArray_BadBuild(t *testing.T) {
var cNode yaml.Node
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractArray[*test_noGood]("limes", cNode.Content[0], idx)
+ things, _, _, err := ExtractArray[*test_noGood](context.Background(), "limes", cNode.Content[0], idx)
+ assert.Error(t, err)
+ assert.Len(t, things, 0)
+}
+
+func TestExtractArray_BadRefPropsType(t *testing.T) {
+
+ yml := `components:
+ parameters:
+ cakes:
+ limes: cake`
+
+ var idxNode yaml.Node
+ mErr := yaml.Unmarshal([]byte(yml), &idxNode)
+ assert.NoError(t, mErr)
+ idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
+
+ yml = `limes:
+ $ref: '#/components/parameters/cakes'`
+
+ var cNode yaml.Node
+ e := yaml.Unmarshal([]byte(yml), &cNode)
+ assert.NoError(t, e)
+ things, _, _, err := ExtractArray[*test_noGood](context.Background(), "limes", cNode.Content[0], idx)
assert.Error(t, err)
assert.Len(t, things, 0)
}
@@ -968,7 +994,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookup[*test_Good](cNode.Content[0], idx)
+ things, err := ExtractMapNoLookup[*test_Good](context.Background(), cNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
@@ -991,7 +1017,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookupExtensions[*test_Good](cNode.Content[0], idx, true)
+ things, err := ExtractMapNoLookupExtensions[*test_Good](context.Background(), cNode.Content[0], idx, true)
assert.NoError(t, err)
assert.Equal(t, 2, orderedmap.Len(things))
@@ -1024,7 +1050,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookupExtensions[*test_Good](cNode.Content[0], idx, true)
+ things, err := ExtractMapNoLookupExtensions[*test_Good](context.Background(), cNode.Content[0], idx, true)
assert.NoError(t, err)
assert.Equal(t, 4, orderedmap.Len(things))
@@ -1047,7 +1073,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookupExtensions[*test_Good](cNode.Content[0], idx, false)
+ things, err := ExtractMapNoLookupExtensions[*test_Good](context.Background(), cNode.Content[0], idx, false)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
@@ -1073,7 +1099,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMapExtensions[*test_Good]("one", cNode.Content[0], idx, true)
+ things, _, _, err := ExtractMapExtensions[*test_Good](context.Background(), "one", cNode.Content[0], idx, true)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
}
@@ -1095,7 +1121,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMapExtensions[*test_Good]("one", cNode.Content[0], idx, false)
+ things, _, _, err := ExtractMapExtensions[*test_Good](context.Background(), "one", cNode.Content[0], idx, false)
assert.NoError(t, err)
assert.Zero(t, orderedmap.Len(things))
}
@@ -1120,7 +1146,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookup[*test_Good](cNode.Content[0], idx)
+ things, err := ExtractMapNoLookup[*test_Good](context.Background(), cNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
@@ -1146,7 +1172,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookup[*test_Good](cNode.Content[0], idx)
+ things, err := ExtractMapNoLookup[*test_Good](context.Background(), cNode.Content[0], idx)
assert.Error(t, err)
assert.Zero(t, orderedmap.Len(things))
@@ -1166,7 +1192,7 @@ func TestExtractMapFlatNoLookup_Ref_Circular(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -1178,7 +1204,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookup[*test_Good](cNode.Content[0], idx)
+ things, err := ExtractMapNoLookup[*test_Good](context.Background(), cNode.Content[0], idx)
assert.Error(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
@@ -1204,7 +1230,7 @@ hello:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookup[*test_noGood](cNode.Content[0], idx)
+ things, err := ExtractMapNoLookup[*test_noGood](context.Background(), cNode.Content[0], idx)
assert.Error(t, err)
assert.Zero(t, orderedmap.Len(things))
@@ -1230,7 +1256,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, err := ExtractMapNoLookup[*test_almostGood](cNode.Content[0], idx)
+ things, err := ExtractMapNoLookup[*test_almostGood](context.Background(), cNode.Content[0], idx)
assert.Error(t, err)
assert.Zero(t, orderedmap.Len(things))
@@ -1253,7 +1279,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "one", cNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
@@ -1280,7 +1306,7 @@ one:
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "one", cNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
@@ -1310,7 +1336,7 @@ func TestExtractMapFlat_DoubleRef(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "one", cNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
@@ -1340,7 +1366,7 @@ func TestExtractMapFlat_DoubleRef_Error(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_almostGood]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_almostGood](context.Background(), "one", cNode.Content[0], idx)
assert.Error(t, err)
assert.Zero(t, orderedmap.Len(things))
@@ -1367,7 +1393,7 @@ func TestExtractMapFlat_DoubleRef_Error_NotFound(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_almostGood]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_almostGood](context.Background(), "one", cNode.Content[0], idx)
assert.Error(t, err)
assert.Zero(t, orderedmap.Len(things))
@@ -1387,7 +1413,7 @@ func TestExtractMapFlat_DoubleRef_Circles(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -1399,7 +1425,7 @@ func TestExtractMapFlat_DoubleRef_Circles(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "one", cNode.Content[0], idx)
assert.Error(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
@@ -1426,7 +1452,7 @@ func TestExtractMapFlat_Ref_Error(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_almostGood]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_almostGood](context.Background(), "one", cNode.Content[0], idx)
assert.Error(t, err)
assert.Zero(t, orderedmap.Len(things))
@@ -1446,7 +1472,7 @@ func TestExtractMapFlat_Ref_Circ_Error(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -1456,7 +1482,7 @@ func TestExtractMapFlat_Ref_Circ_Error(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "one", cNode.Content[0], idx)
assert.Error(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
}
@@ -1475,7 +1501,7 @@ func TestExtractMapFlat_Ref_Nested_Circ_Error(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -1486,7 +1512,7 @@ func TestExtractMapFlat_Ref_Nested_Circ_Error(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "one", cNode.Content[0], idx)
assert.Error(t, err)
assert.Equal(t, 1, orderedmap.Len(things))
}
@@ -1512,7 +1538,7 @@ func TestExtractMapFlat_Ref_Nested_Error(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "one", cNode.Content[0], idx)
assert.Error(t, err)
assert.Zero(t, orderedmap.Len(things))
}
@@ -1538,7 +1564,7 @@ func TestExtractMapFlat_BadKey_Ref_Nested_Error(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("not-even-there", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "not-even-there", cNode.Content[0], idx)
assert.NoError(t, err)
assert.Zero(t, orderedmap.Len(things))
}
@@ -1557,7 +1583,7 @@ func TestExtractMapFlat_Ref_Bad(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -1567,43 +1593,11 @@ func TestExtractMapFlat_Ref_Bad(t *testing.T) {
e := yaml.Unmarshal([]byte(yml), &cNode)
assert.NoError(t, e)
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
+ things, _, _, err := ExtractMap[*test_Good](context.Background(), "one", cNode.Content[0], idx)
assert.Error(t, err)
assert.Zero(t, orderedmap.Len(things))
}
-func TestLocateRefNode_RemoteFile(t *testing.T) {
-
- ymlFile := fmt.Sprintf(`components:
- schemas:
- hey:
- $ref: '%s#/components/schemas/hey'`, "remote.yaml")
-
- ymlRemote := `components:
- schemas:
- hey:
- AlmostWork: 999`
-
- _ = os.WriteFile("remote.yaml", []byte(ymlRemote), 0665)
- defer os.Remove("remote.yaml")
-
- ymlLocal := `$ref: '#/components/schemas/hey'`
-
- var idxNode yaml.Node
- mErr := yaml.Unmarshal([]byte(ymlFile), &idxNode) // an empty index.
- assert.NoError(t, mErr)
- idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateOpenAPIIndexConfig())
-
- var cNode yaml.Node
- e := yaml.Unmarshal([]byte(ymlLocal), &cNode)
- assert.NoError(t, e)
-
- things, _, _, err := ExtractMap[*test_Good]("one", cNode.Content[0], idx)
- assert.NoError(t, err)
- assert.Equal(t, 1, orderedmap.Len(things))
-
-}
-
func TestExtractExtensions(t *testing.T) {
yml := `x-bing: ding
@@ -1657,8 +1651,15 @@ func (f test_fresh) Hash() [32]byte {
return sha256.Sum256([]byte(strings.Join(data, "|")))
}
func TestAreEqual(t *testing.T) {
+
+ var hey *test_fresh
+
assert.True(t, AreEqual(test_fresh{val: "hello"}, test_fresh{val: "hello"}))
+ assert.True(t, AreEqual(&test_fresh{val: "hello"}, &test_fresh{val: "hello"}))
assert.False(t, AreEqual(test_fresh{val: "hello"}, test_fresh{val: "goodbye"}))
+ assert.False(t, AreEqual(&test_fresh{val: "hello"}, &test_fresh{val: "goodbye"}))
+ assert.False(t, AreEqual(nil, &test_fresh{val: "goodbye"}))
+ assert.False(t, AreEqual(&test_fresh{val: "hello"}, hey))
assert.False(t, AreEqual(nil, nil))
}
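
The new assertions pin down AreEqual's semantics for Hashable values: pointer and value forms hash the same way, while a nil on either side is never considered equal, not even to another nil. A small sketch of the assumed behaviour, reusing the test_fresh fixture above:

    same := AreEqual(&test_fresh{val: "hello"}, &test_fresh{val: "hello"}) // true: identical hashes
    diff := AreEqual(test_fresh{val: "hello"}, test_fresh{val: "goodbye"}) // false: hashes differ
    nils := AreEqual(nil, nil)                                             // false: nil is never equal
    _, _, _ = same, diff, nils
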
@@ -1712,3 +1713,493 @@ func TestSetReference_nil(t *testing.T) {
SetReference(nil, "#/pigeon/street")
assert.NotEqual(t, "#/pigeon/street", n.GetReference())
}
+
+func TestLocateRefNode_CurrentPathKey_HttpLink(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "http://cakes.com/nice#/components/schemas/thing",
+ },
+ },
+ }
+
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, "http://cakes.com#/components/schemas/thing")
+
+ idx := index.NewSpecIndexWithConfig(&no, index.CreateClosedAPIIndexConfig())
+ n, i, e, c := LocateRefNodeWithContext(ctx, &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_CurrentPathKey_HttpLink_Local(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: ".#/components/schemas/thing",
+ },
+ },
+ }
+
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, "http://cakes.com/nice/rice#/components/schemas/thing")
+
+ idx := index.NewSpecIndexWithConfig(&no, index.CreateClosedAPIIndexConfig())
+ n, i, e, c := LocateRefNodeWithContext(ctx, &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_CurrentPathKey_HttpLink_RemoteCtx(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "#/components/schemas/thing",
+ },
+ },
+ }
+
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, "https://cakes.com#/components/schemas/thing")
+ idx := index.NewSpecIndexWithConfig(&no, index.CreateClosedAPIIndexConfig())
+ n, i, e, c := LocateRefNodeWithContext(ctx, &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_CurrentPathKey_HttpLink_RemoteCtx_WithPath(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "#/components/schemas/thing",
+ },
+ },
+ }
+
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, "https://cakes.com/jazzzy/shoes#/components/schemas/thing")
+ idx := index.NewSpecIndexWithConfig(&no, index.CreateClosedAPIIndexConfig())
+ n, i, e, c := LocateRefNodeWithContext(ctx, &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_CurrentPathKey_Path_Link(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "yazzy.yaml#/components/schemas/thing",
+ },
+ },
+ }
+
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, "/jazzzy/shoes.yaml")
+ idx := index.NewSpecIndexWithConfig(&no, index.CreateClosedAPIIndexConfig())
+ n, i, e, c := LocateRefNodeWithContext(ctx, &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_CurrentPathKey_Path_URL(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "yazzy.yaml#/components/schemas/thing",
+ },
+ },
+ }
+
+ cf := index.CreateClosedAPIIndexConfig()
+ u, _ := url.Parse("https://herbs-and-coffee-in-the-fall.com")
+ cf.BaseURL = u
+ idx := index.NewSpecIndexWithConfig(&no, cf)
+ n, i, e, c := LocateRefNodeWithContext(context.Background(), &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_CurrentPathKey_DeeperPath_URL(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "slasshy/mazsshy/yazzy.yaml#/components/schemas/thing",
+ },
+ },
+ }
+
+ cf := index.CreateClosedAPIIndexConfig()
+ u, _ := url.Parse("https://herbs-and-coffee-in-the-fall.com/pizza/burgers")
+ cf.BaseURL = u
+ idx := index.NewSpecIndexWithConfig(&no, cf)
+ n, i, e, c := LocateRefNodeWithContext(context.Background(), &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_NoExplode(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "components/schemas/thing.yaml",
+ },
+ },
+ }
+
+ cf := index.CreateClosedAPIIndexConfig()
+ u, _ := url.Parse("http://smiledfdfdfdfds.com/bikes")
+ cf.BaseURL = u
+ idx := index.NewSpecIndexWithConfig(&no, cf)
+ n, i, e, c := LocateRefNodeWithContext(context.Background(), &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_NoExplode_HTTP(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "components/schemas/thing.yaml",
+ },
+ },
+ }
+
+ cf := index.CreateClosedAPIIndexConfig()
+ u, _ := url.Parse("http://smilfghfhfhfhfhes.com/bikes")
+ cf.BaseURL = u
+ idx := index.NewSpecIndexWithConfig(&no, cf)
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, "http://minty-fresh-shoes.com/nice/no.yaml")
+ n, i, e, c := LocateRefNodeWithContext(ctx, &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_NoExplode_NoSpecPath(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "components/schemas/thing.yaml",
+ },
+ },
+ }
+
+ cf := index.CreateClosedAPIIndexConfig()
+ u, _ := url.Parse("http://smilfghfhfhfhfhes.com/bikes")
+ cf.BaseURL = u
+ idx := index.NewSpecIndexWithConfig(&no, cf)
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, "no.yaml")
+ n, i, e, c := LocateRefNodeWithContext(ctx, &no, idx)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.NotNil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefNode_DoARealLookup(t *testing.T) {
+
+ no := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "/root.yaml#/components/schemas/Burger",
+ },
+ },
+ }
+
+ b, err := os.ReadFile("../../test_specs/burgershop.openapi.yaml")
+ if err != nil {
+ t.Fatal(err)
+ }
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(b, &rootNode)
+
+ cf := index.CreateClosedAPIIndexConfig()
+ u, _ := url.Parse("http://smilfghfhfhfhfhes.com/bikes")
+ cf.BaseURL = u
+ idx := index.NewSpecIndexWithConfig(&rootNode, cf)
+
+ // fake the cache so that a lookup for a file that does not exist will still work.
+ fakeCache := new(syncmap.Map)
+ fakeCache.Store("/root.yaml#/components/schemas/Burger", &index.Reference{Node: &no, Index: idx})
+ idx.SetCache(fakeCache)
+
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, "/root.yaml#/components/schemas/Burger")
+ n, i, e, c := LocateRefNodeWithContext(ctx, &no, idx)
+ assert.NotNil(t, n)
+ assert.NotNil(t, i)
+ assert.Nil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefEndNoRef_NoName(t *testing.T) {
+
+ r := &yaml.Node{Content: []*yaml.Node{{Kind: yaml.ScalarNode, Value: "$ref"}, {Kind: yaml.ScalarNode, Value: ""}}}
+ n, i, e, c := LocateRefEnd(nil, r, nil, 0)
+ assert.Nil(t, n)
+ assert.Nil(t, i)
+ assert.Error(t, e)
+ assert.Nil(t, c)
+}
+
+func TestLocateRefEndNoRef(t *testing.T) {
+
+ r := &yaml.Node{Content: []*yaml.Node{{Kind: yaml.ScalarNode, Value: "$ref"}, {Kind: yaml.ScalarNode, Value: "cake"}}}
+ n, i, e, c := LocateRefEnd(context.Background(), r, index.NewSpecIndexWithConfig(r, index.CreateClosedAPIIndexConfig()), 0)
+ assert.Nil(t, n)
+ assert.NotNil(t, i)
+ assert.Error(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefEnd_TooDeep(t *testing.T) {
+ r := &yaml.Node{Content: []*yaml.Node{{Kind: yaml.ScalarNode, Value: "$ref"}, {Kind: yaml.ScalarNode, Value: ""}}}
+ n, i, e, c := LocateRefEnd(nil, r, nil, 100)
+ assert.Nil(t, n)
+ assert.Nil(t, i)
+ assert.Error(t, e)
+ assert.Nil(t, c)
+}
+
+func TestLocateRefEnd_Loop(t *testing.T) {
+
+ yml, _ := os.ReadFile("../../test_specs/first.yaml")
+ var bsn yaml.Node
+ _ = yaml.Unmarshal(yml, &bsn)
+
+ cf := index.CreateOpenAPIIndexConfig()
+ cf.BasePath = "../../test_specs"
+
+ localFSConfig := &index.LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"first.yaml", "second.yaml", "third.yaml", "fourth.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+ localFs, _ := index.NewLocalFSWithConfig(localFSConfig)
+ rolo := index.NewRolodex(cf)
+ rolo.AddLocalFS(cf.BasePath, localFs)
+ rolo.SetRootNode(&bsn)
+ rolo.IndexTheRolodex()
+
+ idx := rolo.GetRootIndex()
+ loop := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "third.yaml#/properties/property/properties/statistics",
+ },
+ },
+ }
+
+ wd, _ := os.Getwd()
+ cp, _ := filepath.Abs(filepath.Join(wd, "../../test_specs/first.yaml"))
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, cp)
+ n, i, e, c := LocateRefEnd(ctx, &loop, idx, 0)
+ assert.NotNil(t, n)
+ assert.NotNil(t, i)
+ assert.Nil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefEnd_Loop_WithResolve(t *testing.T) {
+
+ yml, _ := os.ReadFile("../../test_specs/first.yaml")
+ var bsn yaml.Node
+ _ = yaml.Unmarshal(yml, &bsn)
+
+ cf := index.CreateOpenAPIIndexConfig()
+ cf.BasePath = "../../test_specs"
+
+ localFSConfig := &index.LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"first.yaml", "second.yaml", "third.yaml", "fourth.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+ localFs, _ := index.NewLocalFSWithConfig(localFSConfig)
+ rolo := index.NewRolodex(cf)
+ rolo.AddLocalFS(cf.BasePath, localFs)
+ rolo.SetRootNode(&bsn)
+ rolo.IndexTheRolodex()
+ rolo.Resolve()
+ idx := rolo.GetRootIndex()
+ loop := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "third.yaml#/properties/property/properties/statistics",
+ },
+ },
+ }
+
+ wd, _ := os.Getwd()
+ cp, _ := filepath.Abs(filepath.Join(wd, "../../test_specs/first.yaml"))
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, cp)
+ n, i, e, c := LocateRefEnd(ctx, &loop, idx, 0)
+ assert.NotNil(t, n)
+ assert.NotNil(t, i)
+ assert.Nil(t, e)
+ assert.NotNil(t, c)
+}
+
+func TestLocateRefEnd_Empty(t *testing.T) {
+
+ yml, _ := os.ReadFile("../../test_specs/first.yaml")
+ var bsn yaml.Node
+ _ = yaml.Unmarshal(yml, &bsn)
+
+ cf := index.CreateOpenAPIIndexConfig()
+ cf.BasePath = "../../test_specs"
+
+ localFSConfig := &index.LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"first.yaml", "second.yaml", "third.yaml", "fourth.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+ localFs, _ := index.NewLocalFSWithConfig(localFSConfig)
+ rolo := index.NewRolodex(cf)
+ rolo.AddLocalFS(cf.BasePath, localFs)
+ rolo.SetRootNode(&bsn)
+ rolo.IndexTheRolodex()
+ idx := rolo.GetRootIndex()
+ loop := yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Value: "$ref",
+ },
+ {
+ Kind: yaml.ScalarNode,
+ Value: "",
+ },
+ },
+ }
+
+ wd, _ := os.Getwd()
+ cp, _ := filepath.Abs(filepath.Join(wd, "../../test_specs/first.yaml"))
+ ctx := context.WithValue(context.Background(), index.CurrentPathKey, cp)
+ n, i, e, c := LocateRefEnd(ctx, &loop, idx, 0)
+ assert.Nil(t, n)
+ assert.Nil(t, i)
+ assert.Error(t, e)
+ assert.Equal(t, "reference at line 0, column 0 is empty, it cannot be resolved", e.Error())
+ assert.NotNil(t, c)
+}
+
+func TestArray_NotRefNotArray(t *testing.T) {
+
+ yml := ``
+ var idxNode yaml.Node
+ mErr := yaml.Unmarshal([]byte(yml), &idxNode)
+ assert.NoError(t, mErr)
+ idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())
+
+ yml = `limes:
+ not: array`
+
+ var cNode yaml.Node
+ e := yaml.Unmarshal([]byte(yml), &cNode)
+ assert.NoError(t, e)
+ things, _, _, err := ExtractArray[*test_noGood](context.Background(), "limes", cNode.Content[0], idx)
+ assert.Error(t, err)
+ assert.Equal(t, err.Error(), "array build failed, input is not an array, line 2, column 3")
+ assert.Len(t, things, 0)
+
+}
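
The pattern repeated throughout this test file is a two-part migration: every extraction helper (ExtractObject, ExtractObjectRaw, ExtractArray, ExtractMap and friends) now takes a context.Context as its first argument, and the standalone resolver has moved into the index package, so resolver.NewResolver(idx) becomes index.NewResolver(idx). A condensed sketch of the updated call pattern, reusing the pizza test type from above (the YAML literals are illustrative only):

    yml := `components:
      schemas:
        pizza:
          description: pizza`
    var idxNode yaml.Node
    _ = yaml.Unmarshal([]byte(yml), &idxNode)
    idx := index.NewSpecIndexWithConfig(&idxNode, index.CreateClosedAPIIndexConfig())

    // circular reference checks now come from the index package
    resolv := index.NewResolver(idx)
    _ = resolv.CheckForCircularReferences()

    yml = `tags:
      $ref: '#/components/schemas/pizza'`
    var cNode yaml.Node
    _ = yaml.Unmarshal([]byte(yml), &cNode)

    // extraction helpers gain a leading context.Context
    tag, err := ExtractObject[*pizza](context.Background(), "tags", &cNode, idx)
    _, _ = tag, err
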
diff --git a/datamodel/low/model_interfaces.go b/datamodel/low/model_interfaces.go
index e2f8296..7758da1 100644
--- a/datamodel/low/model_interfaces.go
+++ b/datamodel/low/model_interfaces.go
@@ -96,8 +96,6 @@ type OpenAPIParameter interface {
//TODO: this needs to be fixed, move returns to pointers.
type SharedOperations interface {
- //HasDescription
- //HasExternalDocs
GetOperationId() NodeReference[string]
GetExternalDocs() NodeReference[any]
GetDescription() NodeReference[string]
diff --git a/datamodel/low/reference.go b/datamodel/low/reference.go
index da45df2..34b6091 100644
--- a/datamodel/low/reference.go
+++ b/datamodel/low/reference.go
@@ -1,6 +1,7 @@
package low
import (
+ "context"
"fmt"
"github.com/pb33f/libopenapi/index"
"github.com/pb33f/libopenapi/utils"
@@ -38,7 +39,7 @@ type IsReferenced interface {
//
// Used by generic functions when automatically building out structs based on yaml.Node inputs.
type Buildable[T any] interface {
- Build(key, value *yaml.Node, idx *index.SpecIndex) error
+ Build(ctx context.Context, key, value *yaml.Node, idx *index.SpecIndex) error
*T
}
@@ -112,6 +113,8 @@ type NodeReference[T any] struct {
// If HasReference is true, then Reference contains the original $ref value.
Reference string
+
+ Context context.Context
}
// KeyReference is a low-level container for key nodes holding a Value of type T. A KeyNode is a pointer to the
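
For implementers, the widened Buildable contract (and the Context now carried on NodeReference) means every low-level model's Build method takes the context as its first parameter and should forward it to any nested extraction, so the current resolution path travels with the build. A minimal sketch using a hypothetical widget type, not a real model:

    type widget struct {
        Child NodeReference[*widget]
    }

    // Build satisfies Buildable[widget]; ctx is passed straight through to nested extractions.
    func (w *widget) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
        child, err := ExtractObject[*widget](ctx, "child", root, idx)
        if err != nil {
            return err
        }
        w.Child = child
        return nil
    }
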
diff --git a/datamodel/low/reference_test.go b/datamodel/low/reference_test.go
index 2a33f2e..c1129c7 100644
--- a/datamodel/low/reference_test.go
+++ b/datamodel/low/reference_test.go
@@ -11,7 +11,6 @@ import (
"testing"
"github.com/pb33f/libopenapi/index"
- "github.com/pb33f/libopenapi/resolver"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
)
@@ -124,14 +123,14 @@ func TestIsCircular_LookupFromJourney(t *testing.T) {
yml = `$ref: '#/components/schemas/Something'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- ref, err := LocateRefNode(idxNode.Content[0], idx)
+ ref, _, err := LocateRefNode(idxNode.Content[0], idx)
assert.NoError(t, err)
assert.True(t, IsCircular(ref, idx))
}
@@ -157,14 +156,14 @@ func TestIsCircular_LookupFromJourney_Optional(t *testing.T) {
yml = `$ref: '#/components/schemas/Something'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 0)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- ref, err := LocateRefNode(idxNode.Content[0], idx)
+ ref, _, err := LocateRefNode(idxNode.Content[0], idx)
assert.NoError(t, err)
assert.True(t, IsCircular(ref, idx))
}
@@ -193,14 +192,14 @@ func TestIsCircular_LookupFromLoopPoint(t *testing.T) {
yml = `$ref: '#/components/schemas/Nothing'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- ref, err := LocateRefNode(idxNode.Content[0], idx)
+ ref, _, err := LocateRefNode(idxNode.Content[0], idx)
assert.NoError(t, err)
assert.True(t, IsCircular(ref, idx))
}
@@ -225,14 +224,14 @@ func TestIsCircular_LookupFromLoopPoint_Optional(t *testing.T) {
yml = `$ref: '#/components/schemas/Nothing'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 0)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- ref, err := LocateRefNode(idxNode.Content[0], idx)
+ ref, _, err := LocateRefNode(idxNode.Content[0], idx)
assert.NoError(t, err)
assert.True(t, IsCircular(ref, idx))
}
@@ -262,7 +261,7 @@ func TestIsCircular_FromRefLookup(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndex(&iNode)
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -298,7 +297,7 @@ func TestIsCircular_FromRefLookup_Optional(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndex(&iNode)
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 0)
@@ -346,14 +345,14 @@ func TestGetCircularReferenceResult_FromJourney(t *testing.T) {
yml = `$ref: '#/components/schemas/Something'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- ref, err := LocateRefNode(idxNode.Content[0], idx)
+ ref, _, err := LocateRefNode(idxNode.Content[0], idx)
assert.NoError(t, err)
circ := GetCircularReferenceResult(ref, idx)
assert.NotNil(t, circ)
@@ -380,14 +379,14 @@ func TestGetCircularReferenceResult_FromJourney_Optional(t *testing.T) {
yml = `$ref: '#/components/schemas/Something'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 0)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- ref, err := LocateRefNode(idxNode.Content[0], idx)
+ ref, _, err := LocateRefNode(idxNode.Content[0], idx)
assert.NoError(t, err)
circ := GetCircularReferenceResult(ref, idx)
assert.NotNil(t, circ)
@@ -418,14 +417,14 @@ func TestGetCircularReferenceResult_FromLoopPoint(t *testing.T) {
yml = `$ref: '#/components/schemas/Nothing'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- ref, err := LocateRefNode(idxNode.Content[0], idx)
+ ref, _, err := LocateRefNode(idxNode.Content[0], idx)
assert.NoError(t, err)
circ := GetCircularReferenceResult(ref, idx)
assert.NotNil(t, circ)
@@ -452,14 +451,14 @@ func TestGetCircularReferenceResult_FromLoopPoint_Optional(t *testing.T) {
yml = `$ref: '#/components/schemas/Nothing'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 0)
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- ref, err := LocateRefNode(idxNode.Content[0], idx)
+ ref, _, err := LocateRefNode(idxNode.Content[0], idx)
assert.NoError(t, err)
circ := GetCircularReferenceResult(ref, idx)
assert.NotNil(t, circ)
@@ -490,7 +489,7 @@ func TestGetCircularReferenceResult_FromMappedRef(t *testing.T) {
yml = `$ref: '#/components/schemas/Nothing'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -522,7 +521,7 @@ func TestGetCircularReferenceResult_FromMappedRef_Optional(t *testing.T) {
yml = `$ref: '#/components/schemas/Nothing'`
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 0)
@@ -545,7 +544,7 @@ func TestGetCircularReferenceResult_NothingFound(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndex(&iNode)
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 0)
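
Beyond the resolver move, the only mechanical change in this file is that LocateRefNode now returns three values; the extra middle value is discarded in every assertion above and, judging by LocateRefNodeWithContext, appears to be the index the reference was located in. A sketch of the updated call:

    var idxNode yaml.Node
    _ = yaml.Unmarshal([]byte(`$ref: '#/components/schemas/Something'`), &idxNode)

    // was: ref, err := LocateRefNode(idxNode.Content[0], idx)
    ref, foundIdx, err := LocateRefNode(idxNode.Content[0], idx)
    _ = foundIdx // assumed to be the *index.SpecIndex the node was resolved against
    if err == nil && IsCircular(ref, idx) {
        // the reference participates in a detected loop
    }
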
diff --git a/datamodel/low/serializing.go b/datamodel/low/serializing.go
deleted file mode 100644
index e58d4df..0000000
--- a/datamodel/low/serializing.go
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
-// SPDX-License-Identifier: MIT
-
-package low
diff --git a/datamodel/low/v2/definitions.go b/datamodel/low/v2/definitions.go
index ab5065a..d56801b 100644
--- a/datamodel/low/v2/definitions.go
+++ b/datamodel/low/v2/definitions.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"sort"
"strings"
@@ -73,7 +74,7 @@ func (s *SecurityDefinitions) FindSecurityDefinition(securityDef string) *low.Va
}
// Build will extract all definitions into SchemaProxy instances.
-func (d *Definitions) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (d *Definitions) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
// TODO: Refactor with orderedmap.TranslatePipeline.
@@ -84,7 +85,7 @@ func (d *Definitions) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
var buildFunc = func(label *yaml.Node, value *yaml.Node, idx *index.SpecIndex,
r chan definitionResult[*base.SchemaProxy], e chan error) {
- obj, err, _, rv := low.ExtractObjectRaw[*base.SchemaProxy](label, value, idx)
+ obj, err, _, rv := low.ExtractObjectRaw[*base.SchemaProxy](ctx, label, value, idx)
if err != nil {
e <- err
}
@@ -137,7 +138,7 @@ func (d *Definitions) Hash() [32]byte {
}
// Build will extract all ParameterDefinitions into Parameter instances.
-func (pd *ParameterDefinitions) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (pd *ParameterDefinitions) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
errorChan := make(chan error)
resultChan := make(chan definitionResult[*Parameter])
var defLabel *yaml.Node
@@ -145,7 +146,7 @@ func (pd *ParameterDefinitions) Build(_, root *yaml.Node, idx *index.SpecIndex)
var buildFunc = func(label *yaml.Node, value *yaml.Node, idx *index.SpecIndex,
r chan definitionResult[*Parameter], e chan error) {
- obj, err, _, rv := low.ExtractObjectRaw[*Parameter](label, value, idx)
+ obj, err, _, rv := low.ExtractObjectRaw[*Parameter](ctx, label, value, idx)
if err != nil {
e <- err
}
@@ -187,7 +188,7 @@ type definitionResult[T any] struct {
}
// Build will extract all ResponsesDefinitions into Response instances.
-func (r *ResponsesDefinitions) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (r *ResponsesDefinitions) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
errorChan := make(chan error)
resultChan := make(chan definitionResult[*Response])
var defLabel *yaml.Node
@@ -195,7 +196,7 @@ func (r *ResponsesDefinitions) Build(_, root *yaml.Node, idx *index.SpecIndex) e
var buildFunc = func(label *yaml.Node, value *yaml.Node, idx *index.SpecIndex,
r chan definitionResult[*Response], e chan error) {
- obj, err, _, rv := low.ExtractObjectRaw[*Response](label, value, idx)
+ obj, err, _, rv := low.ExtractObjectRaw[*Response](ctx, label, value, idx)
if err != nil {
e <- err
}
@@ -231,7 +232,7 @@ func (r *ResponsesDefinitions) Build(_, root *yaml.Node, idx *index.SpecIndex) e
}
// Build will extract all SecurityDefinitions into SecurityScheme instances.
-func (s *SecurityDefinitions) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (s *SecurityDefinitions) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
errorChan := make(chan error)
resultChan := make(chan definitionResult[*SecurityScheme])
var defLabel *yaml.Node
@@ -240,7 +241,7 @@ func (s *SecurityDefinitions) Build(_, root *yaml.Node, idx *index.SpecIndex) er
var buildFunc = func(label *yaml.Node, value *yaml.Node, idx *index.SpecIndex,
r chan definitionResult[*SecurityScheme], e chan error) {
- obj, err, _, rv := low.ExtractObjectRaw[*SecurityScheme](label, value, idx)
+ obj, err, _, rv := low.ExtractObjectRaw[*SecurityScheme](ctx, label, value, idx)
if err != nil {
e <- err
}
diff --git a/datamodel/low/v2/definitions_test.go b/datamodel/low/v2/definitions_test.go
index dedec28..c648c85 100644
--- a/datamodel/low/v2/definitions_test.go
+++ b/datamodel/low/v2/definitions_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -25,7 +26,7 @@ func TestDefinitions_Schemas_Build_Error(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -44,7 +45,7 @@ func TestDefinitions_Parameters_Build_Error(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -63,7 +64,7 @@ func TestDefinitions_Hash(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Equal(t, "26d23786e6873e1a337f8e9be85f7de1490e4ff6cd303c3b15e593a25a6a149d",
low.GenerateHashString(&n))
@@ -83,7 +84,7 @@ func TestDefinitions_Responses_Build_Error(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -102,7 +103,7 @@ func TestDefinitions_Security_Build_Error(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
diff --git a/datamodel/low/v2/examples.go b/datamodel/low/v2/examples.go
index dfb37c2..bb49a1b 100644
--- a/datamodel/low/v2/examples.go
+++ b/datamodel/low/v2/examples.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -29,7 +30,7 @@ func (e *Examples) FindExample(name string) *low.ValueReference[any] {
}
// Build will extract all examples and will attempt to unmarshal content into a map or slice based on type.
-func (e *Examples) Build(_, root *yaml.Node, _ *index.SpecIndex) error {
+func (e *Examples) Build(_ context.Context, _, root *yaml.Node, _ *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
var keyNode, currNode *yaml.Node
diff --git a/datamodel/low/v2/examples_test.go b/datamodel/low/v2/examples_test.go
index 28eb350..9591ea9 100644
--- a/datamodel/low/v2/examples_test.go
+++ b/datamodel/low/v2/examples_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -27,7 +28,7 @@ nothing: int`
var n Examples
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `anything:
cake: burger
@@ -43,7 +44,7 @@ yes:
var n2 Examples
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/header.go b/datamodel/low/v2/header.go
index 5bb96ce..dc493fc 100644
--- a/datamodel/low/v2/header.go
+++ b/datamodel/low/v2/header.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -51,11 +52,11 @@ func (h *Header) GetExtensions() map[low.KeyReference[string]]low.ValueReference
}
// Build will build out items, extensions and default value from the supplied node.
-func (h *Header) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (h *Header) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
h.Extensions = low.ExtractExtensions(root)
- items, err := low.ExtractObject[*Items](ItemsLabel, root, idx)
+ items, err := low.ExtractObject[*Items](ctx, ItemsLabel, root, idx)
if err != nil {
return err
}
diff --git a/datamodel/low/v2/header_test.go b/datamodel/low/v2/header_test.go
index 3677020..9e3f59d 100644
--- a/datamodel/low/v2/header_test.go
+++ b/datamodel/low/v2/header_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -25,7 +26,7 @@ func TestHeader_Build(t *testing.T) {
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -44,7 +45,7 @@ default:
var n Header
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NotNil(t, n.Default.Value)
assert.Len(t, n.Default.Value, 3)
@@ -65,7 +66,7 @@ func TestHeader_DefaultAsObject(t *testing.T) {
var n Header
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NotNil(t, n.Default.Value)
}
@@ -80,7 +81,7 @@ func TestHeader_NoDefault(t *testing.T) {
var n Header
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Equal(t, 12, n.Minimum.Value)
}
@@ -116,7 +117,7 @@ multipleOf: 12`
var n Header
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `description: head
items:
@@ -148,7 +149,7 @@ pattern: wow
var n2 Header
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/items.go b/datamodel/low/v2/items.go
index 416e0d9..36036bc 100644
--- a/datamodel/low/v2/items.go
+++ b/datamodel/low/v2/items.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -102,11 +103,11 @@ func (i *Items) Hash() [32]byte {
}
// Build will build out items and default value.
-func (i *Items) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (i *Items) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
i.Extensions = low.ExtractExtensions(root)
- items, iErr := low.ExtractObject[*Items](ItemsLabel, root, idx)
+ items, iErr := low.ExtractObject[*Items](ctx, ItemsLabel, root, idx)
if iErr != nil {
return iErr
}
diff --git a/datamodel/low/v2/items_test.go b/datamodel/low/v2/items_test.go
index c34ed4d..a5bf4b3 100644
--- a/datamodel/low/v2/items_test.go
+++ b/datamodel/low/v2/items_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -25,7 +26,7 @@ func TestItems_Build(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -42,7 +43,7 @@ default:
var n Items
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Len(t, n.Default.Value, 2)
assert.Len(t, n.GetExtensions(), 1)
@@ -60,7 +61,7 @@ func TestItems_DefaultAsMap(t *testing.T) {
var n Items
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Len(t, n.Default.Value, 2)
@@ -96,7 +97,7 @@ multipleOf: 12`
var n Items
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `items:
type: int
@@ -127,7 +128,7 @@ pattern: wow
var n2 Items
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/operation.go b/datamodel/low/v2/operation.go
index 17e8f4b..bf4a4c6 100644
--- a/datamodel/low/v2/operation.go
+++ b/datamodel/low/v2/operation.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -36,20 +37,20 @@ type Operation struct {
}
// Build will extract external docs, extensions, parameters, responses and security requirements.
-func (o *Operation) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (o *Operation) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
o.Extensions = low.ExtractExtensions(root)
// extract externalDocs
- extDocs, dErr := low.ExtractObject[*base.ExternalDoc](base.ExternalDocsLabel, root, idx)
+ extDocs, dErr := low.ExtractObject[*base.ExternalDoc](ctx, base.ExternalDocsLabel, root, idx)
if dErr != nil {
return dErr
}
o.ExternalDocs = extDocs
// extract parameters
- params, ln, vn, pErr := low.ExtractArray[*Parameter](ParametersLabel, root, idx)
+ params, ln, vn, pErr := low.ExtractArray[*Parameter](ctx, ParametersLabel, root, idx)
if pErr != nil {
return pErr
}
@@ -62,14 +63,14 @@ func (o *Operation) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// extract responses
- respBody, respErr := low.ExtractObject[*Responses](ResponsesLabel, root, idx)
+ respBody, respErr := low.ExtractObject[*Responses](ctx, ResponsesLabel, root, idx)
if respErr != nil {
return respErr
}
o.Responses = respBody
// extract security
- sec, sln, svn, sErr := low.ExtractArray[*base.SecurityRequirement](SecurityLabel, root, idx)
+ sec, sln, svn, sErr := low.ExtractArray[*base.SecurityRequirement](ctx, SecurityLabel, root, idx)
if sErr != nil {
return sErr
}
diff --git a/datamodel/low/v2/operation_test.go b/datamodel/low/v2/operation_test.go
index 8fedf0d..87e015f 100644
--- a/datamodel/low/v2/operation_test.go
+++ b/datamodel/low/v2/operation_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"testing"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -28,7 +29,7 @@ func TestOperation_Build_ExternalDocs(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -47,7 +48,7 @@ func TestOperation_Build_Params(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -66,7 +67,7 @@ func TestOperation_Build_Responses(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -85,7 +86,7 @@ func TestOperation_Build_Security(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -128,7 +129,7 @@ x-smoke: not for a while`
var n Operation
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `summary: a nice day
tags:
@@ -166,7 +167,7 @@ security:
var n2 Operation
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/package_test.go b/datamodel/low/v2/package_test.go
index 9ebb422..5d5ae15 100644
--- a/datamodel/low/v2/package_test.go
+++ b/datamodel/low/v2/package_test.go
@@ -5,8 +5,10 @@ package v2
import (
"fmt"
+ "github.com/pb33f/libopenapi/utils"
+ "os"
+
"github.com/pb33f/libopenapi/datamodel"
- "io/ioutil"
)
// How to create a low-level Swagger / OpenAPI 2 Document from a specification
@@ -15,18 +17,19 @@ func Example_createLowLevelSwaggerDocument() {
// How to create a low-level OpenAPI 2 Document
// load petstore into bytes
- petstoreBytes, _ := ioutil.ReadFile("../../../test_specs/petstorev2.json")
+ petstoreBytes, _ := os.ReadFile("../../../test_specs/petstorev2.json")
// read in specification
info, _ := datamodel.ExtractSpecInfo(petstoreBytes)
// build low-level document model
- document, errors := CreateDocument(info)
+ document, err := CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
// if something went wrong, a slice of errors is returned
- if len(errors) > 0 {
- for i := range errors {
- fmt.Printf("error: %s\n", errors[i].Error())
+ errs := utils.UnwrapErrors(err)
+ if len(errs) > 0 {
+ for i := range errs {
+ fmt.Printf("error: %s\n", errs[i].Error())
}
panic("cannot build document")
}
@@ -43,18 +46,19 @@ func ExampleCreateDocument() {
// How to create a low-level OpenAPI 2 Document
// load petstore into bytes
- petstoreBytes, _ := ioutil.ReadFile("../../../test_specs/petstorev2.json")
+ petstoreBytes, _ := os.ReadFile("../../../test_specs/petstorev2.json")
// read in specification
info, _ := datamodel.ExtractSpecInfo(petstoreBytes)
// build low-level document model
- document, errors := CreateDocument(info)
+ document, err := CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
// if something went wrong, a slice of errors is returned
- if len(errors) > 0 {
- for i := range errors {
- fmt.Printf("error: %s\n", errors[i].Error())
+ errs := utils.UnwrapErrors(err)
+ if len(errs) > 0 {
+ for i := range errs {
+ fmt.Printf("error: %s\n", errs[i].Error())
}
panic("cannot build document")
}
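
The example functions above illustrate the API shift this patch makes throughout: the old `[]error` return is replaced by a single error built with `errors.Join`, and `utils.UnwrapErrors` splits it back apart. A hedged alternative, for callers that prefer to stay on the standard library, is Go 1.20's multi-error unwrapping; the sketch below assumes only that the returned error was produced by `errors.Join`, as the swagger.go changes later in this diff show, and the spec path is a placeholder.

```go
package main

import (
	"fmt"
	"os"

	"github.com/pb33f/libopenapi/datamodel"
	v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
)

func main() {
	petstoreBytes, _ := os.ReadFile("test_specs/petstorev2.json") // placeholder path
	info, _ := datamodel.ExtractSpecInfo(petstoreBytes)

	document, err := v2.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
	if err != nil {
		// errors.Join produces an error that unwraps into its parts (Go 1.20+).
		if joined, ok := err.(interface{ Unwrap() []error }); ok {
			for _, e := range joined.Unwrap() {
				fmt.Printf("error: %s\n", e.Error())
			}
		}
		panic("cannot build document")
	}
	fmt.Println("swagger version:", document.Swagger.Value)
}
```
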
diff --git a/datamodel/low/v2/parameter.go b/datamodel/low/v2/parameter.go
index 2e9490f..96514ab 100644
--- a/datamodel/low/v2/parameter.go
+++ b/datamodel/low/v2/parameter.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -94,18 +95,18 @@ func (p *Parameter) GetExtensions() map[low.KeyReference[string]]low.ValueRefere
}
// Build will extract out extensions, schema, items and default value
-func (p *Parameter) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (p *Parameter) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
p.Extensions = low.ExtractExtensions(root)
- sch, sErr := base.ExtractSchema(root, idx)
+ sch, sErr := base.ExtractSchema(ctx, root, idx)
if sErr != nil {
return sErr
}
if sch != nil {
p.Schema = *sch
}
- items, iErr := low.ExtractObject[*Items](ItemsLabel, root, idx)
+ items, iErr := low.ExtractObject[*Items](ctx, ItemsLabel, root, idx)
if iErr != nil {
return iErr
}
diff --git a/datamodel/low/v2/parameter_test.go b/datamodel/low/v2/parameter_test.go
index 48b9575..ae2cedd 100644
--- a/datamodel/low/v2/parameter_test.go
+++ b/datamodel/low/v2/parameter_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
@@ -25,7 +26,7 @@ func TestParameter_Build(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -44,7 +45,7 @@ func TestParameter_Build_Items(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -63,7 +64,7 @@ func TestParameter_DefaultSlice(t *testing.T) {
var n Parameter
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Len(t, n.Default.Value.([]any), 3)
}
@@ -80,7 +81,7 @@ func TestParameter_DefaultMap(t *testing.T) {
var n Parameter
_ = low.BuildModel(&idxNode, &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Len(t, n.Default.Value.(map[string]any), 2)
}
@@ -95,7 +96,7 @@ func TestParameter_NoDefaultNoError(t *testing.T) {
var n Parameter
_ = low.BuildModel(&idxNode, &n)
- err := n.Build(nil, idxNode.Content[0], idx)
+ err := n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
}
@@ -136,7 +137,7 @@ required: true`
var n Parameter
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `items:
type: int
@@ -174,7 +175,7 @@ allowEmptyValue: true
var n2 Parameter
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/path_item.go b/datamodel/low/v2/path_item.go
index 5046534..944d665 100644
--- a/datamodel/low/v2/path_item.go
+++ b/datamodel/low/v2/path_item.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -48,7 +49,7 @@ func (p *PathItem) GetExtensions() map[low.KeyReference[string]]low.ValueReferen
// Build will extract extensions, parameters and operations for all methods. Every method is handled
// asynchronously, in order to keep things moving quickly for complex operations.
-func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (p *PathItem) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
p.Extensions = low.ExtractExtensions(root)
@@ -61,7 +62,7 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
var ops []low.NodeReference[*Operation]
// extract parameters
- params, ln, vn, pErr := low.ExtractArray[*Parameter](ParametersLabel, root, idx)
+ params, ln, vn, pErr := low.ExtractArray[*Parameter](ctx, ParametersLabel, root, idx)
if pErr != nil {
return pErr
}
@@ -158,7 +159,7 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
opErrorChan := make(chan error)
var buildOpFunc = func(op low.NodeReference[*Operation], ch chan<- bool, errCh chan<- error) {
- er := op.Value.Build(op.KeyNode, op.ValueNode, idx)
+ er := op.Value.Build(ctx, op.KeyNode, op.ValueNode, idx)
if er != nil {
errCh <- er
}
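
The `buildOpFunc` change above is representative of how the context is pushed into the per-method goroutines. For readers unfamiliar with the signalling pattern used here, the sketch below reproduces it in isolation: one goroutine per item, a done channel and an error channel, and a collection loop that consumes exactly one signal per goroutine. It is a simplified stand-in, not the library's code.

```go
package main

import (
	"context"
	"errors"
	"fmt"
)

// opBuilder stands in for a low-level model with a context-aware Build method.
type opBuilder struct{ method string }

func (o *opBuilder) Build(_ context.Context) error {
	if o.method == "trace" {
		return errors.New("cannot build " + o.method)
	}
	return nil
}

// buildAll fans out one goroutine per operation and collects exactly one
// signal from each, joining any errors at the end.
func buildAll(ctx context.Context, ops []*opBuilder) error {
	done := make(chan bool)
	errCh := make(chan error)
	for _, op := range ops {
		go func(o *opBuilder) {
			if err := o.Build(ctx); err != nil {
				errCh <- err
				return
			}
			done <- true
		}(op)
	}
	var errs []error
	for completed := 0; completed < len(ops); completed++ {
		select {
		case <-done:
		case err := <-errCh:
			errs = append(errs, err)
		}
	}
	return errors.Join(errs...)
}

func main() {
	ops := []*opBuilder{{"get"}, {"post"}, {"trace"}}
	fmt.Println(buildAll(context.Background(), ops))
}
```
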
diff --git a/datamodel/low/v2/path_item_test.go b/datamodel/low/v2/path_item_test.go
index 2efff31..aac313e 100644
--- a/datamodel/low/v2/path_item_test.go
+++ b/datamodel/low/v2/path_item_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -25,7 +26,7 @@ func TestPathItem_Build_Params(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -44,7 +45,7 @@ func TestPathItem_Build_MethodFail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -76,7 +77,7 @@ x-winter: is coming`
var n PathItem
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `post:
description: post me there
@@ -103,7 +104,7 @@ parameters:
var n2 PathItem
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/paths.go b/datamodel/low/v2/paths.go
index 9308ca8..2655b6e 100644
--- a/datamodel/low/v2/paths.go
+++ b/datamodel/low/v2/paths.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -58,7 +59,7 @@ func (p *Paths) FindExtension(ext string) *low.ValueReference[any] {
}
// Build will extract extensions and paths from node.
-func (p *Paths) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (p *Paths) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
p.Extensions = low.ExtractExtensions(root)
@@ -130,7 +131,7 @@ func (p *Paths) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
cNode := value.currentNode
path := new(PathItem)
_ = low.BuildModel(pNode, path)
- err := path.Build(cNode, pNode, idx)
+ err := path.Build(ctx, cNode, pNode, idx)
if err != nil {
return pathBuildResult{}, err
}
diff --git a/datamodel/low/v2/paths_test.go b/datamodel/low/v2/paths_test.go
index 48752db..41d26af 100644
--- a/datamodel/low/v2/paths_test.go
+++ b/datamodel/low/v2/paths_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"fmt"
"testing"
@@ -27,7 +28,7 @@ func TestPaths_Build(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -47,7 +48,7 @@ func TestPaths_FindPathAndKey(t *testing.T) {
var n Paths
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
_, k := n.FindPathAndKey("/no/pizza")
assert.Equal(t, "because i'm fat", k.Value.Post.Value.Description.Value)
@@ -74,7 +75,7 @@ x-milk: creamy`
var n Paths
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `x-milk: creamy
/spl/unk:
@@ -94,7 +95,7 @@ x-milk: creamy`
var n2 Paths
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
@@ -123,6 +124,6 @@ func TestPaths_Build_Fail_Many(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
diff --git a/datamodel/low/v2/response.go b/datamodel/low/v2/response.go
index 6198f36..08ca388 100644
--- a/datamodel/low/v2/response.go
+++ b/datamodel/low/v2/response.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -45,11 +46,11 @@ func (r *Response) FindHeader(hType string) *low.ValueReference[*Header] {
}
// Build will extract schema, extensions, examples and headers from node
-func (r *Response) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (r *Response) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
r.Extensions = low.ExtractExtensions(root)
- s, err := base.ExtractSchema(root, idx)
+ s, err := base.ExtractSchema(ctx, root, idx)
if err != nil {
return err
}
@@ -58,14 +59,14 @@ func (r *Response) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// extract examples
- examples, expErr := low.ExtractObject[*Examples](ExamplesLabel, root, idx)
+ examples, expErr := low.ExtractObject[*Examples](ctx, ExamplesLabel, root, idx)
if expErr != nil {
return expErr
}
r.Examples = examples
// extract headers
- headers, lN, kN, err := low.ExtractMap[*Header](HeadersLabel, root, idx)
+ headers, lN, kN, err := low.ExtractMap[*Header](ctx, HeadersLabel, root, idx)
if err != nil {
return err
}
diff --git a/datamodel/low/v2/response_test.go b/datamodel/low/v2/response_test.go
index 4ca1146..786932a 100644
--- a/datamodel/low/v2/response_test.go
+++ b/datamodel/low/v2/response_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -25,7 +26,7 @@ func TestResponse_Build_Schema(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -44,7 +45,7 @@ func TestResponse_Build_Examples(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -63,7 +64,7 @@ func TestResponse_Build_Headers(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -87,7 +88,7 @@ x-herbs: missing`
var n Response
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `description: your thing, sir.
examples:
@@ -106,7 +107,7 @@ headers:
var n2 Response
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/responses.go b/datamodel/low/v2/responses.go
index a7699f8..4c76b6c 100644
--- a/datamodel/low/v2/responses.go
+++ b/datamodel/low/v2/responses.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -29,13 +30,13 @@ func (r *Responses) GetExtensions() map[low.KeyReference[string]]low.ValueRefere
}
// Build will extract default value and extensions from node.
-func (r *Responses) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (r *Responses) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
r.Extensions = low.ExtractExtensions(root)
if utils.IsNodeMap(root) {
- codes, err := low.ExtractMapNoLookup[*Response](root, idx)
+ codes, err := low.ExtractMapNoLookup[*Response](ctx, root, idx)
if err != nil {
return err
}
diff --git a/datamodel/low/v2/responses_test.go b/datamodel/low/v2/responses_test.go
index ae38e7f..5cb3c24 100644
--- a/datamodel/low/v2/responses_test.go
+++ b/datamodel/low/v2/responses_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -24,7 +25,7 @@ func TestResponses_Build_Response(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -43,7 +44,7 @@ func TestResponses_Build_Response_Default(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -61,7 +62,7 @@ func TestResponses_Build_WrongType(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -88,7 +89,7 @@ x-tea: warm
var n Responses
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `401:
description: and you are?
@@ -110,7 +111,7 @@ x-tea: warm`
var n2 Responses
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/scopes.go b/datamodel/low/v2/scopes.go
index d2ba4ed..e6395af 100644
--- a/datamodel/low/v2/scopes.go
+++ b/datamodel/low/v2/scopes.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -36,7 +37,7 @@ func (s *Scopes) FindScope(scope string) *low.ValueReference[string] {
}
// Build will extract scope values and extensions from node.
-func (s *Scopes) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (s *Scopes) Build(_ context.Context, _, root *yaml.Node, _ *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
s.Extensions = low.ExtractExtensions(root)
diff --git a/datamodel/low/v2/scopes_test.go b/datamodel/low/v2/scopes_test.go
index 8bcd33b..7c0b521 100644
--- a/datamodel/low/v2/scopes_test.go
+++ b/datamodel/low/v2/scopes_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -23,7 +24,7 @@ x-men: needs a reboot or a refresh`
var n Scopes
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `x-men: needs a reboot or a refresh
pizza: beans
@@ -35,7 +36,7 @@ burgers: chips`
var n2 Scopes
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/security_scheme.go b/datamodel/low/v2/security_scheme.go
index 0a025c4..bdf235d 100644
--- a/datamodel/low/v2/security_scheme.go
+++ b/datamodel/low/v2/security_scheme.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -38,12 +39,12 @@ func (ss *SecurityScheme) GetExtensions() map[low.KeyReference[string]]low.Value
}
// Build will extract extensions and scopes from the node.
-func (ss *SecurityScheme) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (ss *SecurityScheme) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
ss.Extensions = low.ExtractExtensions(root)
- scopes, sErr := low.ExtractObject[*Scopes](ScopesLabel, root, idx)
+ scopes, sErr := low.ExtractObject[*Scopes](ctx, ScopesLabel, root, idx)
if sErr != nil {
return sErr
}
diff --git a/datamodel/low/v2/security_scheme_test.go b/datamodel/low/v2/security_scheme_test.go
index 17907f0..6312dae 100644
--- a/datamodel/low/v2/security_scheme_test.go
+++ b/datamodel/low/v2/security_scheme_test.go
@@ -4,6 +4,7 @@
package v2
import (
+ "context"
"testing"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -27,7 +28,7 @@ func TestSecurityScheme_Build_Borked(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -47,7 +48,7 @@ func TestSecurityScheme_Build_Scopes(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, 2, orderedmap.Len(n.Scopes.Value.Values))
@@ -72,7 +73,7 @@ x-beer: not for a while`
var n SecurityScheme
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `in: my heart
scopes:
@@ -92,7 +93,7 @@ authorizationUrl: https://pb33f.io
var n2 SecurityScheme
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v2/swagger.go b/datamodel/low/v2/swagger.go
index 872c8de..244cca0 100644
--- a/datamodel/low/v2/swagger.go
+++ b/datamodel/low/v2/swagger.go
@@ -12,16 +12,18 @@
package v2
import (
+ "context"
+ "errors"
"github.com/pb33f/libopenapi/datamodel"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
- "github.com/pb33f/libopenapi/resolver"
"gopkg.in/yaml.v3"
+ "path/filepath"
)
// processes a property of a Swagger document asynchronously using bool and error channels for signals.
-type documentFunction func(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error)
+type documentFunction func(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error)
// Swagger represents a high-level Swagger / OpenAPI 2 document. An instance of Swagger is the root of the specification.
type Swagger struct {
@@ -109,6 +111,10 @@ type Swagger struct {
//
// This property is not a part of the OpenAPI schema, this is custom to libopenapi.
SpecInfo *datamodel.SpecInfo
+
+ // Rolodex is a reference to the index.Rolodex instance created when the specification was read.
+ // The rolodex is used to look up references from file systems (local or remote)
+ Rolodex *index.Rolodex
}
// FindExtension locates an extension from the root of the Swagger document.
@@ -123,57 +129,100 @@ func (s *Swagger) GetExtensions() map[low.KeyReference[string]]low.ValueReferenc
// CreateDocumentFromConfig will create a new Swagger document from the provided SpecInfo and DocumentConfiguration.
func CreateDocumentFromConfig(info *datamodel.SpecInfo,
- configuration *datamodel.DocumentConfiguration) (*Swagger, []error) {
+ configuration *datamodel.DocumentConfiguration) (*Swagger, error) {
return createDocument(info, configuration)
}
-// CreateDocument will create a new Swagger document from the provided SpecInfo.
-//
-// Deprecated: Use CreateDocumentFromConfig instead.
-func CreateDocument(info *datamodel.SpecInfo) (*Swagger, []error) {
- return createDocument(info, &datamodel.DocumentConfiguration{
- AllowRemoteReferences: true,
- AllowFileReferences: true,
- })
-}
-
-func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Swagger, []error) {
+func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Swagger, error) {
doc := Swagger{Swagger: low.ValueReference[string]{Value: info.Version, ValueNode: info.RootNode}}
doc.Extensions = low.ExtractExtensions(info.RootNode.Content[0])
- // build an index
- idx := index.NewSpecIndexWithConfig(info.RootNode, &index.SpecIndexConfig{
- BaseURL: config.BaseURL,
- RemoteURLHandler: config.RemoteURLHandler,
- AllowRemoteLookup: config.AllowRemoteReferences,
- AllowFileLookup: config.AllowFileReferences,
- })
- doc.Index = idx
- doc.SpecInfo = info
+ // create an index config and shadow the document configuration.
+ idxConfig := index.CreateClosedAPIIndexConfig()
+ idxConfig.SpecInfo = info
+ idxConfig.IgnoreArrayCircularReferences = config.IgnoreArrayCircularReferences
+ idxConfig.IgnorePolymorphicCircularReferences = config.IgnorePolymorphicCircularReferences
+ idxConfig.AvoidCircularReferenceCheck = true
+ idxConfig.BaseURL = config.BaseURL
+ idxConfig.BasePath = config.BasePath
+ idxConfig.Logger = config.Logger
+ rolodex := index.NewRolodex(idxConfig)
+ rolodex.SetRootNode(info.RootNode)
+ doc.Rolodex = rolodex
- var errors []error
+ // If basePath is provided, add a local filesystem to the rolodex.
+ if idxConfig.BasePath != "" {
+ var cwd string
+ cwd, _ = filepath.Abs(config.BasePath)
+ // if a supplied local filesystem is provided, add it to the rolodex.
+ if config.LocalFS != nil {
+ rolodex.AddLocalFS(cwd, config.LocalFS)
+ } else {
+
+ // create a local filesystem
+ localFSConf := index.LocalFSConfig{
+ BaseDirectory: cwd,
+ IndexConfig: idxConfig,
+ FileFilters: config.FileFilter,
+ }
+ fileFS, _ := index.NewLocalFSWithConfig(&localFSConf)
+ idxConfig.AllowFileLookup = true
+
+ // add the filesystem to the rolodex
+ rolodex.AddLocalFS(cwd, fileFS)
+ }
+ }
+
+ // If a base URL is provided, add a remote filesystem to the rolodex.
+ if idxConfig.BaseURL != nil {
+
+ // create a remote filesystem
+ remoteFS, _ := index.NewRemoteFSWithConfig(idxConfig)
+ if config.RemoteURLHandler != nil {
+ remoteFS.RemoteHandlerFunc = config.RemoteURLHandler
+ }
+ idxConfig.AllowRemoteLookup = true
+
+ // add to the rolodex
+ rolodex.AddRemoteFS(config.BaseURL.String(), remoteFS)
+
+ }
+
+ doc.Rolodex = rolodex
+
+ var errs []error
+
+ // index all the things!
+ _ = rolodex.IndexTheRolodex()
+
+ // check for circular references
+ if !config.SkipCircularReferenceCheck {
+ rolodex.CheckForCircularReferences()
+ }
+
+ // extract errors
+ roloErrs := rolodex.GetCaughtErrors()
+ if roloErrs != nil {
+ errs = append(errs, roloErrs...)
+ }
+
+ // set the index on the document.
+ doc.Index = rolodex.GetRootIndex()
+ doc.SpecInfo = info
// build out swagger scalar variables.
_ = low.BuildModel(info.RootNode.Content[0], &doc)
+ ctx := context.Background()
+
// extract externalDocs
- extDocs, err := low.ExtractObject[*base.ExternalDoc](base.ExternalDocsLabel, info.RootNode, idx)
+ extDocs, err := low.ExtractObject[*base.ExternalDoc](ctx, base.ExternalDocsLabel, info.RootNode, rolodex.GetRootIndex())
if err != nil {
- errors = append(errors, err)
+ errs = append(errs, err)
}
doc.ExternalDocs = extDocs
- // create resolver and check for circular references.
- resolve := resolver.NewResolver(idx)
- resolvingErrors := resolve.CheckForCircularReferences()
-
- if len(resolvingErrors) > 0 {
- for r := range resolvingErrors {
- errors = append(errors, resolvingErrors[r])
- }
- }
-
extractionFuncs := []documentFunction{
extractInfo,
extractPaths,
@@ -187,7 +236,7 @@ func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfigur
doneChan := make(chan bool)
errChan := make(chan error)
for i := range extractionFuncs {
- go extractionFuncs[i](info.RootNode.Content[0], &doc, idx, doneChan, errChan)
+ go extractionFuncs[i](ctx, info.RootNode.Content[0], &doc, rolodex.GetRootIndex(), doneChan, errChan)
}
completedExtractions := 0
for completedExtractions < len(extractionFuncs) {
@@ -196,11 +245,11 @@ func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfigur
completedExtractions++
case e := <-errChan:
completedExtractions++
- errors = append(errors, e)
+ errs = append(errs, e)
}
}
- return &doc, errors
+ return &doc, errors.Join(errs...)
}
func (s *Swagger) GetExternalDocs() *low.NodeReference[any] {
@@ -211,8 +260,8 @@ func (s *Swagger) GetExternalDocs() *low.NodeReference[any] {
}
}
-func extractInfo(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
- info, err := low.ExtractObject[*base.Info](base.InfoLabel, root, idx)
+func extractInfo(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
+ info, err := low.ExtractObject[*base.Info](ctx, base.InfoLabel, root, idx)
if err != nil {
e <- err
return
@@ -221,8 +270,8 @@ func extractInfo(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- b
c <- true
}
-func extractPaths(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
- paths, err := low.ExtractObject[*Paths](PathsLabel, root, idx)
+func extractPaths(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
+ paths, err := low.ExtractObject[*Paths](ctx, PathsLabel, root, idx)
if err != nil {
e <- err
return
@@ -230,8 +279,8 @@ func extractPaths(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<-
doc.Paths = paths
c <- true
}
-func extractDefinitions(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
- def, err := low.ExtractObject[*Definitions](DefinitionsLabel, root, idx)
+func extractDefinitions(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
+ def, err := low.ExtractObject[*Definitions](ctx, DefinitionsLabel, root, idx)
if err != nil {
e <- err
return
@@ -239,8 +288,8 @@ func extractDefinitions(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c c
doc.Definitions = def
c <- true
}
-func extractParamDefinitions(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
- param, err := low.ExtractObject[*ParameterDefinitions](ParametersLabel, root, idx)
+func extractParamDefinitions(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
+ param, err := low.ExtractObject[*ParameterDefinitions](ctx, ParametersLabel, root, idx)
if err != nil {
e <- err
return
@@ -249,8 +298,8 @@ func extractParamDefinitions(root *yaml.Node, doc *Swagger, idx *index.SpecIndex
c <- true
}
-func extractResponsesDefinitions(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
- resp, err := low.ExtractObject[*ResponsesDefinitions](ResponsesLabel, root, idx)
+func extractResponsesDefinitions(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
+ resp, err := low.ExtractObject[*ResponsesDefinitions](ctx, ResponsesLabel, root, idx)
if err != nil {
e <- err
return
@@ -259,8 +308,8 @@ func extractResponsesDefinitions(root *yaml.Node, doc *Swagger, idx *index.SpecI
c <- true
}
-func extractSecurityDefinitions(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
- sec, err := low.ExtractObject[*SecurityDefinitions](SecurityDefinitionsLabel, root, idx)
+func extractSecurityDefinitions(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
+ sec, err := low.ExtractObject[*SecurityDefinitions](ctx, SecurityDefinitionsLabel, root, idx)
if err != nil {
e <- err
return
@@ -269,8 +318,8 @@ func extractSecurityDefinitions(root *yaml.Node, doc *Swagger, idx *index.SpecIn
c <- true
}
-func extractTags(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
- tags, ln, vn, err := low.ExtractArray[*base.Tag](base.TagsLabel, root, idx)
+func extractTags(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
+ tags, ln, vn, err := low.ExtractArray[*base.Tag](ctx, base.TagsLabel, root, idx)
if err != nil {
e <- err
return
@@ -283,8 +332,8 @@ func extractTags(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- b
c <- true
}
-func extractSecurity(root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
- sec, ln, vn, err := low.ExtractArray[*base.SecurityRequirement](SecurityLabel, root, idx)
+func extractSecurity(ctx context.Context, root *yaml.Node, doc *Swagger, idx *index.SpecIndex, c chan<- bool, e chan<- error) {
+ sec, ln, vn, err := low.ExtractArray[*base.SecurityRequirement](ctx, SecurityLabel, root, idx)
if err != nil {
e <- err
return
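
Pulling the createDocument changes together from a consumer's point of view: the rolodex replaces the direct SpecIndex/resolver wiring and is driven entirely by DocumentConfiguration. The sketch below shows the two common setups, local files via BasePath/FileFilter and remote files via BaseURL; the spec path, file names, and URL are placeholders, not part of this patch.

```go
package main

import (
	"fmt"
	"net/url"
	"os"

	"github.com/pb33f/libopenapi/datamodel"
	v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
	"github.com/pb33f/libopenapi/utils"
)

func main() {
	specBytes, _ := os.ReadFile("specs/root.yaml") // placeholder path
	info, _ := datamodel.ExtractSpecInfo(specBytes)

	cfg := datamodel.NewDocumentConfiguration()

	// local references: the rolodex gets a local filesystem rooted at BasePath,
	// optionally narrowed to specific files with FileFilter.
	cfg.BasePath = "specs"
	cfg.FileFilter = []string{"root.yaml", "components.yaml"} // placeholder names

	// remote references: a BaseURL adds a remote filesystem to the rolodex.
	base, _ := url.Parse("https://example.com/specs") // placeholder URL
	cfg.BaseURL = base

	doc, err := v2.CreateDocumentFromConfig(info, cfg)
	if err != nil {
		for _, e := range utils.UnwrapErrors(err) {
			fmt.Println("error:", e)
		}
	}
	if doc != nil {
		fmt.Println("indexed via rolodex:", doc.Rolodex != nil)
	}
}
```
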
diff --git a/datamodel/low/v2/swagger_test.go b/datamodel/low/v2/swagger_test.go
index 1ce4215..c0c5318 100644
--- a/datamodel/low/v2/swagger_test.go
+++ b/datamodel/low/v2/swagger_test.go
@@ -5,7 +5,11 @@ package v2
import (
"fmt"
- "io/ioutil"
+ "github.com/pb33f/libopenapi/index"
+ "github.com/pb33f/libopenapi/utils"
+ "net/http"
+ "net/url"
+ "os"
"testing"
"github.com/pb33f/libopenapi/datamodel"
@@ -19,13 +23,10 @@ func initTest() {
if doc != nil {
return
}
- data, _ := ioutil.ReadFile("../../../test_specs/petstorev2-complete.yaml")
+ data, _ := os.ReadFile("../../../test_specs/petstorev2-complete.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
- doc, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -40,13 +41,10 @@ func initTest() {
}
func BenchmarkCreateDocument(b *testing.B) {
- data, _ := ioutil.ReadFile("../../../test_specs/petstorev2-complete.yaml")
+ data, _ := os.ReadFile("../../../test_specs/petstorev2-complete.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
for i := 0; i < b.N; i++ {
- doc, _ = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
+ doc, _ = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
}
}
@@ -184,8 +182,8 @@ func TestCreateDocument_ExternalDocsBad(t *testing.T) {
$ref: bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -193,7 +191,7 @@ func TestCreateDocument_ExternalDocsBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 2)
}
func TestCreateDocument_TagsBad(t *testing.T) {
@@ -202,8 +200,8 @@ func TestCreateDocument_TagsBad(t *testing.T) {
$ref: bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -211,7 +209,7 @@ func TestCreateDocument_TagsBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 2)
}
func TestCreateDocument_PathsBad(t *testing.T) {
@@ -224,8 +222,8 @@ func TestCreateDocument_PathsBad(t *testing.T) {
$ref: bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -233,7 +231,7 @@ func TestCreateDocument_PathsBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 2)
}
func TestCreateDocument_SecurityBad(t *testing.T) {
@@ -242,8 +240,8 @@ func TestCreateDocument_SecurityBad(t *testing.T) {
$ref: `
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -251,7 +249,7 @@ func TestCreateDocument_SecurityBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 1)
}
func TestCreateDocument_SecurityDefinitionsBad(t *testing.T) {
@@ -260,8 +258,8 @@ func TestCreateDocument_SecurityDefinitionsBad(t *testing.T) {
$ref: `
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -269,7 +267,7 @@ func TestCreateDocument_SecurityDefinitionsBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 1)
}
func TestCreateDocument_ResponsesBad(t *testing.T) {
@@ -278,8 +276,8 @@ func TestCreateDocument_ResponsesBad(t *testing.T) {
$ref: `
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -287,7 +285,7 @@ func TestCreateDocument_ResponsesBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 1)
}
func TestCreateDocument_ParametersBad(t *testing.T) {
@@ -296,8 +294,8 @@ func TestCreateDocument_ParametersBad(t *testing.T) {
$ref: `
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -305,7 +303,7 @@ func TestCreateDocument_ParametersBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 1)
}
func TestCreateDocument_DefinitionsBad(t *testing.T) {
@@ -314,8 +312,8 @@ func TestCreateDocument_DefinitionsBad(t *testing.T) {
$ref: `
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -323,7 +321,7 @@ func TestCreateDocument_DefinitionsBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 1)
}
func TestCreateDocument_InfoBad(t *testing.T) {
@@ -332,8 +330,8 @@ func TestCreateDocument_InfoBad(t *testing.T) {
$ref: `
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocument(info)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
wait := true
for wait {
select {
@@ -341,15 +339,151 @@ func TestCreateDocument_InfoBad(t *testing.T) {
wait = false
}
}
- assert.Len(t, err, 1)
+ assert.Len(t, utils.UnwrapErrors(err), 1)
}
func TestCircularReferenceError(t *testing.T) {
- data, _ := ioutil.ReadFile("../../../test_specs/swagger-circular-tests.yaml")
+ data, _ := os.ReadFile("../../../test_specs/swagger-circular-tests.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- circDoc, err := CreateDocument(info)
+ circDoc, err := CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
assert.NotNil(t, circDoc)
- assert.Len(t, err, 3)
+ assert.Len(t, utils.UnwrapErrors(err), 3)
}
+
+func TestRolodexLocalFileSystem(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = "../../../test_specs"
+ cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.NoError(t, err)
+}
+
+func TestRolodexLocalFileSystem_ProvideNonRolodexFS(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+ baseDir := "../../../test_specs"
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = baseDir
+ cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}
+ cf.LocalFS = os.DirFS(baseDir)
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexLocalFileSystem_ProvideRolodexFS(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+ baseDir := "../../../test_specs"
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = baseDir
+ cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}
+
+ localFS, lErr := index.NewLocalFSWithConfig(&index.LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: cf.FileFilter,
+ })
+ cf.LocalFS = localFS
+
+ assert.NoError(t, lErr)
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.NoError(t, err)
+}
+
+func TestRolodexLocalFileSystem_BadPath(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = "/NOWHERE"
+ cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexRemoteFileSystem(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+
+ baseUrl := "https://raw.githubusercontent.com/pb33f/libopenapi/main/test_specs"
+ u, _ := url.Parse(baseUrl)
+ cf.BaseURL = u
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.NoError(t, err)
+}
+
+func TestRolodexRemoteFileSystem_BadBase(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+
+ baseUrl := "https://no-no-this-will-not-work-it-just-will-not-get-the-job-done-mate.com"
+ u, _ := url.Parse(baseUrl)
+ cf.BaseURL = u
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexRemoteFileSystem_CustomRemote_NoBaseURL(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.RemoteFS, _ = index.NewRemoteFSWithConfig(&index.SpecIndexConfig{})
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexRemoteFileSystem_CustomHttpHandler(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.RemoteURLHandler = http.Get
+ baseUrl := "https://no-no-this-will-not-work-it-just-will-not-get-the-job-done-mate.com"
+ u, _ := url.Parse(baseUrl)
+ cf.BaseURL = u
+
+ pizza := func(url string) (resp *http.Response, err error) {
+ return nil, nil
+ }
+ cf.RemoteURLHandler = pizza
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexRemoteFileSystem_FailRemoteFS(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.RemoteURLHandler = http.Get
+ baseUrl := "https://no-no-this-will-not-work-it-just-will-not-get-the-job-done-mate.com"
+ u, _ := url.Parse(baseUrl)
+ cf.BaseURL = u
+
+ pizza := func(url string) (resp *http.Response, err error) {
+ return nil, nil
+ }
+ cf.RemoteURLHandler = pizza
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
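
The remote-filesystem tests above exercise the handler hook with a stub; in practice, RemoteURLHandler is where authentication or a custom transport would live. A sketch under that assumption follows: the header name and token source are hypothetical, and only the handler signature `func(url string) (*http.Response, error)` comes from this patch.

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
	"os"

	"github.com/pb33f/libopenapi/datamodel"
	v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
)

func main() {
	spec := []byte(`swagger: "2.0"
info:
  title: sample
  version: "1.0"
paths: {}`)
	info, _ := datamodel.ExtractSpecInfo(spec)

	cfg := datamodel.NewDocumentConfiguration()
	base, _ := url.Parse("https://specs.example.com/openapi") // placeholder URL
	cfg.BaseURL = base

	// hypothetical: attach a bearer token to every remote reference lookup.
	cfg.RemoteURLHandler = func(target string) (*http.Response, error) {
		req, err := http.NewRequest(http.MethodGet, target, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("Authorization", "Bearer "+os.Getenv("SPEC_TOKEN"))
		return http.DefaultClient.Do(req)
	}

	doc, err := v2.CreateDocumentFromConfig(info, cfg)
	fmt.Println("document built:", doc != nil, "err:", err)
}
```
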
diff --git a/datamodel/low/v3/callback.go b/datamodel/low/v3/callback.go
index 0dc12df..a3996bf 100644
--- a/datamodel/low/v3/callback.go
+++ b/datamodel/low/v3/callback.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -41,7 +42,7 @@ func (cb *Callback) FindExpression(exp string) *low.ValueReference[*PathItem] {
}
// Build will extract extensions, expressions and PathItem objects for Callback
-func (cb *Callback) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (cb *Callback) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
cb.Reference = new(low.Reference)
@@ -59,7 +60,7 @@ func (cb *Callback) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
if strings.HasPrefix(currentCB.Value, "x-") {
continue // ignore extension.
}
- callback, eErr, _, rv := low.ExtractObjectRaw[*PathItem](currentCB, callbackNode, idx)
+ callback, eErr, _, rv := low.ExtractObjectRaw[*PathItem](ctx, currentCB, callbackNode, idx)
if eErr != nil {
return eErr
}
diff --git a/datamodel/low/v3/callback_test.go b/datamodel/low/v3/callback_test.go
index c2c0058..f3561c0 100644
--- a/datamodel/low/v3/callback_test.go
+++ b/datamodel/low/v3/callback_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"testing"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -35,7 +36,7 @@ func TestCallback_Build_Success(t *testing.T) {
err := low.BuildModel(rootNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, rootNode.Content[0], nil)
+ err = n.Build(context.Background(), nil, rootNode.Content[0], nil)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(n.Expression.Value))
@@ -67,7 +68,7 @@ func TestCallback_Build_Error(t *testing.T) {
err := low.BuildModel(rootNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, rootNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, rootNode.Content[0], idx)
assert.Error(t, err)
}
@@ -102,7 +103,7 @@ func TestCallback_Build_Using_InlineRef(t *testing.T) {
err := low.BuildModel(rootNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, rootNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, rootNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, 1, orderedmap.Len(n.Expression.Value))
@@ -130,7 +131,7 @@ x-weed: loved`
var n Callback
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `burgers:
description: tasty!
@@ -147,7 +148,7 @@ beer:
var n2 Callback
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/components.go b/datamodel/low/v3/components.go
index ed28390..447c6d1 100644
--- a/datamodel/low/v3/components.go
+++ b/datamodel/low/v3/components.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -142,7 +143,7 @@ func (co *Components) FindCallback(callback string) *low.ValueReference[*Callbac
// Build converts root YAML node containing components to low level model.
// Process each component in parallel.
-func (co *Components) Build(root *yaml.Node, idx *index.SpecIndex) error {
+func (co *Components) Build(ctx context.Context, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
co.Reference = new(low.Reference)
@@ -162,55 +163,55 @@ func (co *Components) Build(root *yaml.Node, idx *index.SpecIndex) error {
}
go func() {
- schemas, err := extractComponentValues[*base.SchemaProxy](SchemasLabel, root, idx)
+ schemas, err := extractComponentValues[*base.SchemaProxy](ctx, SchemasLabel, root, idx)
captureError(err)
co.Schemas = schemas
wg.Done()
}()
go func() {
- parameters, err := extractComponentValues[*Parameter](ParametersLabel, root, idx)
+ parameters, err := extractComponentValues[*Parameter](ctx, ParametersLabel, root, idx)
captureError(err)
co.Parameters = parameters
wg.Done()
}()
go func() {
- responses, err := extractComponentValues[*Response](ResponsesLabel, root, idx)
+ responses, err := extractComponentValues[*Response](ctx, ResponsesLabel, root, idx)
captureError(err)
co.Responses = responses
wg.Done()
}()
go func() {
- examples, err := extractComponentValues[*base.Example](base.ExamplesLabel, root, idx)
+ examples, err := extractComponentValues[*base.Example](ctx, base.ExamplesLabel, root, idx)
captureError(err)
co.Examples = examples
wg.Done()
}()
go func() {
- requestBodies, err := extractComponentValues[*RequestBody](RequestBodiesLabel, root, idx)
+ requestBodies, err := extractComponentValues[*RequestBody](ctx, RequestBodiesLabel, root, idx)
captureError(err)
co.RequestBodies = requestBodies
wg.Done()
}()
go func() {
- headers, err := extractComponentValues[*Header](HeadersLabel, root, idx)
+ headers, err := extractComponentValues[*Header](ctx, HeadersLabel, root, idx)
captureError(err)
co.Headers = headers
wg.Done()
}()
go func() {
- securitySchemes, err := extractComponentValues[*SecurityScheme](SecuritySchemesLabel, root, idx)
+ securitySchemes, err := extractComponentValues[*SecurityScheme](ctx, SecuritySchemesLabel, root, idx)
captureError(err)
co.SecuritySchemes = securitySchemes
wg.Done()
}()
go func() {
- links, err := extractComponentValues[*Link](LinksLabel, root, idx)
+ links, err := extractComponentValues[*Link](ctx, LinksLabel, root, idx)
captureError(err)
co.Links = links
wg.Done()
}()
go func() {
- callbacks, err := extractComponentValues[*Callback](CallbacksLabel, root, idx)
+ callbacks, err := extractComponentValues[*Callback](ctx, CallbacksLabel, root, idx)
captureError(err)
co.Callbacks = callbacks
wg.Done()
@@ -223,7 +224,7 @@ func (co *Components) Build(root *yaml.Node, idx *index.SpecIndex) error {
// extractComponentValues converts all the YAML nodes of a component type to
// low level model.
// Process each node in parallel.
-func extractComponentValues[T low.Buildable[N], N any](label string, root *yaml.Node, idx *index.SpecIndex) (low.NodeReference[orderedmap.Map[low.KeyReference[string], low.ValueReference[T]]], error) {
+func extractComponentValues[T low.Buildable[N], N any](ctx context.Context, label string, root *yaml.Node, idx *index.SpecIndex) (low.NodeReference[orderedmap.Map[low.KeyReference[string], low.ValueReference[T]]], error) {
var emptyResult low.NodeReference[orderedmap.Map[low.KeyReference[string], low.ValueReference[T]]]
_, nodeLabel, nodeValue := utils.FindKeyNodeFullTop(label, root.Content)
if nodeValue == nil {
@@ -289,7 +290,7 @@ func extractComponentValues[T low.Buildable[N], N any](label string, root *yaml.
// TODO: check circular crazy on this. It may explode
var err error
if h, _, _ := utils.IsNodeRefValue(node); h && label != SchemasLabel {
- node, err = low.LocateRefNode(node, idx)
+ node, _, err = low.LocateRefNode(node, idx)
}
if err != nil {
return componentBuildResult[T]{}, err
@@ -297,7 +298,7 @@ func extractComponentValues[T low.Buildable[N], N any](label string, root *yaml.
// build.
_ = low.BuildModel(node, n)
- err = n.Build(currentLabel, node, idx)
+ err = n.Build(ctx, currentLabel, node, idx)
if err != nil {
return componentBuildResult[T]{}, err
}
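
Components.Build above keeps its fan-out-per-component-family structure while threading the context through each extractComponentValues call. The hunk references a captureError closure that is not shown here; the sketch below is a plausible stand-in for that pattern (one goroutine per family, errors funnelled through a mutex-guarded closure, a WaitGroup for completion), not the library's actual implementation.

```go
package main

import (
	"errors"
	"fmt"
	"sync"
)

func main() {
	var (
		wg   sync.WaitGroup
		mu   sync.Mutex
		errs []error
	)

	// captureError collects failures from any extraction goroutine.
	captureError := func(err error) {
		if err == nil {
			return
		}
		mu.Lock()
		defer mu.Unlock()
		errs = append(errs, err)
	}

	// extract stands in for extractComponentValues[T](ctx, label, root, idx).
	extract := func(label string) error {
		if label == "callbacks" {
			return fmt.Errorf("cannot extract %s", label)
		}
		return nil
	}

	for _, label := range []string{"schemas", "responses", "headers", "callbacks"} {
		wg.Add(1)
		go func(l string) {
			defer wg.Done()
			captureError(extract(l))
		}(label)
	}
	wg.Wait()

	fmt.Println(errors.Join(errs...))
}
```
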
diff --git a/datamodel/low/v3/components_test.go b/datamodel/low/v3/components_test.go
index 18951d7..d69a8e0 100644
--- a/datamodel/low/v3/components_test.go
+++ b/datamodel/low/v3/components_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"fmt"
"testing"
@@ -66,7 +67,6 @@ var testComponentsYaml = `
description: eighteen of many`
func TestComponents_Build_Success(t *testing.T) {
-
var idxNode yaml.Node
mErr := yaml.Unmarshal([]byte(testComponentsYaml), &idxNode)
assert.NoError(t, mErr)
@@ -76,7 +76,7 @@ func TestComponents_Build_Success(t *testing.T) {
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(idxNode.Content[0], idx)
+ err = n.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "one of many", n.FindSchema("one").Value.Schema().Description.Value)
@@ -102,11 +102,9 @@ func TestComponents_Build_Success(t *testing.T) {
assert.Equal(t, "7add1a6c63a354b1a8ffe22552c213fe26d1229beb0b0cbe7c7ca06e63f9a364",
low.GenerateHashString(&n))
-
}
func TestComponents_Build_Success_Skip(t *testing.T) {
-
yml := `components:`
var idxNode yaml.Node
@@ -118,13 +116,11 @@ func TestComponents_Build_Success_Skip(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(idxNode.Content[0], idx)
+ err = n.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
-
}
func TestComponents_Build_Fail(t *testing.T) {
-
yml := `
parameters:
schema:
@@ -139,13 +135,11 @@ func TestComponents_Build_Fail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(idxNode.Content[0], idx)
+ err = n.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
-
}
func TestComponents_Build_ParameterFail(t *testing.T) {
-
yml := `
parameters:
pizza:
@@ -161,9 +155,8 @@ func TestComponents_Build_ParameterFail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(idxNode.Content[0], idx)
+ err = n.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
-
}
// Test parse failure among many parameters.
@@ -191,12 +184,11 @@ func TestComponents_Build_ParameterFail_Many(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(idxNode.Content[0], idx)
+ err = n.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
func TestComponents_Build_Fail_TypeFail(t *testing.T) {
-
yml := `
parameters:
- schema:
@@ -211,12 +203,11 @@ func TestComponents_Build_Fail_TypeFail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(idxNode.Content[0], idx)
+ err = n.Build(context.Background(), idxNode.Content[0], idx)
assert.Error(t, err)
}
func TestComponents_Build_ExtensionTest(t *testing.T) {
-
yml := `x-curry: seagull
headers:
x-curry-gull: vinadloo`
@@ -230,14 +221,12 @@ headers:
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(idxNode.Content[0], idx)
+ err = n.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "seagull", n.FindExtension("x-curry").Value)
-
}
func TestComponents_Build_HashEmpty(t *testing.T) {
-
yml := `x-curry: seagull`
var idxNode yaml.Node
@@ -249,11 +238,10 @@ func TestComponents_Build_HashEmpty(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(idxNode.Content[0], idx)
+ err = n.Build(context.Background(), idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "seagull", n.FindExtension("x-curry").Value)
assert.Len(t, n.GetExtensions(), 1)
assert.Equal(t, "9cf2c6ab3f9ff7e5231fcb391c8af5c47406711d2ca366533f21a8bb2f67edfe",
low.GenerateHashString(&n))
-
}
diff --git a/datamodel/low/v3/create_document.go b/datamodel/low/v3/create_document.go
index f363481..b70a053 100644
--- a/datamodel/low/v3/create_document.go
+++ b/datamodel/low/v3/create_document.go
@@ -1,8 +1,9 @@
package v3
import (
+ "context"
"errors"
- "os"
+ "path/filepath"
"sync"
"github.com/pb33f/libopenapi/datamodel"
@@ -10,7 +11,6 @@ import (
"github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
"github.com/pb33f/libopenapi/orderedmap"
- "github.com/pb33f/libopenapi/resolver"
"github.com/pb33f/libopenapi/utils"
)
@@ -18,70 +18,97 @@ import (
//
// Deprecated: Use CreateDocumentFromConfig instead. This function will be removed in a later version; it
// defaults to allowing file and remote references, and does not support relative file references.
-func CreateDocument(info *datamodel.SpecInfo) (*Document, []error) {
- config := datamodel.DocumentConfiguration{
- AllowFileReferences: true,
- AllowRemoteReferences: true,
- }
- return createDocument(info, &config)
+func CreateDocument(info *datamodel.SpecInfo) (*Document, error) {
+ return createDocument(info, datamodel.NewDocumentConfiguration())
}
// CreateDocumentFromConfig Create a new document from the provided SpecInfo and DocumentConfiguration pointer.
-func CreateDocumentFromConfig(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Document, []error) {
+func CreateDocumentFromConfig(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Document, error) {
return createDocument(info, config)
}
-func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Document, []error) {
+func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfiguration) (*Document, error) {
_, labelNode, versionNode := utils.FindKeyNodeFull(OpenAPILabel, info.RootNode.Content)
var version low.NodeReference[string]
if versionNode == nil {
- return nil, []error{errors.New("no openapi version/tag found, cannot create document")}
+ return nil, errors.New("no openapi version/tag found, cannot create document")
}
version = low.NodeReference[string]{Value: versionNode.Value, KeyNode: labelNode, ValueNode: versionNode}
doc := Document{Version: version}
- // get current working directory as a basePath
- cwd, _ := os.Getwd()
+ // create an index config and shadow the document configuration.
+ idxConfig := index.CreateClosedAPIIndexConfig()
+ idxConfig.SpecInfo = info
+ idxConfig.IgnoreArrayCircularReferences = config.IgnoreArrayCircularReferences
+ idxConfig.IgnorePolymorphicCircularReferences = config.IgnorePolymorphicCircularReferences
+ idxConfig.AvoidCircularReferenceCheck = true
+ idxConfig.BaseURL = config.BaseURL
+ idxConfig.BasePath = config.BasePath
+ idxConfig.Logger = config.Logger
+ rolodex := index.NewRolodex(idxConfig)
+ rolodex.SetRootNode(info.RootNode)
+ doc.Rolodex = rolodex
- // If basePath is provided override it
- if config.BasePath != "" {
- cwd = config.BasePath
- }
- // build an index
- idx := index.NewSpecIndexWithConfig(info.RootNode, &index.SpecIndexConfig{
- BaseURL: config.BaseURL,
- RemoteURLHandler: config.RemoteURLHandler,
- BasePath: cwd,
- AllowFileLookup: config.AllowFileReferences,
- AllowRemoteLookup: config.AllowRemoteReferences,
- AvoidBuildIndex: config.AvoidIndexBuild,
- })
- doc.Index = idx
+ // If basePath is provided, add a local filesystem to the rolodex.
+ if idxConfig.BasePath != "" || config.AllowFileReferences {
+ var cwd string
+ cwd, _ = filepath.Abs(config.BasePath)
+ // if a local filesystem was supplied in the config, add it to the rolodex.
+ if config.LocalFS != nil {
+ rolodex.AddLocalFS(cwd, config.LocalFS)
+ } else {
- var errs []error
+ // create a local filesystem
+ localFSConf := index.LocalFSConfig{
+ BaseDirectory: cwd,
+ IndexConfig: idxConfig,
+ FileFilters: config.FileFilter,
+ }
- errs = idx.GetReferenceIndexErrors()
+ fileFS, _ := index.NewLocalFSWithConfig(&localFSConf)
+ idxConfig.AllowFileLookup = true
- // create resolver and check for circular references.
- resolve := resolver.NewResolver(idx)
-
- // if configured, ignore circular references in arrays and polymorphic schemas
- if config.IgnoreArrayCircularReferences {
- resolve.IgnoreArrayCircularReferences()
- }
- if config.IgnorePolymorphicCircularReferences {
- resolve.IgnorePolymorphicCircularReferences()
- }
-
- // check for circular references.
- resolvingErrors := resolve.CheckForCircularReferences()
-
- if len(resolvingErrors) > 0 {
- for r := range resolvingErrors {
- errs = append(errs, resolvingErrors[r])
+ // add the filesystem to the rolodex
+ rolodex.AddLocalFS(cwd, fileFS)
}
}
+ // if a BaseURL is provided, add a remote filesystem to the rolodex.
+ if idxConfig.BaseURL != nil || config.AllowRemoteReferences {
+ // create a remote filesystem
+ remoteFS, _ := index.NewRemoteFSWithConfig(idxConfig)
+ if config.RemoteURLHandler != nil {
+ remoteFS.RemoteHandlerFunc = config.RemoteURLHandler
+ }
+ idxConfig.AllowRemoteLookup = true
+
+ // add to the rolodex
+ u := "default"
+ if config.BaseURL != nil {
+ u = config.BaseURL.String()
+ }
+ rolodex.AddRemoteFS(u, remoteFS)
+ }
+
+ // collect any errors raised while indexing and resolving.
+ var errs []error
+
+ // index all the things.
+ _ = rolodex.IndexTheRolodex()
+
+ // check for circular references
+ if !config.SkipCircularReferenceCheck {
+ rolodex.CheckForCircularReferences()
+ }
+
+ // extract errors
+ roloErrs := rolodex.GetCaughtErrors()
+ if roloErrs != nil {
+ errs = append(errs, roloErrs...)
+ }
+
+ // set root index.
+ doc.Index = rolodex.GetRootIndex()
var wg sync.WaitGroup
doc.Extensions = low.ExtractExtensions(info.RootNode.Content[0])
@@ -94,17 +121,17 @@ func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfigur
}
}
- runExtraction := func(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex,
- runFunc func(i *datamodel.SpecInfo, d *Document, idx *index.SpecIndex) error,
+ runExtraction := func(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex,
+ runFunc func(ctx context.Context, i *datamodel.SpecInfo, d *Document, idx *index.SpecIndex) error,
ers *[]error,
wg *sync.WaitGroup,
) {
- if er := runFunc(info, doc, idx); er != nil {
+ if er := runFunc(ctx, info, doc, idx); er != nil {
*ers = append(*ers, er)
}
wg.Done()
}
- extractionFuncs := []func(i *datamodel.SpecInfo, d *Document, idx *index.SpecIndex) error{
+ extractionFuncs := []func(ctx context.Context, i *datamodel.SpecInfo, d *Document, idx *index.SpecIndex) error{
extractInfo,
extractServers,
extractTags,
@@ -115,28 +142,30 @@ func createDocument(info *datamodel.SpecInfo, config *datamodel.DocumentConfigur
extractWebhooks,
}
+ ctx := context.Background()
+
wg.Add(len(extractionFuncs))
for _, f := range extractionFuncs {
- go runExtraction(info, &doc, idx, f, &errs, &wg)
+ go runExtraction(ctx, info, &doc, rolodex.GetRootIndex(), f, &errs, &wg)
}
wg.Wait()
- return &doc, errs
+ return &doc, errors.Join(errs...)
}
-func extractInfo(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
+func extractInfo(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
_, ln, vn := utils.FindKeyNodeFullTop(base.InfoLabel, info.RootNode.Content[0].Content)
if vn != nil {
ir := base.Info{}
_ = low.BuildModel(vn, &ir)
- _ = ir.Build(ln, vn, idx)
+ _ = ir.Build(ctx, ln, vn, idx)
nr := low.NodeReference[*base.Info]{Value: &ir, ValueNode: vn, KeyNode: ln}
doc.Info = nr
}
return nil
}
-func extractSecurity(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
- sec, ln, vn, err := low.ExtractArray[*base.SecurityRequirement](SecurityLabel, info.RootNode.Content[0], idx)
+func extractSecurity(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
+ sec, ln, vn, err := low.ExtractArray[*base.SecurityRequirement](ctx, SecurityLabel, info.RootNode.Content[0], idx)
if err != nil {
return err
}
@@ -150,8 +179,8 @@ func extractSecurity(info *datamodel.SpecInfo, doc *Document, idx *index.SpecInd
return nil
}
-func extractExternalDocs(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
- extDocs, dErr := low.ExtractObject[*base.ExternalDoc](base.ExternalDocsLabel, info.RootNode.Content[0], idx)
+func extractExternalDocs(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
+ extDocs, dErr := low.ExtractObject[*base.ExternalDoc](ctx, base.ExternalDocsLabel, info.RootNode.Content[0], idx)
if dErr != nil {
return dErr
}
@@ -159,12 +188,12 @@ func extractExternalDocs(info *datamodel.SpecInfo, doc *Document, idx *index.Spe
return nil
}
-func extractComponents(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
+func extractComponents(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
_, ln, vn := utils.FindKeyNodeFullTop(ComponentsLabel, info.RootNode.Content[0].Content)
if vn != nil {
ir := Components{}
_ = low.BuildModel(vn, &ir)
- err := ir.Build(vn, idx)
+ err := ir.Build(ctx, vn, idx)
if err != nil {
return err
}
@@ -174,7 +203,7 @@ func extractComponents(info *datamodel.SpecInfo, doc *Document, idx *index.SpecI
return nil
}
-func extractServers(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
+func extractServers(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
_, ln, vn := utils.FindKeyNodeFull(ServersLabel, info.RootNode.Content[0].Content)
if vn != nil {
if utils.IsNodeArray(vn) {
@@ -183,7 +212,7 @@ func extractServers(info *datamodel.SpecInfo, doc *Document, idx *index.SpecInde
if utils.IsNodeMap(srvN) {
srvr := Server{}
_ = low.BuildModel(srvN, &srvr)
- _ = srvr.Build(ln, srvN, idx)
+ _ = srvr.Build(ctx, ln, srvN, idx)
servers = append(servers, low.ValueReference[*Server]{
Value: &srvr,
ValueNode: srvN,
@@ -200,7 +229,7 @@ func extractServers(info *datamodel.SpecInfo, doc *Document, idx *index.SpecInde
return nil
}
-func extractTags(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
+func extractTags(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
_, ln, vn := utils.FindKeyNodeFull(base.TagsLabel, info.RootNode.Content[0].Content)
if vn != nil {
if utils.IsNodeArray(vn) {
@@ -209,7 +238,7 @@ func extractTags(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex)
if utils.IsNodeMap(tagN) {
tag := base.Tag{}
_ = low.BuildModel(tagN, &tag)
- if err := tag.Build(ln, tagN, idx); err != nil {
+ if err := tag.Build(ctx, ln, tagN, idx); err != nil {
return err
}
tags = append(tags, low.ValueReference[*base.Tag]{
@@ -228,11 +257,11 @@ func extractTags(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex)
return nil
}
-func extractPaths(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
+func extractPaths(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
_, ln, vn := utils.FindKeyNodeFull(PathsLabel, info.RootNode.Content[0].Content)
if vn != nil {
ir := Paths{}
- err := ir.Build(ln, vn, idx)
+ err := ir.Build(ctx, ln, vn, idx)
if err != nil {
return err
}
@@ -242,8 +271,8 @@ func extractPaths(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex)
return nil
}
-func extractWebhooks(info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
- hooks, hooksL, hooksN, eErr := low.ExtractMap[*PathItem](WebhooksLabel, info.RootNode, idx)
+func extractWebhooks(ctx context.Context, info *datamodel.SpecInfo, doc *Document, idx *index.SpecIndex) error {
+ hooks, hooksL, hooksN, eErr := low.ExtractMap[*PathItem](ctx, WebhooksLabel, info.RootNode, idx)
if eErr != nil {
return eErr
}
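
Illustrative sketch (not part of the patch): with the change above, createDocument returns a single error built with errors.Join rather than a []error slice, so a caller unwraps it with utils.UnwrapErrors. A minimal consumer might look like the following; the spec path is a placeholder, everything else reuses names already present in this diff.

package main

import (
	"fmt"
	"os"

	"github.com/pb33f/libopenapi/datamodel"
	v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
	"github.com/pb33f/libopenapi/utils"
)

func main() {
	// load a specification (placeholder path)
	specBytes, _ := os.ReadFile("openapi.yaml")
	info, _ := datamodel.ExtractSpecInfo(specBytes)

	// build the low-level document; errors are now joined into one error value
	doc, err := v3.CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
	if err != nil {
		// unwrap the joined error back into its individual issues
		for _, e := range utils.UnwrapErrors(err) {
			fmt.Printf("build issue: %s\n", e.Error())
		}
	}
	if doc != nil {
		fmt.Println("openapi version:", doc.Version.Value)
	}
}
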
diff --git a/datamodel/low/v3/create_document_test.go b/datamodel/low/v3/create_document_test.go
index 9ed27d6..47d74cb 100644
--- a/datamodel/low/v3/create_document_test.go
+++ b/datamodel/low/v3/create_document_test.go
@@ -2,11 +2,15 @@ package v3
import (
"fmt"
+ "github.com/pb33f/libopenapi/index"
+ "github.com/pb33f/libopenapi/utils"
+ "log/slog"
+ "net/http"
+ "net/url"
"os"
"testing"
"github.com/pb33f/libopenapi/datamodel"
- "github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/orderedmap"
"github.com/stretchr/testify/assert"
)
@@ -19,9 +23,9 @@ func initTest() {
}
data, _ := os.ReadFile("../../../test_specs/burgershop.openapi.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- var err []error
+ var err error
// deprecated function test.
- doc, err = CreateDocument(info)
+ doc, err = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
if err != nil {
panic("broken something")
}
@@ -31,10 +35,7 @@ func BenchmarkCreateDocument(b *testing.B) {
data, _ := os.ReadFile("../../../test_specs/burgershop.openapi.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
for i := 0; i < b.N; i++ {
- doc, _ = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
+ doc, _ = CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
}
}
@@ -42,47 +43,145 @@ func BenchmarkCreateDocument_Circular(b *testing.B) {
data, _ := os.ReadFile("../../../test_specs/circular-tests.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
for i := 0; i < b.N; i++ {
- _, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- if err != nil {
- panic("this should not error")
- }
- }
-}
-
-func BenchmarkCreateDocument_k8s(b *testing.B) {
-
- data, _ := os.ReadFile("../../../test_specs/k8s.json")
- info, _ := datamodel.ExtractSpecInfo(data)
-
- for i := 0; i < b.N; i++ {
-
- _, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- if err != nil {
- panic("this should not error")
+ _, err := CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
+ if err == nil {
+ panic("this should error, it has circular references")
}
}
}
func TestCircularReferenceError(t *testing.T) {
-
data, _ := os.ReadFile("../../../test_specs/circular-tests.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- circDoc, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
+ circDoc, err := CreateDocumentFromConfig(info, datamodel.NewDocumentConfiguration())
+
assert.NotNil(t, circDoc)
- assert.Len(t, err, 3)
+ assert.Error(t, err)
+
+ assert.Len(t, utils.UnwrapErrors(err), 3)
+}
+
+func TestRolodexLocalFileSystem(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = "../../../test_specs"
+ cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.NoError(t, err)
+}
+
+func TestRolodexLocalFileSystem_ProvideNonRolodexFS(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+ baseDir := "../../../test_specs"
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = baseDir
+ cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}
+ cf.LocalFS = os.DirFS(baseDir)
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexLocalFileSystem_ProvideRolodexFS(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+ baseDir := "../../../test_specs"
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = baseDir
+ cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}
+
+ localFS, lErr := index.NewLocalFSWithConfig(&index.LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: cf.FileFilter,
+ })
+ cf.LocalFS = localFS
+
+ assert.NoError(t, lErr)
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.NoError(t, err)
+}
+
+func TestRolodexLocalFileSystem_BadPath(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = "/NOWHERE"
+ cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexRemoteFileSystem(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelDebug,
+ }))
+
+ baseUrl := "https://raw.githubusercontent.com/pb33f/libopenapi/main/test_specs"
+ u, _ := url.Parse(baseUrl)
+ cf.BaseURL = u
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.NoError(t, err)
+}
+
+func TestRolodexRemoteFileSystem_BadBase(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+
+ baseUrl := "https://no-no-this-will-not-work-it-just-will-not-get-the-job-done-mate.com"
+ u, _ := url.Parse(baseUrl)
+ cf.BaseURL = u
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexRemoteFileSystem_CustomRemote_NoBaseURL(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.RemoteFS, _ = index.NewRemoteFSWithConfig(&index.SpecIndexConfig{})
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
+}
+
+func TestRolodexRemoteFileSystem_CustomHttpHandler(t *testing.T) {
+ data, _ := os.ReadFile("../../../test_specs/first.yaml")
+ info, _ := datamodel.ExtractSpecInfo(data)
+
+ cf := datamodel.NewDocumentConfiguration()
+ cf.RemoteURLHandler = http.Get
+ baseUrl := "https://no-no-this-will-not-work-it-just-will-not-get-the-job-done-mate.com"
+ u, _ := url.Parse(baseUrl)
+ cf.BaseURL = u
+
+ pizza := func(url string) (resp *http.Response, err error) {
+ return nil, nil
+ }
+ cf.RemoteURLHandler = pizza
+ lDoc, err := CreateDocumentFromConfig(info, cf)
+ assert.NotNil(t, lDoc)
+ assert.Error(t, err)
}
func TestCircularReference_IgnoreArray(t *testing.T) {
-
spec := `openapi: 3.1.0
components:
schemas:
@@ -102,16 +201,13 @@ components:
info, _ := datamodel.ExtractSpecInfo([]byte(spec))
circDoc, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
IgnoreArrayCircularReferences: true,
})
assert.NotNil(t, circDoc)
- assert.Len(t, err, 0)
+ assert.Len(t, utils.UnwrapErrors(err), 0)
}
func TestCircularReference_IgnorePoly(t *testing.T) {
-
spec := `openapi: 3.1.0
components:
schemas:
@@ -131,12 +227,10 @@ components:
info, _ := datamodel.ExtractSpecInfo([]byte(spec))
circDoc, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
IgnorePolymorphicCircularReferences: true,
})
assert.NotNil(t, circDoc)
- assert.Len(t, err, 0)
+ assert.Len(t, utils.UnwrapErrors(err), 0)
}
func BenchmarkCreateDocument_Stripe(b *testing.B) {
@@ -144,10 +238,7 @@ func BenchmarkCreateDocument_Stripe(b *testing.B) {
info, _ := datamodel.ExtractSpecInfo(data)
for i := 0; i < b.N; i++ {
- _, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
+ _, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
if err != nil {
panic("this should not error")
}
@@ -158,10 +249,7 @@ func BenchmarkCreateDocument_Petstore(b *testing.B) {
data, _ := os.ReadFile("../../../test_specs/petstorev3.json")
info, _ := datamodel.ExtractSpecInfo(data)
for i := 0; i < b.N; i++ {
- _, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
+ _, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
if err != nil {
panic("this should not error")
}
@@ -169,15 +257,10 @@ func BenchmarkCreateDocument_Petstore(b *testing.B) {
}
func TestCreateDocumentStripe(t *testing.T) {
-
data, _ := os.ReadFile("../../../test_specs/stripe.yaml")
info, _ := datamodel.ExtractSpecInfo(data)
- d, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- BasePath: "/here",
- })
- assert.Len(t, err, 3)
+ d, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Len(t, utils.UnwrapErrors(err), 3)
assert.Equal(t, "3.0.0", d.Version.Value)
assert.Equal(t, "Stripe API", d.Info.Value.Title.Value)
@@ -234,16 +317,14 @@ func TestCreateDocument_WebHooks(t *testing.T) {
}
func TestCreateDocument_WebHooks_Error(t *testing.T) {
- yml := `webhooks:
+ yml := `openapi: 3.0
+webhooks:
$ref: #bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- assert.Len(t, err, 1)
+ var err error
+ _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Len(t, utils.UnwrapErrors(err), 1)
}
func TestCreateDocument_Servers(t *testing.T) {
@@ -304,7 +385,6 @@ func TestCreateDocument_Tags(t *testing.T) {
// this is why we will need a higher level API to this model, this looks cool and all, but dude.
assert.Equal(t, "now?", extension.Value.(map[string]interface{})["ok"].([]interface{})[0].(map[string]interface{})["what"])
}
-
}
/// tag2
@@ -314,7 +394,6 @@ func TestCreateDocument_Tags(t *testing.T) {
assert.Equal(t, "https://pb33f.io", doc.Tags.Value[1].Value.ExternalDocs.Value.URL.Value)
assert.NotEmpty(t, doc.Tags.Value[1].Value.ExternalDocs.Value.URL.Value)
assert.Len(t, doc.Tags.Value[1].Value.Extensions, 0)
-
}
func TestCreateDocument_Paths(t *testing.T) {
@@ -439,7 +518,6 @@ func TestCreateDocument_Paths(t *testing.T) {
assert.NotNil(t, servers)
assert.Len(t, servers, 1)
assert.Equal(t, "https://pb33f.io", servers[0].Value.URL.Value)
-
}
func TestCreateDocument_Components_Schemas(t *testing.T) {
@@ -465,7 +543,6 @@ func TestCreateDocument_Components_Schemas(t *testing.T) {
p := fries.Value.Schema().FindProperty("favoriteDrink")
assert.Equal(t, "a frosty cold beverage can be coke or sprite",
p.Value.Schema().Description.Value)
-
}
func TestCreateDocument_Components_SecuritySchemes(t *testing.T) {
@@ -494,7 +571,6 @@ func TestCreateDocument_Components_SecuritySchemes(t *testing.T) {
readScope = oAuth.Flows.Value.AuthorizationCode.Value.FindScope("write:burgers")
assert.NotNil(t, readScope)
assert.Equal(t, "modify burgers and stuff", readScope.Value)
-
}
func TestCreateDocument_Components_Responses(t *testing.T) {
@@ -507,7 +583,6 @@ func TestCreateDocument_Components_Responses(t *testing.T) {
assert.NotNil(t, dressingResponse.Value)
assert.Equal(t, "all the dressings for a burger.", dressingResponse.Value.Description.Value)
assert.Len(t, dressingResponse.Value.Content.Value, 1)
-
}
func TestCreateDocument_Components_Examples(t *testing.T) {
@@ -594,7 +669,6 @@ func TestCreateDocument_Component_Discriminator(t *testing.T) {
assert.Nil(t, dsc.FindMappingValue("don't exist"))
assert.NotNil(t, doc.GetExternalDocs())
assert.Nil(t, doc.FindSecurityRequirement("scooby doo"))
-
}
func TestCreateDocument_CheckAdditionalProperties_Schema(t *testing.T) {
@@ -602,11 +676,8 @@ func TestCreateDocument_CheckAdditionalProperties_Schema(t *testing.T) {
components := doc.Components.Value
d := components.FindSchema("Dressing")
assert.NotNil(t, d.Value.Schema().AdditionalProperties.Value)
- if n, ok := d.Value.Schema().AdditionalProperties.Value.(*base.SchemaProxy); ok {
- assert.Equal(t, "something in here.", n.Schema().Description.Value)
- } else {
- assert.Fail(t, "should be a schema")
- }
+
+ assert.True(t, d.Value.Schema().AdditionalProperties.Value.IsA(), "should be a schema")
}
func TestCreateDocument_CheckAdditionalProperties_Bool(t *testing.T) {
@@ -614,7 +685,7 @@ func TestCreateDocument_CheckAdditionalProperties_Bool(t *testing.T) {
components := doc.Components.Value
d := components.FindSchema("Drink")
assert.NotNil(t, d.Value.Schema().AdditionalProperties.Value)
- assert.True(t, d.Value.Schema().AdditionalProperties.Value.(bool))
+ assert.True(t, d.Value.Schema().AdditionalProperties.Value.B)
}
func TestCreateDocument_Components_Error(t *testing.T) {
@@ -627,12 +698,9 @@ components:
$ref: #bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- assert.Len(t, err, 0)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.NoError(t, err)
ob := doc.Components.Value.FindSchema("bork").Value
ob.Schema()
@@ -646,12 +714,10 @@ webhooks:
$ref: #bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- doc, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- assert.Len(t, err, 1)
+ var err error
+ doc, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Equal(t, "flat map build failed: reference cannot be found: reference at line 4, column 5 is empty, it cannot be resolved",
+ err.Error())
}
func TestCreateDocument_Components_Error_Extract(t *testing.T) {
@@ -662,13 +728,9 @@ components:
$ref: #bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- assert.Len(t, err, 1)
-
+ var err error
+ _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Equal(t, "reference at line 5, column 7 is empty, it cannot be resolved", err.Error())
}
func TestCreateDocument_Paths_Errors(t *testing.T) {
@@ -678,12 +740,10 @@ paths:
$ref: #bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- assert.Len(t, err, 1)
+ var err error
+ _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Equal(t,
+ "path item build failed: cannot find reference: at line 4, col 10", err.Error())
}
func TestCreateDocument_Tags_Errors(t *testing.T) {
@@ -692,12 +752,10 @@ tags:
- $ref: #bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- assert.Len(t, err, 1)
+ var err error
+ _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Equal(t,
+ "object extraction failed: reference at line 3, column 5 is empty, it cannot be resolved", err.Error())
}
func TestCreateDocument_Security_Error(t *testing.T) {
@@ -706,12 +764,11 @@ security:
$ref: #bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- assert.Len(t, err, 1)
+ var err error
+ _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Equal(t,
+ "array build failed: reference cannot be found: reference at line 3, column 3 is empty, it cannot be resolved",
+ err.Error())
}
func TestCreateDocument_ExternalDoc_Error(t *testing.T) {
@@ -720,12 +777,9 @@ externalDocs:
$ref: #bork`
info, _ := datamodel.ExtractSpecInfo([]byte(yml))
- var err []error
- _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
- assert.Len(t, err, 1)
+ var err error
+ _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Equal(t, "object extraction failed: reference at line 3, column 3 is empty, it cannot be resolved", err.Error())
}
func TestCreateDocument_YamlAnchor(t *testing.T) {
@@ -736,16 +790,10 @@ func TestCreateDocument_YamlAnchor(t *testing.T) {
info, _ := datamodel.ExtractSpecInfo(anchorDocument)
// build low-level document model
- document, errors := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
+ document, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
- // if something went wrong, a slice of errors is returned
- if len(errors) > 0 {
- for i := range errors {
- fmt.Printf("error: %s\n", errors[i].Error())
- }
+ if err != nil {
+ fmt.Printf("error: %s\n", err.Error())
panic("cannot build document")
}
@@ -777,8 +825,19 @@ func TestCreateDocument_YamlAnchor(t *testing.T) {
assert.NotNil(t, jsonGet)
// Should this work? It doesn't
- //postJsonType := examplePath.GetValue().Post.GetValue().RequestBody.GetValue().FindContent("application/json")
- //assert.NotNil(t, postJsonType)
+ // update from quobix 10/14/2023: It does now!
+ postJsonType := examplePath.GetValue().Post.GetValue().RequestBody.GetValue().FindContent("application/json")
+ assert.NotNil(t, postJsonType)
+}
+
+func TestCreateDocument_NotOpenAPI_EnforcedDocCheck(t *testing.T) {
+ yml := `notadoc: no`
+
+ info, _ := datamodel.ExtractSpecInfo([]byte(yml))
+ var err error
+ _, err = CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
+ assert.Equal(t,
+ "no openapi version/tag found, cannot create document", err.Error())
}
func ExampleCreateDocument() {
@@ -791,16 +850,10 @@ func ExampleCreateDocument() {
info, _ := datamodel.ExtractSpecInfo(petstoreBytes)
// build low-level document model
- document, errors := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- })
+ document, err := CreateDocumentFromConfig(info, &datamodel.DocumentConfiguration{})
- // if something went wrong, a slice of errors is returned
- if len(errors) > 0 {
- for i := range errors {
- fmt.Printf("error: %s\n", errors[i].Error())
- }
+ if err != nil {
+ fmt.Printf("error: %s\n", err.Error())
panic("cannot build document")
}
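
For reference, a condensed sketch of the local-filesystem setup exercised by TestRolodexLocalFileSystem_ProvideRolodexFS above; the directory and file names are placeholders, and the function is assumed to live in this v3 package.

package v3

import (
	"os"

	"github.com/pb33f/libopenapi/datamodel"
	"github.com/pb33f/libopenapi/index"
)

// buildWithLocalFS builds a document whose relative file references are
// served by a pre-built rolodex local filesystem.
func buildWithLocalFS() (*Document, error) {
	data, _ := os.ReadFile("./specs/root.yaml") // placeholder root spec
	info, _ := datamodel.ExtractSpecInfo(data)

	cf := datamodel.NewDocumentConfiguration()
	cf.BasePath = "./specs" // placeholder directory holding referenced files
	cf.FileFilter = []string{"first.yaml", "second.yaml", "third.yaml"}

	// supply a rolodex local filesystem up front instead of letting
	// createDocument construct one from BasePath.
	localFS, _ := index.NewLocalFSWithConfig(&index.LocalFSConfig{
		BaseDirectory: cf.BasePath,
		DirFS:         os.DirFS(cf.BasePath),
		FileFilters:   cf.FileFilter,
	})
	cf.LocalFS = localFS

	return CreateDocumentFromConfig(info, cf)
}
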
diff --git a/datamodel/low/v3/document.go b/datamodel/low/v3/document.go
index 9b1f282..f7f093a 100644
--- a/datamodel/low/v3/document.go
+++ b/datamodel/low/v3/document.go
@@ -83,6 +83,9 @@ type Document struct {
//
// This property is not a part of the OpenAPI schema, this is custom to libopenapi.
Index *index.SpecIndex
+
+ // Rolodex is a reference to the rolodex used when creating this document.
+ Rolodex *index.Rolodex
}
// FindSecurityRequirement will attempt to locate a security requirement string from a supplied name.
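
A small sketch of how the new Rolodex field can be read back once createDocument has run; only methods that appear elsewhere in this patch are used.

package v3

import "fmt"

// inspectRolodex assumes doc was produced by CreateDocumentFromConfig.
func inspectRolodex(doc *Document) {
	rolo := doc.Rolodex

	// the root index held by the rolodex is the same index set on doc.Index
	fmt.Println(rolo.GetRootIndex() == doc.Index)

	// errors caught while indexing and resolving remain available here
	for _, e := range rolo.GetCaughtErrors() {
		fmt.Println(e)
	}
}
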
diff --git a/datamodel/low/v3/encoding.go b/datamodel/low/v3/encoding.go
index c965cc9..50643d9 100644
--- a/datamodel/low/v3/encoding.go
+++ b/datamodel/low/v3/encoding.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"strings"
@@ -61,11 +62,11 @@ func (en *Encoding) Hash() [32]byte {
}
// Build will extract all Header objects from supplied node.
-func (en *Encoding) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (en *Encoding) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
en.Reference = new(low.Reference)
- headers, hL, hN, err := low.ExtractMap[*Header](HeadersLabel, root, idx)
+ headers, hL, hN, err := low.ExtractMap[*Header](ctx, HeadersLabel, root, idx)
if err != nil {
return err
}
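
The context parameter added here is the same pattern applied to every Build method in the rest of this patch. A rough sketch of the new call shape, mirroring the tests below; the yaml import is assumed to be the gopkg.in/yaml.v3 package used throughout the project.

package v3

import (
	"context"
	"fmt"

	"github.com/pb33f/libopenapi/datamodel/low"
	"github.com/pb33f/libopenapi/index"
	"gopkg.in/yaml.v3"
)

func buildEncodingSketch() {
	// a tiny encoding node, similar to the test fixtures below
	var node yaml.Node
	_ = yaml.Unmarshal([]byte("contentType: hot/cakes\nallowReserved: true"), &node)

	// an index over the same node; CreateClosedAPIIndexConfig is used elsewhere in this patch
	idx := index.NewSpecIndexWithConfig(&node, index.CreateClosedAPIIndexConfig())

	var en Encoding
	_ = low.BuildModel(node.Content[0], &en)

	// the context is now the first argument and is threaded into nested Build calls
	if err := en.Build(context.Background(), nil, node.Content[0], idx); err != nil {
		fmt.Println(err)
	}
}
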
diff --git a/datamodel/low/v3/encoding_test.go b/datamodel/low/v3/encoding_test.go
index 1f1501d..7f0c840 100644
--- a/datamodel/low/v3/encoding_test.go
+++ b/datamodel/low/v3/encoding_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -31,7 +32,7 @@ explode: true`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "hot/cakes", n.ContentType.Value)
assert.Equal(t, true, n.AllowReserved.Value)
@@ -59,7 +60,7 @@ headers:
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -79,7 +80,7 @@ allowReserved: true`
var n Encoding
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `explode: true
contentType: application/waffle
@@ -96,7 +97,7 @@ style: post modern
var n2 Encoding
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/examples_test.go b/datamodel/low/v3/examples_test.go
index 8f71efa..0686008 100644
--- a/datamodel/low/v3/examples_test.go
+++ b/datamodel/low/v3/examples_test.go
@@ -5,8 +5,9 @@ package v3
import (
"fmt"
+ "os"
+
"github.com/pb33f/libopenapi/datamodel"
- "io/ioutil"
)
// How to create a low-level OpenAPI 3+ Document from an OpenAPI specification
@@ -14,19 +15,17 @@ func Example_createLowLevelOpenAPIDocument() {
// How to create a low-level OpenAPI 3 Document
// load petstore into bytes
- petstoreBytes, _ := ioutil.ReadFile("../../../test_specs/petstorev3.json")
+ petstoreBytes, _ := os.ReadFile("../../../test_specs/petstorev3.json")
// read in specification
info, _ := datamodel.ExtractSpecInfo(petstoreBytes)
// build low-level document model
- document, errors := CreateDocument(info)
+ document, errs := CreateDocument(info)
- // if something went wrong, a slice of errors is returned
+ // if something went wrong, an error is returned
- if len(errors) > 0 {
- for i := range errors {
- fmt.Printf("error: %s\n", errors[i].Error())
- }
+ if errs != nil {
+ fmt.Printf("error: %s\n", errs.Error())
panic("cannot build document")
}
diff --git a/datamodel/low/v3/header.go b/datamodel/low/v3/header.go
index 535406f..ca76b8b 100644
--- a/datamodel/low/v3/header.go
+++ b/datamodel/low/v3/header.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -97,7 +98,7 @@ func (h *Header) Hash() [32]byte {
}
// Build will extract extensions, examples, schema and content/media types from node.
-func (h *Header) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (h *Header) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
h.Reference = new(low.Reference)
@@ -110,7 +111,7 @@ func (h *Header) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle examples if set.
- exps, expsL, expsN, eErr := low.ExtractMap[*base.Example](base.ExamplesLabel, root, idx)
+ exps, expsL, expsN, eErr := low.ExtractMap[*base.Example](ctx, base.ExamplesLabel, root, idx)
if eErr != nil {
return eErr
}
@@ -123,7 +124,7 @@ func (h *Header) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle schema
- sch, sErr := base.ExtractSchema(root, idx)
+ sch, sErr := base.ExtractSchema(ctx, root, idx)
if sErr != nil {
return sErr
}
@@ -132,7 +133,7 @@ func (h *Header) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle content, if set.
- con, cL, cN, cErr := low.ExtractMap[*MediaType](ContentLabel, root, idx)
+ con, cL, cN, cErr := low.ExtractMap[*MediaType](ctx, ContentLabel, root, idx)
if cErr != nil {
return cErr
}
diff --git a/datamodel/low/v3/header_test.go b/datamodel/low/v3/header_test.go
index 44df47b..a80dc39 100644
--- a/datamodel/low/v3/header_test.go
+++ b/datamodel/low/v3/header_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
@@ -53,7 +54,7 @@ content:
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "michelle, meddy and maddy", n.Description.Value)
assert.True(t, n.AllowReserved.Value)
@@ -101,7 +102,7 @@ func TestHeader_Build_Success_Examples(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
exp := n.FindExample("family").Value
@@ -129,7 +130,7 @@ func TestHeader_Build_Fail_Examples(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -145,7 +146,7 @@ func TestHeader_Build_Fail_Schema(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -162,7 +163,7 @@ func TestHeader_Build_Fail_Content(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -195,7 +196,7 @@ x-mango: chutney`
var n Header
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `x-mango: chutney
required: true
@@ -224,7 +225,7 @@ schema:
var n2 Header
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/link.go b/datamodel/low/v3/link.go
index 63e19c5..05596ba 100644
--- a/datamodel/low/v3/link.go
+++ b/datamodel/low/v3/link.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -55,13 +56,13 @@ func (l *Link) FindExtension(ext string) *low.ValueReference[any] {
}
// Build will extract extensions and servers from the node.
-func (l *Link) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (l *Link) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
l.Reference = new(low.Reference)
l.Extensions = low.ExtractExtensions(root)
// extract server.
- ser, sErr := low.ExtractObject[*Server](ServerLabel, root, idx)
+ ser, sErr := low.ExtractObject[*Server](ctx, ServerLabel, root, idx)
if sErr != nil {
return sErr
}
diff --git a/datamodel/low/v3/link_test.go b/datamodel/low/v3/link_test.go
index cb81f4b..3a80241 100644
--- a/datamodel/low/v3/link_test.go
+++ b/datamodel/low/v3/link_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -33,7 +34,7 @@ x-linky: slinky
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "#/someref", n.OperationRef.Value)
@@ -75,7 +76,7 @@ server:
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -99,7 +100,7 @@ x-mcdonalds: bigmac`
var n Link
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `parameters:
bacon: eggs
@@ -118,7 +119,7 @@ server:
var n2 Link
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/media_type.go b/datamodel/low/v3/media_type.go
index 9671bfe..5ea4b52 100644
--- a/datamodel/low/v3/media_type.go
+++ b/datamodel/low/v3/media_type.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -56,7 +57,7 @@ func (mt *MediaType) GetAllExamples() orderedmap.Map[low.KeyReference[string], l
}
// Build will extract examples, extensions, schema and encoding from node.
-func (mt *MediaType) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (mt *MediaType) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
mt.Reference = new(low.Reference)
@@ -85,7 +86,7 @@ func (mt *MediaType) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
//handle schema
- sch, sErr := base.ExtractSchema(root, idx)
+ sch, sErr := base.ExtractSchema(ctx, root, idx)
if sErr != nil {
return sErr
}
@@ -94,7 +95,7 @@ func (mt *MediaType) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle examples if set.
- exps, expsL, expsN, eErr := low.ExtractMap[*base.Example](base.ExamplesLabel, root, idx)
+ exps, expsL, expsN, eErr := low.ExtractMap[*base.Example](ctx, base.ExamplesLabel, root, idx)
if eErr != nil {
return eErr
}
@@ -107,7 +108,7 @@ func (mt *MediaType) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle encoding
- encs, encsL, encsN, encErr := low.ExtractMap[*Encoding](EncodingLabel, root, idx)
+ encs, encsL, encsN, encErr := low.ExtractMap[*Encoding](ctx, EncodingLabel, root, idx)
if encErr != nil {
return encErr
}
diff --git a/datamodel/low/v3/media_type_test.go b/datamodel/low/v3/media_type_test.go
index 27764da..f8bc87f 100644
--- a/datamodel/low/v3/media_type_test.go
+++ b/datamodel/low/v3/media_type_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -33,7 +34,7 @@ x-rock: and roll`
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "and roll", n.FindExtension("x-rock").Value)
assert.Equal(t, "string", n.Schema.Value.Schema().Type.Value.A)
@@ -56,7 +57,7 @@ func TestMediaType_Build_Fail_Schema(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -73,7 +74,7 @@ func TestMediaType_Build_Fail_Examples(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -91,7 +92,7 @@ func TestMediaType_Build_Fail_Encoding(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -116,7 +117,7 @@ x-done: for the day!`
var n MediaType
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `encoding:
meaty/chewy:
@@ -137,7 +138,7 @@ example: a thing`
var n2 MediaType
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/oauth_flows.go b/datamodel/low/v3/oauth_flows.go
index 9692d68..3f9a4ae 100644
--- a/datamodel/low/v3/oauth_flows.go
+++ b/datamodel/low/v3/oauth_flows.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -37,31 +38,31 @@ func (o *OAuthFlows) FindExtension(ext string) *low.ValueReference[any] {
}
// Build will extract extensions and all OAuthFlow types from the supplied node.
-func (o *OAuthFlows) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (o *OAuthFlows) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
o.Reference = new(low.Reference)
o.Extensions = low.ExtractExtensions(root)
- v, vErr := low.ExtractObject[*OAuthFlow](ImplicitLabel, root, idx)
+ v, vErr := low.ExtractObject[*OAuthFlow](ctx, ImplicitLabel, root, idx)
if vErr != nil {
return vErr
}
o.Implicit = v
- v, vErr = low.ExtractObject[*OAuthFlow](PasswordLabel, root, idx)
+ v, vErr = low.ExtractObject[*OAuthFlow](ctx, PasswordLabel, root, idx)
if vErr != nil {
return vErr
}
o.Password = v
- v, vErr = low.ExtractObject[*OAuthFlow](ClientCredentialsLabel, root, idx)
+ v, vErr = low.ExtractObject[*OAuthFlow](ctx, ClientCredentialsLabel, root, idx)
if vErr != nil {
return vErr
}
o.ClientCredentials = v
- v, vErr = low.ExtractObject[*OAuthFlow](AuthorizationCodeLabel, root, idx)
+ v, vErr = low.ExtractObject[*OAuthFlow](ctx, AuthorizationCodeLabel, root, idx)
if vErr != nil {
return vErr
}
@@ -117,7 +118,7 @@ func (o *OAuthFlow) FindExtension(ext string) *low.ValueReference[any] {
}
// Build will extract extensions from the node.
-func (o *OAuthFlow) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (o *OAuthFlow) Build(_ context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
o.Reference = new(low.Reference)
o.Extensions = low.ExtractExtensions(root)
return nil
diff --git a/datamodel/low/v3/oauth_flows_test.go b/datamodel/low/v3/oauth_flows_test.go
index ea5357b..a556a28 100644
--- a/datamodel/low/v3/oauth_flows_test.go
+++ b/datamodel/low/v3/oauth_flows_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -30,7 +31,7 @@ x-tasty: herbs
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "herbs", n.FindExtension("x-tasty").Value)
assert.Equal(t, "https://pb33f.io/auth", n.AuthorizationUrl.Value)
@@ -54,7 +55,7 @@ x-tasty: herbs`
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "herbs", n.FindExtension("x-tasty").Value)
assert.Equal(t, "https://pb33f.io/auth", n.Implicit.Value.AuthorizationUrl.Value)
@@ -74,7 +75,7 @@ func TestOAuthFlow_Build_Implicit_Fail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -91,7 +92,7 @@ func TestOAuthFlow_Build_Password(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "https://pb33f.io/auth", n.Password.Value.AuthorizationUrl.Value)
}
@@ -109,7 +110,7 @@ func TestOAuthFlow_Build_Password_Fail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -126,7 +127,7 @@ func TestOAuthFlow_Build_ClientCredentials(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "https://pb33f.io/auth", n.ClientCredentials.Value.AuthorizationUrl.Value)
}
@@ -144,7 +145,7 @@ func TestOAuthFlow_Build_ClientCredentials_Fail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -161,7 +162,7 @@ func TestOAuthFlow_Build_AuthCode(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "https://pb33f.io/auth", n.AuthorizationCode.Value.AuthorizationUrl.Value)
}
@@ -179,7 +180,7 @@ func TestOAuthFlow_Build_AuthCode_Fail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -198,7 +199,7 @@ x-sleepy: tired`
var n OAuthFlow
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `refreshUrl: https://pb33f.io/refresh
tokenUrl: https://pb33f.io/token
@@ -213,7 +214,7 @@ scopes:
var n2 OAuthFlow
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
@@ -239,7 +240,7 @@ x-code: cody
var n OAuthFlows
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `authorizationCode:
authorizationUrl: https://pb33f.io/auth
@@ -258,7 +259,7 @@ password:
var n2 OAuthFlows
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/operation.go b/datamodel/low/v3/operation.go
index ace2481..7e598a6 100644
--- a/datamodel/low/v3/operation.go
+++ b/datamodel/low/v3/operation.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -58,21 +59,21 @@ func (o *Operation) FindSecurityRequirement(name string) []low.ValueReference[st
}
// Build will extract external docs, parameters, request body, responses, callbacks, security and servers.
-func (o *Operation) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (o *Operation) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
o.Reference = new(low.Reference)
o.Extensions = low.ExtractExtensions(root)
// extract externalDocs
- extDocs, dErr := low.ExtractObject[*base.ExternalDoc](base.ExternalDocsLabel, root, idx)
+ extDocs, dErr := low.ExtractObject[*base.ExternalDoc](ctx, base.ExternalDocsLabel, root, idx)
if dErr != nil {
return dErr
}
o.ExternalDocs = extDocs
// extract parameters
- params, ln, vn, pErr := low.ExtractArray[*Parameter](ParametersLabel, root, idx)
+ params, ln, vn, pErr := low.ExtractArray[*Parameter](ctx, ParametersLabel, root, idx)
if pErr != nil {
return pErr
}
@@ -85,21 +86,21 @@ func (o *Operation) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// extract request body
- rBody, rErr := low.ExtractObject[*RequestBody](RequestBodyLabel, root, idx)
+ rBody, rErr := low.ExtractObject[*RequestBody](ctx, RequestBodyLabel, root, idx)
if rErr != nil {
return rErr
}
o.RequestBody = rBody
// extract responses
- respBody, respErr := low.ExtractObject[*Responses](ResponsesLabel, root, idx)
+ respBody, respErr := low.ExtractObject[*Responses](ctx, ResponsesLabel, root, idx)
if respErr != nil {
return respErr
}
o.Responses = respBody
// extract callbacks
- callbacks, cbL, cbN, cbErr := low.ExtractMap[*Callback](CallbacksLabel, root, idx)
+ callbacks, cbL, cbN, cbErr := low.ExtractMap[*Callback](ctx, CallbacksLabel, root, idx)
if cbErr != nil {
return cbErr
}
@@ -112,7 +113,7 @@ func (o *Operation) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// extract security
- sec, sln, svn, sErr := low.ExtractArray[*base.SecurityRequirement](SecurityLabel, root, idx)
+ sec, sln, svn, sErr := low.ExtractArray[*base.SecurityRequirement](ctx, SecurityLabel, root, idx)
if sErr != nil {
return sErr
}
@@ -137,7 +138,7 @@ func (o *Operation) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// extract servers
- servers, sl, sn, serErr := low.ExtractArray[*Server](ServersLabel, root, idx)
+ servers, sl, sn, serErr := low.ExtractArray[*Server](ctx, ServersLabel, root, idx)
if serErr != nil {
return serErr
}
diff --git a/datamodel/low/v3/operation_test.go b/datamodel/low/v3/operation_test.go
index 1d7663e..18a4f88 100644
--- a/datamodel/low/v3/operation_test.go
+++ b/datamodel/low/v3/operation_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
@@ -50,7 +51,7 @@ servers:
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Len(t, n.Tags.Value, 2)
@@ -87,7 +88,7 @@ func TestOperation_Build_FailDocs(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -104,7 +105,7 @@ func TestOperation_Build_FailParams(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -121,7 +122,7 @@ func TestOperation_Build_FailRequestBody(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -138,7 +139,7 @@ func TestOperation_Build_FailResponses(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -155,7 +156,7 @@ func TestOperation_Build_FailCallbacks(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -172,7 +173,7 @@ func TestOperation_Build_FailSecurity(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -189,7 +190,7 @@ func TestOperation_Build_FailServers(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -229,7 +230,7 @@ x-mint: sweet`
var n Operation
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `tags:
- nice
@@ -265,7 +266,7 @@ x-mint: sweet`
var n2 Operation
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
@@ -300,7 +301,7 @@ security: []`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Len(t, n.Security.Value, 0)
diff --git a/datamodel/low/v3/parameter.go b/datamodel/low/v3/parameter.go
index 8645f20..92215a4 100644
--- a/datamodel/low/v3/parameter.go
+++ b/datamodel/low/v3/parameter.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -60,7 +61,7 @@ func (p *Parameter) GetExtensions() map[low.KeyReference[string]]low.ValueRefere
}
// Build will extract examples, extensions and content/media types.
-func (p *Parameter) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (p *Parameter) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
p.Reference = new(low.Reference)
@@ -73,7 +74,7 @@ func (p *Parameter) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle schema
- sch, sErr := base.ExtractSchema(root, idx)
+ sch, sErr := base.ExtractSchema(ctx, root, idx)
if sErr != nil {
return sErr
}
@@ -82,7 +83,7 @@ func (p *Parameter) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle examples if set.
- exps, expsL, expsN, eErr := low.ExtractMap[*base.Example](base.ExamplesLabel, root, idx)
+ exps, expsL, expsN, eErr := low.ExtractMap[*base.Example](ctx, base.ExamplesLabel, root, idx)
if eErr != nil {
return eErr
}
@@ -95,7 +96,7 @@ func (p *Parameter) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle content, if set.
- con, cL, cN, cErr := low.ExtractMap[*MediaType](ContentLabel, root, idx)
+ con, cL, cN, cErr := low.ExtractMap[*MediaType](ctx, ContentLabel, root, idx)
if cErr != nil {
return cErr
}
diff --git a/datamodel/low/v3/parameter_test.go b/datamodel/low/v3/parameter_test.go
index 44c5738..4769e4c 100644
--- a/datamodel/low/v3/parameter_test.go
+++ b/datamodel/low/v3/parameter_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
@@ -55,7 +56,7 @@ content:
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "michelle, meddy and maddy", n.Description.Value)
assert.True(t, n.AllowReserved.Value)
@@ -105,7 +106,7 @@ func TestParameter_Build_Success_Examples(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
exp := n.FindExample("family").Value
@@ -133,7 +134,7 @@ func TestParameter_Build_Fail_Examples(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -149,7 +150,7 @@ func TestParameter_Build_Fail_Schema(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -166,7 +167,7 @@ func TestParameter_Build_Fail_Content(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -216,7 +217,7 @@ content:
var n Parameter
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `description: michelle, meddy and maddy
required: true
@@ -262,7 +263,7 @@ content:
var n2 Parameter
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
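Editor's note: the low-level v3 Build methods in this patch now take a context.Context as their first argument, which is threaded into every extraction helper (ExtractSchema, ExtractMap, ExtractArray, and so on). The sketch below shows the new call shape only; the test name and YAML fixture are illustrative and not part of this change, and it assumes the same package and helpers used by the tests above.

package v3

import (
	"context"
	"testing"

	"github.com/pb33f/libopenapi/datamodel/low"
	"github.com/pb33f/libopenapi/index"
	"github.com/stretchr/testify/assert"
	"gopkg.in/yaml.v3"
)

// TestParameter_Build_ContextSketch is an illustrative sketch, not part of this change.
func TestParameter_Build_ContextSketch(t *testing.T) {
	yml := `name: petId
in: query
schema:
  type: string`

	var idxNode yaml.Node
	_ = yaml.Unmarshal([]byte(yml), &idxNode)
	idx := index.NewSpecIndex(&idxNode)

	var p Parameter
	err := low.BuildModel(idxNode.Content[0], &p)
	assert.NoError(t, err)

	// context.Background() is now the first argument; nested extractions
	// (schema, examples, content) receive the same context.
	err = p.Build(context.Background(), nil, idxNode.Content[0], idx)
	assert.NoError(t, err)
	assert.Equal(t, "petId", p.Name.Value)
}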
diff --git a/datamodel/low/v3/path_item.go b/datamodel/low/v3/path_item.go
index 9a0a157..95cb239 100644
--- a/datamodel/low/v3/path_item.go
+++ b/datamodel/low/v3/path_item.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -109,7 +110,7 @@ func (p *PathItem) GetExtensions() map[low.KeyReference[string]]low.ValueReferen
// Build extracts extensions, parameters, servers and each http method defined.
// everything is extracted asynchronously for speed.
-func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (p *PathItem) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
p.Reference = new(low.Reference)
@@ -123,7 +124,7 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
var ops []low.NodeReference[*Operation]
// extract parameters
- params, ln, vn, pErr := low.ExtractArray[*Parameter](ParametersLabel, root, idx)
+ params, ln, vn, pErr := low.ExtractArray[*Parameter](ctx, ParametersLabel, root, idx)
if pErr != nil {
return pErr
}
@@ -143,7 +144,7 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
if utils.IsNodeMap(srvN) {
srvr := new(Server)
_ = low.BuildModel(srvN, srvr)
- srvr.Build(ln, srvN, idx)
+ srvr.Build(ctx, ln, srvN, idx)
servers = append(servers, low.ValueReference[*Server]{
Value: srvr,
ValueNode: srvN,
@@ -198,6 +199,7 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
continue // ignore everything else.
}
+ foundContext := ctx
var op Operation
opIsRef := false
var opRefVal string
@@ -213,12 +215,15 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
opIsRef = true
opRefVal = ref
- r, err := low.LocateRefNode(pathNode, idx)
+ r, newIdx, err, nCtx := low.LocateRefNodeWithContext(ctx, pathNode, idx)
if r != nil {
if r.Kind == yaml.DocumentNode {
r = r.Content[0]
}
pathNode = r
+ foundContext = nCtx
+ foundContext = context.WithValue(foundContext, index.FoundIndexKey, newIdx)
+
if r.Tag == "" {
// If it's a node from file, tag is empty
pathNode = r.Content[0]
@@ -233,6 +238,8 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
return fmt.Errorf("path item build failed: cannot find reference: %s at line %d, col %d",
pathNode.Content[1].Value, pathNode.Content[1].Line, pathNode.Content[1].Column)
}
+ } else {
+ foundContext = context.WithValue(foundContext, index.FoundIndexKey, idx)
}
wg.Add(1)
low.BuildModelAsync(pathNode, &op, &wg, &errors)
@@ -241,6 +248,7 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
Value: &op,
KeyNode: currentNode,
ValueNode: pathNode,
+ Context: foundContext,
}
if opIsRef {
opRef.Reference = opRefVal
@@ -277,7 +285,7 @@ func (p *PathItem) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
ref = op.Reference
}
- err := op.Value.Build(op.KeyNode, op.ValueNode, idx)
+ err := op.Value.Build(op.Context, op.KeyNode, op.ValueNode, op.Context.Value(index.FoundIndexKey).(*index.SpecIndex))
if ref != "" {
op.Value.Reference.Reference = ref
}
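Editor's note: the path-item builder above now records which index a referenced operation was located in by stashing it on the operation's context under index.FoundIndexKey, then reads it back when the operation itself is built. A minimal standalone sketch of that store-and-retrieve pattern follows; the surrounding setup is illustrative only.

package main

import (
	"context"
	"fmt"

	"github.com/pb33f/libopenapi/index"
	"gopkg.in/yaml.v3"
)

func main() {
	// a trivial index standing in for the one a resolved $ref points at.
	var node yaml.Node
	_ = yaml.Unmarshal([]byte("openapi: 3.1.0"), &node)
	foundIdx := index.NewSpecIndex(&node)

	// the builder stores the index the reference was located in...
	ctx := context.WithValue(context.Background(), index.FoundIndexKey, foundIdx)

	// ...and the operation build later pulls it back out, so it resolves
	// against the index the reference actually lives in.
	if idx, ok := ctx.Value(index.FoundIndexKey).(*index.SpecIndex); ok {
		fmt.Println(idx == foundIdx) // true
	}
}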
diff --git a/datamodel/low/v3/path_item_test.go b/datamodel/low/v3/path_item_test.go
index cd5f631..e83b0d4 100644
--- a/datamodel/low/v3/path_item_test.go
+++ b/datamodel/low/v3/path_item_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -43,7 +44,7 @@ x-byebye: boebert`
var n PathItem
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `get:
description: get me
@@ -75,7 +76,7 @@ summary: it's another path item`
var n2 PathItem
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/paths.go b/datamodel/low/v3/paths.go
index 129b48d..3b0292f 100644
--- a/datamodel/low/v3/paths.go
+++ b/datamodel/low/v3/paths.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -64,7 +65,7 @@ func (p *Paths) GetExtensions() map[low.KeyReference[string]]low.ValueReference[
}
// Build will extract extensions and all PathItems. This happens asynchronously for speed.
-func (p *Paths) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (p *Paths) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
p.Reference = new(low.Reference)
@@ -119,6 +120,7 @@ func (p *Paths) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
}()
+
// TranslatePipeline output.
go func() {
for {
@@ -132,21 +134,17 @@ func (p *Paths) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
wg.Done()
}()
+
err := datamodel.TranslatePipeline[buildInput, buildResult](in, out,
func(value buildInput) (buildResult, error) {
pNode := value.pathNode
cNode := value.currentNode
+
if ok, _, _ := utils.IsNodeRefValue(pNode); ok {
- r, err := low.LocateRefNode(pNode, idx)
+ r, _, err := low.LocateRefNode(pNode, idx)
if r != nil {
pNode = r
- if r.Tag == "" {
- // If it's a node from file, tag is empty
- // If it's a reference we need to extract actual operation node
- pNode = r.Content[0]
- }
-
if err != nil {
if !idx.AllowCircularReferenceResolving() {
return buildResult{}, fmt.Errorf("path item build failed: %s", err.Error())
@@ -158,13 +156,19 @@ func (p *Paths) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
}
+
path := new(PathItem)
_ = low.BuildModel(pNode, path)
- err := path.Build(cNode, pNode, idx)
+ err := path.Build(ctx, cNode, pNode, idx)
+
if err != nil {
- return buildResult{}, err
+ if idx != nil && idx.GetLogger() != nil {
+ idx.GetLogger().Error(fmt.Sprintf("error building path item '%s'", err.Error()))
+ }
+ //return buildResult{}, err
}
+
return buildResult{
key: low.KeyReference[string]{
Value: cNode.Value,
@@ -182,6 +186,7 @@ func (p *Paths) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
return err
}
+
p.PathItems = pathsMap
return nil
}
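Editor's note: with this change a failed path-item build no longer aborts Paths.Build; the error is written to the index logger instead (the early return is commented out above). Callers who want to observe those failures supply a logger through the index configuration, which is the pattern the updated tests below use. A minimal sketch, assuming an in-memory slog handler; the helper name is illustrative and not part of this change.

package v3

import (
	"bytes"
	"context"
	"fmt"
	"log/slog"

	"github.com/pb33f/libopenapi/datamodel/low"
	"github.com/pb33f/libopenapi/index"
	"gopkg.in/yaml.v3"
)

// buildPathsWithLogs is an illustrative helper, not part of this change.
func buildPathsWithLogs(spec string) (string, error) {
	var idxNode yaml.Node
	if err := yaml.Unmarshal([]byte(spec), &idxNode); err != nil {
		return "", err
	}

	// capture error-level logs emitted while building path items.
	buf := bytes.NewBuffer(nil)
	logger := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
		Level: slog.LevelError,
	}))
	cfg := index.SpecIndexConfig{Logger: logger}
	idx := index.NewSpecIndexWithConfig(&idxNode, &cfg)

	var p Paths
	if err := low.BuildModel(idxNode.Content[0], &p); err != nil {
		return "", err
	}
	if err := p.Build(context.Background(), nil, idxNode.Content[0], idx); err != nil {
		return "", fmt.Errorf("paths build failed: %w", err)
	}

	// broken path items now show up here rather than as a returned error.
	return buf.String(), nil
}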
diff --git a/datamodel/low/v3/paths_test.go b/datamodel/low/v3/paths_test.go
index ce4ee6e..a4d78eb 100644
--- a/datamodel/low/v3/paths_test.go
+++ b/datamodel/low/v3/paths_test.go
@@ -4,12 +4,15 @@
package v3
import (
+ "bytes"
+ "context"
"fmt"
+ "log/slog"
+ "strings"
"testing"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
- "github.com/pb33f/libopenapi/resolver"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
)
@@ -48,7 +51,7 @@ x-milk: cold`
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
path := n.FindPath("/some/path").Value
@@ -80,7 +83,7 @@ func TestPaths_Build_Fail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -106,7 +109,7 @@ func TestPaths_Build_FailRef(t *testing.T) {
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
somePath := n.FindPath("/some/path").Value
@@ -135,14 +138,25 @@ func TestPaths_Build_FailRefDeadEnd(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- idx := index.NewSpecIndex(&idxNode)
+
+ var b []byte
+ buf := bytes.NewBuffer(b)
+ log := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ cfg := index.SpecIndexConfig{
+ Logger: log,
+ }
+ idx := index.NewSpecIndexWithConfig(&idxNode, &cfg)
var n Paths
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
- assert.Error(t, err)
+ n.Build(context.Background(), nil, idxNode.Content[0], idx)
+
+ assert.Contains(t, buf.String(), "msg=\"unable to locate reference anywhere in the rolodex\" reference=#/no/path")
+ assert.Contains(t, buf.String(), "msg=\"unable to locate reference anywhere in the rolodex\" reference=#/nowhere")
}
func TestPaths_Build_SuccessRef(t *testing.T) {
@@ -161,13 +175,14 @@ func TestPaths_Build_SuccessRef(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
+
idx := index.NewSpecIndex(&idxNode)
var n Paths
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
somePath := n.FindPath("/some/path").Value
@@ -190,14 +205,25 @@ func TestPaths_Build_BadParams(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- idx := index.NewSpecIndex(&idxNode)
+
+ var b []byte
+ buf := bytes.NewBuffer(b)
+ log := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ cfg := index.SpecIndexConfig{
+ Logger: log,
+ }
+ idx := index.NewSpecIndexWithConfig(&idxNode, &cfg)
var n Paths
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
- assert.Error(t, err)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
+ er := buf.String()
+ assert.Contains(t, er, "array build failed, input is not an array, line 3, column 5'")
+
}
func TestPaths_Build_BadRef(t *testing.T) {
@@ -216,14 +242,27 @@ func TestPaths_Build_BadRef(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- idx := index.NewSpecIndex(&idxNode)
+
+ var b []byte
+ buf := bytes.NewBuffer(b)
+ log := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ cfg := index.SpecIndexConfig{
+ Logger: log,
+ }
+ idx := index.NewSpecIndexWithConfig(&idxNode, &cfg)
var n Paths
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
- assert.Error(t, err)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
+
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
+ assert.Contains(t, buf.String(), "unable to locate reference anywhere in the rolodex\" reference=#/no-where")
+ assert.Contains(t, buf.String(), "error building path item 'path item build failed: cannot find reference: #/no-where at line 4, col 10'")
+
}
func TestPathItem_Build_GoodRef(t *testing.T) {
@@ -252,7 +291,7 @@ func TestPathItem_Build_GoodRef(t *testing.T) {
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
}
@@ -276,14 +315,25 @@ func TestPathItem_Build_BadRef(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- idx := index.NewSpecIndex(&idxNode)
+
+ var b []byte
+ buf := bytes.NewBuffer(b)
+ log := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ cfg := index.SpecIndexConfig{
+ Logger: log,
+ }
+ idx := index.NewSpecIndexWithConfig(&idxNode, &cfg)
var n Paths
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
- assert.Error(t, err)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
+ assert.Contains(t, buf.String(), "unable to locate reference anywhere in the rolodex\" reference=#/~1cakes/NotFound")
+ assert.Contains(t, buf.String(), "error building path item 'path item build failed: cannot find reference: #/~1another~1path/get at line 4, col 10")
+
}
func TestPathNoOps(t *testing.T) {
@@ -300,7 +350,7 @@ func TestPathNoOps(t *testing.T) {
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
}
@@ -330,7 +380,7 @@ func TestPathItem_Build_Using_Ref(t *testing.T) {
err := low.BuildModel(rootNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, rootNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, rootNode.Content[0], idx)
assert.NoError(t, err)
somePath := n.FindPath("/a/path")
@@ -358,7 +408,7 @@ func TestPath_Build_Using_CircularRef(t *testing.T) {
assert.NoError(t, mErr)
idx := index.NewSpecIndex(&idxNode)
- resolve := resolver.NewResolver(idx)
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -373,7 +423,7 @@ func TestPath_Build_Using_CircularRef(t *testing.T) {
err := low.BuildModel(rootNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, rootNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, rootNode.Content[0], idx)
assert.Error(t, err)
}
@@ -392,9 +442,18 @@ func TestPath_Build_Using_CircularRefWithOp(t *testing.T) {
var idxNode yaml.Node
mErr := yaml.Unmarshal([]byte(yml), &idxNode)
assert.NoError(t, mErr)
- idx := index.NewSpecIndex(&idxNode)
- resolve := resolver.NewResolver(idx)
+ var b []byte
+ buf := bytes.NewBuffer(b)
+ log := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ cfg := index.SpecIndexConfig{
+ Logger: log,
+ }
+ idx := index.NewSpecIndexWithConfig(&idxNode, &cfg)
+
+ resolve := index.NewResolver(idx)
errs := resolve.CheckForCircularReferences()
assert.Len(t, errs, 1)
@@ -410,8 +469,8 @@ func TestPath_Build_Using_CircularRefWithOp(t *testing.T) {
err := low.BuildModel(rootNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, rootNode.Content[0], idx)
- assert.Error(t, err)
+ _ = n.Build(context.Background(), nil, rootNode.Content[0], idx)
+ assert.Contains(t, buf.String(), "error building path item 'build schema failed: circular reference 'post -> post -> post' found during lookup at line 4, column 7, It cannot be resolved'")
}
@@ -424,14 +483,23 @@ func TestPaths_Build_BrokenOp(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- idx := index.NewSpecIndex(&idxNode)
+
+ var b []byte
+ buf := bytes.NewBuffer(b)
+ log := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ cfg := index.SpecIndexConfig{
+ Logger: log,
+ }
+ idx := index.NewSpecIndexWithConfig(&idxNode, &cfg)
var n Paths
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
- assert.Error(t, err)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
+ assert.Contains(t, buf.String(), "error building path item 'object extraction failed: reference at line 4, column 7 is empty, it cannot be resolved'")
}
func TestPaths_Hash(t *testing.T) {
@@ -450,7 +518,7 @@ x-france: french`
var n Paths
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `/french/toast:
description: toast
@@ -466,7 +534,7 @@ x-france: french`
var n2 Paths
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
@@ -494,12 +562,22 @@ func TestPaths_Build_Fail_Many(t *testing.T) {
var idxNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &idxNode)
- idx := index.NewSpecIndex(&idxNode)
+
+ var b []byte
+ buf := bytes.NewBuffer(b)
+ log := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ cfg := index.SpecIndexConfig{
+ Logger: log,
+ }
+ idx := index.NewSpecIndexWithConfig(&idxNode, &cfg)
var n Paths
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
- assert.Error(t, err)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
+ errors := strings.Split(buf.String(), "\n")
+ assert.Len(t, errors, 1001)
}
diff --git a/datamodel/low/v3/request_body.go b/datamodel/low/v3/request_body.go
index ea681df..56e3f8b 100644
--- a/datamodel/low/v3/request_body.go
+++ b/datamodel/low/v3/request_body.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -42,14 +43,14 @@ func (rb *RequestBody) FindContent(cType string) *low.ValueReference[*MediaType]
}
// Build will extract extensions and MediaType objects from the node.
-func (rb *RequestBody) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (rb *RequestBody) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
rb.Reference = new(low.Reference)
rb.Extensions = low.ExtractExtensions(root)
// handle content, if set.
- con, cL, cN, cErr := low.ExtractMap[*MediaType](ContentLabel, root, idx)
+ con, cL, cN, cErr := low.ExtractMap[*MediaType](ctx, ContentLabel, root, idx)
if cErr != nil {
return cErr
}
diff --git a/datamodel/low/v3/request_body_test.go b/datamodel/low/v3/request_body_test.go
index 832e5de..39b6605 100644
--- a/datamodel/low/v3/request_body_test.go
+++ b/datamodel/low/v3/request_body_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -28,7 +29,7 @@ x-requesto: presto`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "a nice request", n.Description.Value)
assert.True(t, n.Required.Value)
@@ -51,7 +52,7 @@ func TestRequestBody_Fail(t *testing.T) {
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -75,7 +76,7 @@ x-toast: nice
var n RequestBody
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `description: nice toast
content:
@@ -94,7 +95,7 @@ x-toast: nice`
var n2 RequestBody
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/response.go b/datamodel/low/v3/response.go
index 9e8468b..13ec225 100644
--- a/datamodel/low/v3/response.go
+++ b/datamodel/low/v3/response.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -56,14 +57,14 @@ func (r *Response) FindLink(hType string) *low.ValueReference[*Link] {
}
// Build will extract headers, extensions, content and links from node.
-func (r *Response) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (r *Response) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
r.Reference = new(low.Reference)
r.Extensions = low.ExtractExtensions(root)
//extract headers
- headers, lN, kN, err := low.ExtractMapExtensions[*Header](HeadersLabel, root, idx, true)
+ headers, lN, kN, err := low.ExtractMapExtensions[*Header](ctx, HeadersLabel, root, idx, true)
if err != nil {
return err
}
@@ -75,7 +76,7 @@ func (r *Response) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
}
- con, clN, cN, cErr := low.ExtractMap[*MediaType](ContentLabel, root, idx)
+ con, clN, cN, cErr := low.ExtractMap[*MediaType](ctx, ContentLabel, root, idx)
if cErr != nil {
return cErr
}
@@ -88,7 +89,7 @@ func (r *Response) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
}
// handle links if set
- links, linkLabel, linkValue, lErr := low.ExtractMap[*Link](LinksLabel, root, idx)
+ links, linkLabel, linkValue, lErr := low.ExtractMap[*Link](ctx, LinksLabel, root, idx)
if lErr != nil {
return lErr
}
diff --git a/datamodel/low/v3/response_test.go b/datamodel/low/v3/response_test.go
index 0424c75..51d31a2 100644
--- a/datamodel/low/v3/response_test.go
+++ b/datamodel/low/v3/response_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -39,7 +40,7 @@ default:
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "default response", n.Default.Value.Description.Value)
@@ -92,7 +93,7 @@ x-shoes: old`
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
// check hash
assert.Equal(t, "54ab66e6cb8bd226940f421c2387e45215b84c946182435dfe2a3036043fa07c",
@@ -116,7 +117,7 @@ func TestResponses_Build_FailCodes_WrongType(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -134,7 +135,7 @@ func TestResponses_Build_FailCodes(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -151,7 +152,7 @@ func TestResponses_Build_FailDefault(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -171,7 +172,7 @@ func TestResponses_Build_FailBadHeader(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -191,7 +192,7 @@ func TestResponses_Build_FailBadContent(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -211,7 +212,7 @@ func TestResponses_Build_FailBadLinks(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
@@ -232,7 +233,7 @@ func TestResponses_Build_AllowXPrefixHeader(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "string",
@@ -267,7 +268,7 @@ links:
var n Response
_ = low.BuildModel(idxNode.Content[0], &n)
- _ = n.Build(nil, idxNode.Content[0], idx)
+ _ = n.Build(context.Background(), nil, idxNode.Content[0], idx)
yml2 := `description: nice toast
x-ham: jam
@@ -294,7 +295,7 @@ links:
var n2 Response
_ = low.BuildModel(idxNode2.Content[0], &n2)
- _ = n2.Build(nil, idxNode2.Content[0], idx2)
+ _ = n2.Build(context.Background(), nil, idxNode2.Content[0], idx2)
// hash
assert.Equal(t, n.Hash(), n2.Hash())
diff --git a/datamodel/low/v3/responses.go b/datamodel/low/v3/responses.go
index 28f8c49..6adfbac 100644
--- a/datamodel/low/v3/responses.go
+++ b/datamodel/low/v3/responses.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"sort"
@@ -47,13 +48,13 @@ func (r *Responses) GetExtensions() map[low.KeyReference[string]]low.ValueRefere
}
// Build will extract default response and all Response objects for each code
-func (r *Responses) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (r *Responses) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
r.Reference = new(low.Reference)
r.Extensions = low.ExtractExtensions(root)
utils.CheckForMergeNodes(root)
if utils.IsNodeMap(root) {
- codes, err := low.ExtractMapNoLookup[*Response](root, idx)
+ codes, err := low.ExtractMapNoLookup[*Response](ctx, root, idx)
if err != nil {
return err
diff --git a/datamodel/low/v3/security_scheme.go b/datamodel/low/v3/security_scheme.go
index 5ee59fb..ef5e365 100644
--- a/datamodel/low/v3/security_scheme.go
+++ b/datamodel/low/v3/security_scheme.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"fmt"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -48,13 +49,13 @@ func (ss *SecurityScheme) GetExtensions() map[low.KeyReference[string]]low.Value
}
// Build will extract OAuthFlows and extensions from the node.
-func (ss *SecurityScheme) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (ss *SecurityScheme) Build(ctx context.Context, _, root *yaml.Node, idx *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
ss.Reference = new(low.Reference)
ss.Extensions = low.ExtractExtensions(root)
- oa, oaErr := low.ExtractObject[*OAuthFlows](OAuthFlowsLabel, root, idx)
+ oa, oaErr := low.ExtractObject[*OAuthFlows](ctx, OAuthFlowsLabel, root, idx)
if oaErr != nil {
return oaErr
}
diff --git a/datamodel/low/v3/security_scheme_test.go b/datamodel/low/v3/security_scheme_test.go
index cbdab09..24065b9 100644
--- a/datamodel/low/v3/security_scheme_test.go
+++ b/datamodel/low/v3/security_scheme_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/pb33f/libopenapi/index"
@@ -25,7 +26,7 @@ func TestSecurityRequirement_Build(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Len(t, n.Requirements.Value, 1)
@@ -55,7 +56,7 @@ x-milk: please`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "0b5ee36519fdfc6383c7befd92294d77b5799cd115911ff8c3e194f345a8c103",
@@ -86,6 +87,6 @@ func TestSecurityScheme_Build_Fail(t *testing.T) {
err := low.BuildModel(&idxNode, &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.Error(t, err)
}
diff --git a/datamodel/low/v3/server.go b/datamodel/low/v3/server.go
index ee2388f..d91fefc 100644
--- a/datamodel/low/v3/server.go
+++ b/datamodel/low/v3/server.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"crypto/sha256"
"sort"
"strings"
@@ -36,7 +37,7 @@ func (s *Server) FindVariable(serverVar string) *low.ValueReference[*ServerVaria
}
// Build will extract server variables from the supplied node.
-func (s *Server) Build(_, root *yaml.Node, idx *index.SpecIndex) error {
+func (s *Server) Build(_ context.Context, _, root *yaml.Node, _ *index.SpecIndex) error {
root = utils.NodeAlias(root)
utils.CheckForMergeNodes(root)
s.Reference = new(low.Reference)
diff --git a/datamodel/low/v3/server_test.go b/datamodel/low/v3/server_test.go
index e10712d..f0c4ffb 100644
--- a/datamodel/low/v3/server_test.go
+++ b/datamodel/low/v3/server_test.go
@@ -4,6 +4,7 @@
package v3
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/index"
"github.com/stretchr/testify/assert"
@@ -30,7 +31,7 @@ variables:
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "ec69dfcf68ad8988f3804e170ee6c4a7ad2e4ac51084796eea93168820827546",
@@ -63,7 +64,7 @@ description: high quality software for developers.`
err := low.BuildModel(idxNode.Content[0], &n)
assert.NoError(t, err)
- err = n.Build(nil, idxNode.Content[0], idx)
+ err = n.Build(context.Background(), nil, idxNode.Content[0], idx)
assert.NoError(t, err)
assert.Equal(t, "https://pb33f.io", n.URL.Value)
assert.Equal(t, "high quality software for developers.", n.Description.Value)
diff --git a/datamodel/spec_info.go b/datamodel/spec_info.go
index e812b47..9d136d3 100644
--- a/datamodel/spec_info.go
+++ b/datamodel/spec_info.go
@@ -24,6 +24,7 @@ const (
type SpecInfo struct {
SpecType string `json:"type"`
Version string `json:"version"`
+ VersionNumeric float32 `json:"versionNumeric"`
SpecFormat string `json:"format"`
SpecFileType string `json:"fileType"`
SpecBytes *[]byte `json:"bytes"` // the original byte array
@@ -54,21 +55,21 @@ func ExtractSpecInfoWithDocumentCheck(spec []byte, bypass bool) (*SpecInfo, erro
var parsedSpec yaml.Node
- specVersion := &SpecInfo{}
- specVersion.JsonParsingChannel = make(chan bool)
+ specInfo := &SpecInfo{}
+ specInfo.JsonParsingChannel = make(chan bool)
// set original bytes
- specVersion.SpecBytes = &spec
+ specInfo.SpecBytes = &spec
runes := []rune(strings.TrimSpace(string(spec)))
if len(runes) <= 0 {
- return specVersion, errors.New("there is nothing in the spec, it's empty - so there is nothing to be done")
+ return specInfo, errors.New("there is nothing in the spec, it's empty - so there is nothing to be done")
}
if runes[0] == '{' && runes[len(runes)-1] == '}' {
- specVersion.SpecFileType = JSONFileType
+ specInfo.SpecFileType = JSONFileType
} else {
- specVersion.SpecFileType = YAMLFileType
+ specInfo.SpecFileType = YAMLFileType
}
err := yaml.Unmarshal(spec, &parsedSpec)
@@ -76,7 +77,7 @@ func ExtractSpecInfoWithDocumentCheck(spec []byte, bypass bool) (*SpecInfo, erro
return nil, fmt.Errorf("unable to parse specification: %s", err.Error())
}
- specVersion.RootNode = &parsedSpec
+ specInfo.RootNode = &parsedSpec
_, openAPI3 := utils.FindKeyNode(utils.OpenApi3, parsedSpec.Content)
_, openAPI2 := utils.FindKeyNode(utils.OpenApi2, parsedSpec.Content)
@@ -88,26 +89,31 @@ func ExtractSpecInfoWithDocumentCheck(spec []byte, bypass bool) (*SpecInfo, erro
if spec.SpecType == utils.OpenApi3 {
switch spec.Version {
case "3.1.0", "3.1":
+ spec.VersionNumeric = 3.1
spec.APISchema = OpenAPI31SchemaData
default:
+ spec.VersionNumeric = 3.0
spec.APISchema = OpenAPI3SchemaData
}
}
if spec.SpecType == utils.OpenApi2 {
+ spec.VersionNumeric = 2.0
spec.APISchema = OpenAPI2SchemaData
}
- if utils.IsYAML(string(bytes)) {
- _ = parsedNode.Decode(&jsonSpec)
- b, _ := json.Marshal(&jsonSpec)
- spec.SpecJSONBytes = &b
- spec.SpecJSON = &jsonSpec
- } else {
- _ = json.Unmarshal(bytes, &jsonSpec)
- spec.SpecJSONBytes = &bytes
- spec.SpecJSON = &jsonSpec
- }
- close(spec.JsonParsingChannel) // this needs removing at some point
+ go func() {
+ if utils.IsYAML(string(bytes)) {
+ _ = parsedNode.Decode(&jsonSpec)
+ b, _ := json.Marshal(&jsonSpec)
+ spec.SpecJSONBytes = &b
+ spec.SpecJSON = &jsonSpec
+ } else {
+ _ = json.Unmarshal(bytes, &jsonSpec)
+ spec.SpecJSONBytes = &bytes
+ spec.SpecJSON = &jsonSpec
+ }
+ close(spec.JsonParsingChannel)
+ }()
}
if !bypass {
@@ -118,17 +124,17 @@ func ExtractSpecInfoWithDocumentCheck(spec []byte, bypass bool) (*SpecInfo, erro
return nil, versionError
}
- specVersion.SpecType = utils.OpenApi3
- specVersion.Version = version
- specVersion.SpecFormat = OAS3
+ specInfo.SpecType = utils.OpenApi3
+ specInfo.Version = version
+ specInfo.SpecFormat = OAS3
// parse JSON
- parseJSON(spec, specVersion, &parsedSpec)
+ parseJSON(spec, specInfo, &parsedSpec)
// double check for the right version, people mix this up.
if majorVersion < 3 {
- specVersion.Error = errors.New("spec is defined as an openapi spec, but is using a swagger (2.0), or unknown version")
- return specVersion, specVersion.Error
+ specInfo.Error = errors.New("spec is defined as an openapi spec, but is using a swagger (2.0), or unknown version")
+ return specInfo, specInfo.Error
}
}
@@ -138,17 +144,17 @@ func ExtractSpecInfoWithDocumentCheck(spec []byte, bypass bool) (*SpecInfo, erro
return nil, versionError
}
- specVersion.SpecType = utils.OpenApi2
- specVersion.Version = version
- specVersion.SpecFormat = OAS2
+ specInfo.SpecType = utils.OpenApi2
+ specInfo.Version = version
+ specInfo.SpecFormat = OAS2
// parse JSON
- parseJSON(spec, specVersion, &parsedSpec)
+ parseJSON(spec, specInfo, &parsedSpec)
// I am not certain this edge-case is very frequent, but let's make sure we handle it anyway.
if majorVersion > 2 {
- specVersion.Error = errors.New("spec is defined as a swagger (openapi 2.0) spec, but is an openapi 3 or unknown version")
- return specVersion, specVersion.Error
+ specInfo.Error = errors.New("spec is defined as a swagger (openapi 2.0) spec, but is an openapi 3 or unknown version")
+ return specInfo, specInfo.Error
}
}
if asyncAPI != nil {
@@ -157,45 +163,47 @@ func ExtractSpecInfoWithDocumentCheck(spec []byte, bypass bool) (*SpecInfo, erro
return nil, versionErr
}
- specVersion.SpecType = utils.AsyncApi
- specVersion.Version = version
+ specInfo.SpecType = utils.AsyncApi
+ specInfo.Version = version
// TODO: format for AsyncAPI.
// parse JSON
- parseJSON(spec, specVersion, &parsedSpec)
+ parseJSON(spec, specInfo, &parsedSpec)
// so far there is only 2 as a major release of AsyncAPI
if majorVersion > 2 {
- specVersion.Error = errors.New("spec is defined as asyncapi, but has a major version that is invalid")
- return specVersion, specVersion.Error
+ specInfo.Error = errors.New("spec is defined as asyncapi, but has a major version that is invalid")
+ return specInfo, specInfo.Error
}
}
- if specVersion.SpecType == "" {
+ if specInfo.SpecType == "" {
// parse JSON
- parseJSON(spec, specVersion, &parsedSpec)
- specVersion.Error = errors.New("spec type not supported by libopenapi, sorry")
- return specVersion, specVersion.Error
+ go parseJSON(spec, specInfo, &parsedSpec)
+ specInfo.Error = errors.New("spec type not supported by libopenapi, sorry")
+ return specInfo, specInfo.Error
}
} else {
- var jsonSpec map[string]interface{}
- if utils.IsYAML(string(spec)) {
- _ = parsedSpec.Decode(&jsonSpec)
- b, _ := json.Marshal(&jsonSpec)
- specVersion.SpecJSONBytes = &b
- specVersion.SpecJSON = &jsonSpec
- } else {
- _ = json.Unmarshal(spec, &jsonSpec)
- specVersion.SpecJSONBytes = &spec
- specVersion.SpecJSON = &jsonSpec
- }
- close(specVersion.JsonParsingChannel) // this needs removing at some point
+ go func() {
+ var jsonSpec map[string]interface{}
+ if utils.IsYAML(string(spec)) {
+ _ = parsedSpec.Decode(&jsonSpec)
+ b, _ := json.Marshal(&jsonSpec)
+ specInfo.SpecJSONBytes = &b
+ specInfo.SpecJSON = &jsonSpec
+ } else {
+ _ = json.Unmarshal(spec, &jsonSpec)
+ specInfo.SpecJSONBytes = &spec
+ specInfo.SpecJSON = &jsonSpec
+ }
+ close(specInfo.JsonParsingChannel) // this needs removing at some point
+ }()
}
// detect the original whitespace indentation
- specVersion.OriginalIndentation = utils.DetermineWhitespaceLength(string(spec))
+ specInfo.OriginalIndentation = utils.DetermineWhitespaceLength(string(spec))
- return specVersion, nil
+ return specInfo, nil
}
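Editor's note: because the JSON rendering of the spec is now produced on a goroutine, SpecJSON and SpecJSONBytes are no longer guaranteed to be populated the moment ExtractSpecInfo returns; consumers should wait on JsonParsingChannel first, exactly as the updated tests below do. A minimal sketch of that wait, using an illustrative inline spec:

package main

import (
	"fmt"

	"github.com/pb33f/libopenapi/datamodel"
)

func main() {
	spec := []byte(`openapi: 3.0.1
info:
  title: sketch
  version: 1.0.0
paths: {}`)

	info, err := datamodel.ExtractSpecInfo(spec)
	if err != nil {
		panic(err)
	}

	// the JSON conversion now happens asynchronously; block until the
	// channel is closed before touching SpecJSON / SpecJSONBytes.
	<-info.JsonParsingChannel

	fmt.Println(len(*info.SpecJSONBytes) > 0) // true
}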
diff --git a/datamodel/spec_info_test.go b/datamodel/spec_info_test.go
index 702ae14..d38d3c1 100644
--- a/datamodel/spec_info_test.go
+++ b/datamodel/spec_info_test.go
@@ -5,7 +5,7 @@ package datamodel
import (
"fmt"
- "io/ioutil"
+ "os"
"testing"
"github.com/pb33f/libopenapi/utils"
@@ -116,6 +116,7 @@ info:
func TestExtractSpecInfo_ValidJSON(t *testing.T) {
r, e := ExtractSpecInfo([]byte(goodJSON))
+ <-r.JsonParsingChannel
assert.Greater(t, len(*r.SpecJSONBytes), 0)
assert.Error(t, e)
}
@@ -132,6 +133,7 @@ func TestExtractSpecInfo_Nothing(t *testing.T) {
func TestExtractSpecInfo_ValidYAML(t *testing.T) {
r, e := ExtractSpecInfo([]byte(goodYAML))
+ <-r.JsonParsingChannel
assert.Greater(t, len(*r.SpecJSONBytes), 0)
assert.Error(t, e)
}
@@ -149,6 +151,7 @@ func TestExtractSpecInfo_InvalidOpenAPIVersion(t *testing.T) {
func TestExtractSpecInfo_OpenAPI3(t *testing.T) {
r, e := ExtractSpecInfo([]byte(OpenApi3Spec))
+ <-r.JsonParsingChannel
assert.Nil(t, e)
assert.Equal(t, utils.OpenApi3, r.SpecType)
assert.Equal(t, "3.0.1", r.Version)
@@ -159,6 +162,7 @@ func TestExtractSpecInfo_OpenAPI3(t *testing.T) {
func TestExtractSpecInfo_OpenAPIWat(t *testing.T) {
r, e := ExtractSpecInfo([]byte(OpenApiWat))
+ <-r.JsonParsingChannel
assert.Nil(t, e)
assert.Equal(t, OpenApi3, r.SpecType)
assert.Equal(t, "3.2", r.Version)
@@ -167,6 +171,7 @@ func TestExtractSpecInfo_OpenAPIWat(t *testing.T) {
func TestExtractSpecInfo_OpenAPI31(t *testing.T) {
r, e := ExtractSpecInfo([]byte(OpenApi31))
+ <-r.JsonParsingChannel
assert.Nil(t, e)
assert.Equal(t, OpenApi3, r.SpecType)
assert.Equal(t, "3.1", r.Version)
@@ -183,6 +188,7 @@ why:
yes: no`
r, e := ExtractSpecInfoWithDocumentCheck([]byte(random), true)
+ <-r.JsonParsingChannel
assert.Nil(t, e)
assert.NotNil(t, r.RootNode)
assert.Equal(t, "something", r.RootNode.Content[0].Content[0].Value)
@@ -194,6 +200,7 @@ func TestExtractSpecInfo_AnyDocument_JSON(t *testing.T) {
random := `{ "something" : "yeah"}`
r, e := ExtractSpecInfoWithDocumentCheck([]byte(random), true)
+ <-r.JsonParsingChannel
assert.Nil(t, e)
assert.NotNil(t, r.RootNode)
assert.Equal(t, "something", r.RootNode.Content[0].Content[0].Value)
@@ -212,6 +219,7 @@ why:
r, e := ExtractSpecInfoWithConfig([]byte(random), &DocumentConfiguration{
BypassDocumentCheck: true,
})
+ <-r.JsonParsingChannel
assert.Nil(t, e)
assert.NotNil(t, r.RootNode)
assert.Equal(t, "something", r.RootNode.Content[0].Content[0].Value)
@@ -228,6 +236,7 @@ func TestExtractSpecInfo_OpenAPIFalse(t *testing.T) {
func TestExtractSpecInfo_OpenAPI2(t *testing.T) {
r, e := ExtractSpecInfo([]byte(OpenApi2Spec))
+ <-r.JsonParsingChannel
assert.Nil(t, e)
assert.Equal(t, OpenApi2, r.SpecType)
assert.Equal(t, "2.0.1", r.Version)
@@ -246,6 +255,7 @@ func TestExtractSpecInfo_OpenAPI2_OddVersion(t *testing.T) {
func TestExtractSpecInfo_AsyncAPI(t *testing.T) {
r, e := ExtractSpecInfo([]byte(AsyncAPISpec))
+ <-r.JsonParsingChannel
assert.Nil(t, e)
assert.Equal(t, AsyncApi, r.SpecType)
assert.Equal(t, "2.0.0", r.Version)
@@ -290,7 +300,7 @@ func TestExtractSpecInfo_BadVersion_AsyncAPI(t *testing.T) {
func ExampleExtractSpecInfo() {
// load bytes from openapi spec file.
- bytes, _ := ioutil.ReadFile("../test_specs/petstorev3.json")
+ bytes, _ := os.ReadFile("../test_specs/petstorev3.json")
// create a new *SpecInfo instance from loaded bytes
specInfo, err := ExtractSpecInfo(bytes)
diff --git a/datamodel/translate.go b/datamodel/translate.go
index 2588442..455ba19 100644
--- a/datamodel/translate.go
+++ b/datamodel/translate.go
@@ -6,12 +6,16 @@ import (
"io"
"runtime"
"sync"
+
+ "github.com/pb33f/libopenapi/orderedmap"
)
-type ActionFunc[T any] func(T) error
-type TranslateFunc[IN any, OUT any] func(IN) (OUT, error)
-type TranslateSliceFunc[IN any, OUT any] func(int, IN) (OUT, error)
-type TranslateMapFunc[K any, V any, OUT any] func(K, V) (OUT, error)
+type (
+ ActionFunc[T any] func(T) error
+ TranslateFunc[IN any, OUT any] func(IN) (OUT, error)
+ TranslateSliceFunc[IN any, OUT any] func(int, IN) (OUT, error)
+ TranslateMapFunc[IN any, OUT any] func(IN) (OUT, error)
+)
type continueError struct {
error
@@ -28,7 +32,6 @@ type jobStatus[OUT any] struct {
type pipelineJobStatus[IN any, OUT any] struct {
done chan struct{}
cont bool
- eof bool
input IN
result OUT
}
@@ -124,14 +127,15 @@ JOBLOOP:
// translate() or result() may return `io.EOF` to break iteration.
// Results are provided sequentially to result(). Result order is
// nondeterministic.
-func TranslateMapParallel[K comparable, V any, OUT any](m map[K]V, translate TranslateMapFunc[K, V, OUT], result ActionFunc[OUT]) error {
- if len(m) == 0 {
+func TranslateMapParallel[K comparable, V any, OUT any](m orderedmap.Map[K, V], translate TranslateMapFunc[orderedmap.Pair[K, V], OUT], result ActionFunc[OUT]) error {
+ if m == nil {
return nil
}
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
concurrency := runtime.NumCPU()
+ c := orderedmap.Iterate(ctx, m)
resultChan := make(chan OUT, concurrency)
var reterr error
var mu sync.Mutex
@@ -141,11 +145,11 @@ func TranslateMapParallel[K comparable, V any, OUT any](m map[K]V, translate Tra
wg.Add(1)
go func() {
defer wg.Done()
- for k, v := range m {
+ for pair := range c {
wg.Add(1)
- go func(k K, v V) {
+ go func(pair orderedmap.Pair[K, V]) {
defer wg.Done()
- value, err := translate(k, v)
+ value, err := translate(pair)
if err == Continue {
return
}
@@ -162,7 +166,7 @@ func TranslateMapParallel[K comparable, V any, OUT any](m map[K]V, translate Tra
case resultChan <- value:
case <-ctx.Done():
}
- }(k, v)
+ }(pair)
}
}()
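Editor's note: TranslateMapParallel now iterates an orderedmap.Map instead of a native Go map, so translate callbacks receive an orderedmap.Pair rather than separate key/value arguments. A minimal sketch of the new shape (result order remains nondeterministic); the example data is illustrative.

package main

import (
	"fmt"

	"github.com/pb33f/libopenapi/datamodel"
	"github.com/pb33f/libopenapi/orderedmap"
)

func main() {
	m := orderedmap.New[string, int]()
	m.Set("a", 1)
	m.Set("b", 2)

	// translate receives the whole pair; Continue and io.EOF behave as before.
	translate := func(pair orderedmap.Pair[string, int]) (string, error) {
		return fmt.Sprintf("value=%d", pair.Value()), nil
	}

	var results []string
	collect := func(value string) error {
		results = append(results, value)
		return nil
	}

	if err := datamodel.TranslateMapParallel[string, int, string](m, translate, collect); err != nil {
		panic(err)
	}
	fmt.Println(len(results)) // 2
}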
diff --git a/datamodel/translate_test.go b/datamodel/translate_test.go
index 406f8c2..957eeec 100644
--- a/datamodel/translate_test.go
+++ b/datamodel/translate_test.go
@@ -11,6 +11,7 @@ import (
"testing"
"github.com/pb33f/libopenapi/datamodel"
+ "github.com/pb33f/libopenapi/orderedmap"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -178,15 +179,15 @@ func TestTranslateMapParallel(t *testing.T) {
t.Run("Happy path", func(t *testing.T) {
var expectedResults []string
- m := make(map[string]int)
+ m := orderedmap.New[string, int]()
for i := 0; i < mapSize; i++ {
- m[fmt.Sprintf("key%d", i)] = i + 1000
+ m.Set(fmt.Sprintf("key%d", i), i+1000)
expectedResults = append(expectedResults, fmt.Sprintf("foobar %d", i+1000))
}
var translateCounter int64
- translateFunc := func(_ string, value int) (string, error) {
- result := fmt.Sprintf("foobar %d", value)
+ translateFunc := func(pair orderedmap.Pair[string, int]) (string, error) {
+ result := fmt.Sprintf("foobar %d", pair.Value())
atomic.AddInt64(&translateCounter, 1)
return result, nil
}
@@ -204,9 +205,9 @@ func TestTranslateMapParallel(t *testing.T) {
})
t.Run("nil", func(t *testing.T) {
- var m map[string]int
+ var m orderedmap.Map[string, int]
var translateCounter int64
- translateFunc := func(_ string, value int) (string, error) {
+ translateFunc := func(pair orderedmap.Pair[string, int]) (string, error) {
atomic.AddInt64(&translateCounter, 1)
return "", nil
}
@@ -222,13 +223,13 @@ func TestTranslateMapParallel(t *testing.T) {
})
t.Run("Error in translate", func(t *testing.T) {
- m := make(map[string]int)
+ m := orderedmap.New[string, int]()
for i := 0; i < mapSize; i++ {
- m[fmt.Sprintf("key%d", i)] = i + 1000
+ m.Set(fmt.Sprintf("key%d", i), i+1000)
}
var translateCounter int64
- translateFunc := func(_ string, _ int) (string, error) {
+ translateFunc := func(_ orderedmap.Pair[string, int]) (string, error) {
atomic.AddInt64(&translateCounter, 1)
return "", errors.New("Foobar")
}
@@ -241,12 +242,12 @@ func TestTranslateMapParallel(t *testing.T) {
})
t.Run("Error in result", func(t *testing.T) {
- m := make(map[string]int)
+ m := orderedmap.New[string, int]()
for i := 0; i < mapSize; i++ {
- m[fmt.Sprintf("key%d", i)] = i + 1000
+ m.Set(fmt.Sprintf("key%d", i), i+1000)
}
- translateFunc := func(_ string, value int) (string, error) {
+ translateFunc := func(_ orderedmap.Pair[string, int]) (string, error) {
return "", nil
}
var resultCounter int
@@ -260,13 +261,13 @@ func TestTranslateMapParallel(t *testing.T) {
})
t.Run("EOF in translate", func(t *testing.T) {
- m := make(map[string]int)
+ m := orderedmap.New[string, int]()
for i := 0; i < mapSize; i++ {
- m[fmt.Sprintf("key%d", i)] = i + 1000
+ m.Set(fmt.Sprintf("key%d", i), i+1000)
}
var translateCounter int64
- translateFunc := func(_ string, _ int) (string, error) {
+ translateFunc := func(_ orderedmap.Pair[string, int]) (string, error) {
atomic.AddInt64(&translateCounter, 1)
return "", io.EOF
}
@@ -279,12 +280,12 @@ func TestTranslateMapParallel(t *testing.T) {
})
t.Run("EOF in result", func(t *testing.T) {
- m := make(map[string]int)
+ m := orderedmap.New[string, int]()
for i := 0; i < mapSize; i++ {
- m[fmt.Sprintf("key%d", i)] = i + 1000
+ m.Set(fmt.Sprintf("key%d", i), i+1000)
}
- translateFunc := func(_ string, value int) (string, error) {
+ translateFunc := func(_ orderedmap.Pair[string, int]) (string, error) {
return "", nil
}
var resultCounter int
@@ -298,13 +299,13 @@ func TestTranslateMapParallel(t *testing.T) {
})
t.Run("Continue in translate", func(t *testing.T) {
- m := make(map[string]int)
+ m := orderedmap.New[string, int]()
for i := 0; i < mapSize; i++ {
- m[fmt.Sprintf("key%d", i)] = i + 1000
+ m.Set(fmt.Sprintf("key%d", i), i+1000)
}
var translateCounter int64
- translateFunc := func(_ string, _ int) (string, error) {
+ translateFunc := func(_ orderedmap.Pair[string, int]) (string, error) {
atomic.AddInt64(&translateCounter, 1)
return "", datamodel.Continue
}
@@ -331,7 +332,6 @@ func TestTranslatePipeline(t *testing.T) {
for _, testCase := range testCases {
itemCount := testCase.ItemCount
t.Run(fmt.Sprintf("Size %d", itemCount), func(t *testing.T) {
-
t.Run("Happy path", func(t *testing.T) {
var inputErr error
in := make(chan int)
diff --git a/document.go b/document.go
index 0783fee..a150aed 100644
--- a/document.go
+++ b/document.go
@@ -24,7 +24,6 @@ import (
v3high "github.com/pb33f/libopenapi/datamodel/high/v3"
v2low "github.com/pb33f/libopenapi/datamodel/low/v2"
v3low "github.com/pb33f/libopenapi/datamodel/low/v3"
- "github.com/pb33f/libopenapi/resolver"
"github.com/pb33f/libopenapi/utils"
what_changed "github.com/pb33f/libopenapi/what-changed"
"github.com/pb33f/libopenapi/what-changed/model"
@@ -37,6 +36,9 @@ type Document interface {
// GetVersion will return the exact version of the OpenAPI specification set for the document.
GetVersion() string
+ // GetRolodex will return the Rolodex instance that was used to load the document.
+ GetRolodex() *index.Rolodex
+
// GetSpecInfo will return the *datamodel.SpecInfo instance that contains all specification information.
GetSpecInfo() *datamodel.SpecInfo
@@ -44,6 +46,10 @@ type Document interface {
// allowing remote or local references, as well as a BaseURL to allow for relative file references.
SetConfiguration(configuration *datamodel.DocumentConfiguration)
+ // GetConfiguration will return the configuration used by the document. The configuration provides finer-grained
+ // control over allowing remote or local references, as well as a BaseURL for relative file references.
+ GetConfiguration() *datamodel.DocumentConfiguration
+
// BuildV2Model will build out a Swagger (version 2) model from the specification used to create the document
// If there are any issues, then no model will be returned, instead a slice of errors will explain all the
// problems that occurred. This method will only support version 2 specifications and will throw an error for
@@ -99,6 +105,7 @@ type Document interface {
}
type document struct {
+ rolodex *index.Rolodex
version string
info *datamodel.SpecInfo
config *datamodel.DocumentConfiguration
@@ -158,6 +165,10 @@ func NewDocumentWithConfiguration(specByteArray []byte, configuration *datamodel
return d, err
}
+func (d *document) GetRolodex() *index.Rolodex {
+ return d.rolodex
+}
+
func (d *document) GetVersion() string {
return d.version
}
@@ -166,6 +177,10 @@ func (d *document) GetSpecInfo() *datamodel.SpecInfo {
return d.info
}
+func (d *document) GetConfiguration() *datamodel.DocumentConfiguration {
+ return d.config
+}
+
func (d *document) SetConfiguration(configuration *datamodel.DocumentConfiguration) {
d.config = configuration
}
@@ -195,12 +210,12 @@ func (d *document) RenderAndReload() ([]byte, Document, *DocumentModel[v3high.Do
errs = append(errs, err)
// build the model.
- model, buildErrs := newDoc.BuildV3Model()
+ m, buildErrs := newDoc.BuildV3Model()
if buildErrs != nil {
- return newBytes, newDoc, model, errs
+ return newBytes, newDoc, m, errs
}
// this document is now dead, long live the new document!
- return newBytes, newDoc, model, nil
+ return newBytes, newDoc, m, nil
}
func (d *document) Render() ([]byte, error) {
@@ -231,58 +246,62 @@ func (d *document) BuildV2Model() (*DocumentModel[v2high.Swagger], []error) {
if d.highSwaggerModel != nil {
return d.highSwaggerModel, nil
}
- var errors []error
+ var errs []error
if d.info == nil {
- errors = append(errors, fmt.Errorf("unable to build swagger document, no specification has been loaded"))
- return nil, errors
+ errs = append(errs, fmt.Errorf("unable to build swagger document, no specification has been loaded"))
+ return nil, errs
}
if d.info.SpecFormat != datamodel.OAS2 {
- errors = append(errors, fmt.Errorf("unable to build swagger document, "+
+ errs = append(errs, fmt.Errorf("unable to build swagger document, "+
"supplied spec is a different version (%v). Try 'BuildV3Model()'", d.info.SpecFormat))
- return nil, errors
+ return nil, errs
}
var lowDoc *v2low.Swagger
if d.config == nil {
- d.config = &datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- }
+ d.config = datamodel.NewDocumentConfiguration()
+ }
+
+ var docErr error
+ lowDoc, docErr = v2low.CreateDocumentFromConfig(d.info, d.config)
+ d.rolodex = lowDoc.Rolodex
+
+ if docErr != nil {
+ errs = append(errs, utils.UnwrapErrors(docErr)...)
}
- lowDoc, errors = v2low.CreateDocumentFromConfig(d.info, d.config)
// Do not short-circuit on circular reference errors, so the client
// has the option of ignoring them.
- for _, err := range errors {
- if refErr, ok := err.(*resolver.ResolvingError); ok {
+ for _, err := range errs {
+ var refErr *index.ResolvingError
+ if errors.As(err, &refErr) {
if refErr.CircularReference == nil {
- return nil, errors
+ return nil, errs
}
- } else {
- return nil, errors
}
}
highDoc := v2high.NewSwaggerDocument(lowDoc)
+
d.highSwaggerModel = &DocumentModel[v2high.Swagger]{
Model: *highDoc,
Index: lowDoc.Index,
}
- return d.highSwaggerModel, errors
+ return d.highSwaggerModel, errs
}
func (d *document) BuildV3Model() (*DocumentModel[v3high.Document], []error) {
if d.highOpenAPI3Model != nil {
return d.highOpenAPI3Model, nil
}
- var errors []error
+ var errs []error
if d.info == nil {
- errors = append(errors, fmt.Errorf("unable to build document, no specification has been loaded"))
- return nil, errors
+ errs = append(errs, fmt.Errorf("unable to build document, no specification has been loaded"))
+ return nil, errs
}
if d.info.SpecFormat != datamodel.OAS3 {
- errors = append(errors, fmt.Errorf("unable to build openapi document, "+
+ errs = append(errs, fmt.Errorf("unable to build openapi document, "+
"supplied spec is a different version (%v). Try 'BuildV2Model()'", d.info.SpecFormat))
- return nil, errors
+ return nil, errs
}
var lowDoc *v3low.Document
@@ -293,24 +312,32 @@ func (d *document) BuildV3Model() (*DocumentModel[v3high.Document], []error) {
}
}
- lowDoc, errors = v3low.CreateDocumentFromConfig(d.info, d.config)
+ var docErr error
+ lowDoc, docErr = v3low.CreateDocumentFromConfig(d.info, d.config)
+ d.rolodex = lowDoc.Rolodex
+
+ if docErr != nil {
+ errs = append(errs, utils.UnwrapErrors(docErr)...)
+ }
+
// Do not short-circuit on circular reference errors, so the client
// has the option of ignoring them.
- for _, err := range errors {
- if refErr, ok := err.(*resolver.ResolvingError); ok {
+ for _, err := range utils.UnwrapErrors(docErr) {
+ var refErr *index.ResolvingError
+ if errors.As(err, &refErr) {
if refErr.CircularReference == nil {
- return nil, errors
+ return nil, errs
}
- } else {
- return nil, errors
}
}
+
highDoc := v3high.NewDocument(lowDoc)
+
d.highOpenAPI3Model = &DocumentModel[v3high.Document]{
Model: *highDoc,
Index: lowDoc.Index,
}
- return d.highOpenAPI3Model, errors
+ return d.highOpenAPI3Model, errs
}
// CompareDocuments will accept a left and right Document implementing struct, build a model for the correct
@@ -320,36 +347,32 @@ func (d *document) BuildV3Model() (*DocumentModel[v3high.Document], []error) {
// model.DocumentChanges. If there are any changes found however between either Document, then a pointer to
// model.DocumentChanges is returned containing every single change, broken down, model by model.
func CompareDocuments(original, updated Document) (*model.DocumentChanges, []error) {
- var errors []error
+ var errs []error
if original.GetSpecInfo().SpecType == utils.OpenApi3 && updated.GetSpecInfo().SpecType == utils.OpenApi3 {
- v3ModelLeft, errs := original.BuildV3Model()
- if len(errs) > 0 {
- errors = errs
+ v3ModelLeft, oErrs := original.BuildV3Model()
+ if len(oErrs) > 0 {
+ errs = oErrs
}
- v3ModelRight, errs := updated.BuildV3Model()
- if len(errs) > 0 {
- errors = append(errors, errs...)
+ v3ModelRight, uErrs := updated.BuildV3Model()
+ if len(uErrs) > 0 {
+ errs = append(errs, uErrs...)
}
if v3ModelLeft != nil && v3ModelRight != nil {
- return what_changed.CompareOpenAPIDocuments(v3ModelLeft.Model.GoLow(), v3ModelRight.Model.GoLow()), errors
+ return what_changed.CompareOpenAPIDocuments(v3ModelLeft.Model.GoLow(), v3ModelRight.Model.GoLow()), errs
} else {
- return nil, errors
+ return nil, errs
}
}
if original.GetSpecInfo().SpecType == utils.OpenApi2 && updated.GetSpecInfo().SpecType == utils.OpenApi2 {
- v2ModelLeft, errs := original.BuildV2Model()
- if len(errs) > 0 {
- errors = errs
+ v2ModelLeft, oErrs := original.BuildV2Model()
+ if len(oErrs) > 0 {
+ errs = oErrs
}
- v2ModelRight, errs := updated.BuildV2Model()
- if len(errs) > 0 {
- errors = append(errors, errs...)
- }
- if v2ModelLeft != nil && v2ModelRight != nil {
- return what_changed.CompareSwaggerDocuments(v2ModelLeft.Model.GoLow(), v2ModelRight.Model.GoLow()), errors
- } else {
- return nil, errors
+ v2ModelRight, uErrs := updated.BuildV2Model()
+ if len(uErrs) > 0 {
+ errs = append(errs, uErrs...)
}
+ return what_changed.CompareSwaggerDocuments(v2ModelLeft.Model.GoLow(), v2ModelRight.Model.GoLow()), errs
}
return nil, []error{fmt.Errorf("unable to compare documents, one or both documents are not of the same version")}
}
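Editor's note: the document.go changes above add two accessors to the Document interface: GetRolodex exposes the rolodex the document was loaded with, and GetConfiguration returns the active DocumentConfiguration. The rolodex is only assigned while a model is being built. A minimal sketch of the new accessors; the spec filename is illustrative.

package main

import (
	"fmt"
	"os"

	"github.com/pb33f/libopenapi"
)

func main() {
	specBytes, err := os.ReadFile("openapi.yaml") // illustrative path
	if err != nil {
		panic(err)
	}

	doc, err := libopenapi.NewDocument(specBytes)
	if err != nil {
		panic(err)
	}

	// the rolodex is populated while the model is built.
	if _, errs := doc.BuildV3Model(); len(errs) > 0 {
		fmt.Printf("built with %d errors\n", len(errs))
	}

	rolodex := doc.GetRolodex()      // *index.Rolodex used to load the document
	config := doc.GetConfiguration() // *datamodel.DocumentConfiguration in effect

	fmt.Println(rolodex != nil, config != nil)
}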
diff --git a/document_examples_test.go b/document_examples_test.go
index f2c027c..a92de38 100644
--- a/document_examples_test.go
+++ b/document_examples_test.go
@@ -4,20 +4,21 @@
package libopenapi
import (
+ "bytes"
"fmt"
+ "github.com/pb33f/libopenapi/datamodel"
+ "github.com/pb33f/libopenapi/index"
+ "github.com/pb33f/libopenapi/orderedmap"
+ "log/slog"
"net/url"
"os"
"strings"
"testing"
- "github.com/pb33f/libopenapi/datamodel"
- "github.com/pb33f/libopenapi/orderedmap"
-
"github.com/pb33f/libopenapi/datamodel/high"
v3high "github.com/pb33f/libopenapi/datamodel/high/v3"
low "github.com/pb33f/libopenapi/datamodel/low/base"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
- "github.com/pb33f/libopenapi/resolver"
"github.com/pb33f/libopenapi/utils"
"github.com/stretchr/testify/assert"
)
@@ -66,13 +67,24 @@ func ExampleNewDocument_fromWithDocumentConfigurationFailure() {
digitalOcean, _ := os.ReadFile("test_specs/digitalocean.yaml")
- // create a DocumentConfiguration that prevents loading file and remote references
+ // create a default DocumentConfiguration; with no BaseURL or BasePath set, external references cannot be resolved
- config := datamodel.DocumentConfiguration{
- AllowFileReferences: false,
- AllowRemoteReferences: false,
- }
+ config := datamodel.NewDocumentConfiguration()
+
+ // create a new structured logger to capture error logs that will be spewed out by the rolodex
+ // when it tries to load external references. We're going to create a byte buffer to capture the logs
+ // and then look at them after the document is built.
+ var logs []byte
+ buf := bytes.NewBuffer(logs)
+ logger := slog.New(slog.NewTextHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ config.Logger = logger // set the config logger to our new logger.
+
+ // Do not set a BaseURL or BasePath. Without a BaseURL (for remote references) or a BasePath
+ // (for local references), the rolodex treats every reference as local and will not attempt
+ // to load it from the network, so the external references in this spec cannot be resolved.
// create a new document from specification bytes
- doc, err := NewDocumentWithConfiguration(digitalOcean, &config)
+ doc, err := NewDocumentWithConfiguration(digitalOcean, config)
// if anything went wrong, an error is thrown
if err != nil {
@@ -82,11 +94,16 @@ func ExampleNewDocument_fromWithDocumentConfigurationFailure() {
// only errors will be thrown, so just capture them and print the number of errors.
_, errors := doc.BuildV3Model()
+ // there should be 475 error logs
+ logItems := strings.Split(buf.String(), "\n")
+ fmt.Printf("There are %d errors logged\n", len(logItems))
+
// if anything went wrong when building the v3 model, a slice of errors will be returned
if len(errors) > 0 {
fmt.Println("Error building Digital Ocean spec errors reported")
}
- // Output: Error building Digital Ocean spec errors reported
+ // Output: There are 475 errors logged
+ //Error building Digital Ocean spec errors reported
}
func ExampleNewDocument_fromWithDocumentConfigurationSuccess() {
@@ -103,9 +120,10 @@ func ExampleNewDocument_fromWithDocumentConfigurationSuccess() {
// create a DocumentConfiguration that allows loading file and remote references, and sets the baseURL
// to somewhere that can resolve the relative references.
config := datamodel.DocumentConfiguration{
- AllowFileReferences: true,
- AllowRemoteReferences: true,
- BaseURL: baseURL,
+ BaseURL: baseURL,
+ Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ })),
}
// create a new document from specification bytes
@@ -435,7 +453,7 @@ components:
- // resolving error is a pointer to *resolver.ResolvingError
+ // resolving error is a pointer to *index.ResolvingError
// which provides access to rich details about the error.
- circularReference := resolvingError.(*resolver.ResolvingError).CircularReference
+ circularReference := resolvingError.(*index.ResolvingError).CircularReference
// capture the journey with all details
var buf strings.Builder
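
A minimal sketch of the `errors.As` pattern that replaces the old `*resolver.ResolvingError` type assertion; `doc` is assumed to be a `Document` built with `NewDocument`, and this function is not part of the patch:

```go
import (
	"errors"
	"fmt"

	"github.com/pb33f/libopenapi"
	"github.com/pb33f/libopenapi/index"
)

// printCircularJourneys shows the errors.As pattern; resolving errors now live in the index package.
func printCircularJourneys(doc libopenapi.Document) {
	_, errs := doc.BuildV3Model()
	for _, err := range errs {
		var resErr *index.ResolvingError
		if errors.As(err, &resErr) && resErr.CircularReference != nil {
			fmt.Println(resErr.CircularReference.GenerateJourneyPath())
		}
	}
}
```
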
diff --git a/document_iteration_test.go b/document_iteration_test.go
new file mode 100644
index 0000000..2f2189c
--- /dev/null
+++ b/document_iteration_test.go
@@ -0,0 +1,303 @@
+package libopenapi
+
+import (
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/pb33f/libopenapi/datamodel"
+ "github.com/pb33f/libopenapi/datamodel/high/base"
+ v3 "github.com/pb33f/libopenapi/datamodel/high/v3"
+ "github.com/pb33f/libopenapi/orderedmap"
+ "github.com/stretchr/testify/require"
+ "golang.org/x/exp/slices"
+)
+
+type loopFrame struct {
+ Type string
+ Restricted bool
+}
+
+type context struct {
+ visited []string
+ stack []loopFrame
+}
+
+func Test_Speakeasy_Document_Iteration(t *testing.T) {
+ spec, err := os.ReadFile("test_specs/speakeasy-test.yaml")
+ require.NoError(t, err)
+
+ doc, err := NewDocumentWithConfiguration(spec, &datamodel.DocumentConfiguration{
+ BasePath: "./test_specs",
+ IgnorePolymorphicCircularReferences: true,
+ IgnoreArrayCircularReferences: true,
+ AllowFileReferences: true,
+ })
+ require.NoError(t, err)
+
+ m, errs := doc.BuildV3Model()
+ require.Empty(t, errs)
+
+ for pair := orderedmap.First(m.Model.Paths.PathItems); pair != nil; pair = pair.Next() {
+ t.Log(pair.Key())
+
+ iterateOperations(t, pair.Value().GetOperations())
+ }
+
+ for pair := orderedmap.First(m.Model.Webhooks); pair != nil; pair = pair.Next() {
+ t.Log(pair.Key())
+
+ iterateOperations(t, pair.Value().GetOperations())
+ }
+
+ for pair := orderedmap.First(m.Model.Components.Schemas); pair != nil; pair = pair.Next() {
+ t.Log(pair.Key())
+
+ handleSchema(t, pair.Value(), context{})
+ }
+}
+
+func iterateOperations(t *testing.T, ops map[string]*v3.Operation) {
+ t.Helper()
+
+ for method, op := range ops {
+ t.Log(method)
+
+ for _, param := range op.Parameters {
+ if param.Schema != nil {
+ handleSchema(t, param.Schema, context{})
+ }
+ }
+
+ if op.RequestBody != nil {
+ for pair := orderedmap.First(op.RequestBody.Content); pair != nil; pair = pair.Next() {
+ t.Log(pair.Key())
+
+ mediaType := pair.Value()
+
+ if mediaType.Schema != nil {
+ handleSchema(t, mediaType.Schema, context{})
+ }
+ }
+ }
+
+ for codePair := orderedmap.First(op.Responses.Codes); codePair != nil; codePair = codePair.Next() {
+ t.Log(codePair.Key())
+
+ for contentPair := orderedmap.First(codePair.Value().Content); contentPair != nil; contentPair = contentPair.Next() {
+ t.Log(contentPair.Key())
+
+ mediaType := contentPair.Value()
+
+ if mediaType.Schema != nil {
+ handleSchema(t, mediaType.Schema, context{})
+ }
+ }
+ }
+
+ for callbacksPair := orderedmap.First(op.Callbacks); callbacksPair != nil; callbacksPair = callbacksPair.Next() {
+ t.Log(callbacksPair.Key())
+
+ for expressionPair := orderedmap.First(callbacksPair.Value().Expression); expressionPair != nil; expressionPair = expressionPair.Next() {
+ t.Log(expressionPair.Key())
+
+ iterateOperations(t, expressionPair.Value().GetOperations())
+ }
+ }
+ }
+}
+
+func handleSchema(t *testing.T, schProxy *base.SchemaProxy, ctx context) {
+ t.Helper()
+
+ if checkCircularReference(t, &ctx, schProxy) {
+ return
+ }
+
+ sch, err := schProxy.BuildSchema()
+ require.NoError(t, err)
+
+ typ, subTypes := getResolvedType(sch)
+
+ if len(sch.Enum) > 0 {
+ switch typ {
+ case "string":
+ return
+ case "integer":
+ return
+ default:
+ // handle as base type
+ }
+ }
+
+ switch typ {
+ case "allOf":
+ fallthrough
+ case "anyOf":
+ fallthrough
+ case "oneOf":
+ if len(subTypes) > 0 {
+ return
+ }
+
+ handleAllOfAnyOfOneOf(t, sch, ctx)
+ case "array":
+ handleArray(t, sch, ctx)
+ case "object":
+ handleObject(t, sch, ctx)
+ default:
+ return
+ }
+}
+
+func getResolvedType(sch *base.Schema) (string, []string) {
+ subTypes := []string{}
+
+ for _, t := range sch.Type {
+ if t == "" { // treat empty type as any
+ subTypes = append(subTypes, "any")
+ } else if t != "null" {
+ subTypes = append(subTypes, t)
+ }
+ }
+
+ if len(sch.AllOf) > 0 {
+ return "allOf", nil
+ }
+
+ if len(sch.AnyOf) > 0 {
+ return "anyOf", nil
+ }
+
+ if len(sch.OneOf) > 0 {
+ return "oneOf", nil
+ }
+
+ if len(subTypes) == 0 {
+ if len(sch.Enum) > 0 {
+ return "string", nil
+ }
+
+ if sch.Properties.Len() > 0 {
+ return "object", nil
+ }
+
+ if sch.AdditionalProperties != nil {
+ return "object", nil
+ }
+
+ if sch.Items != nil {
+ return "array", nil
+ }
+
+ return "any", nil
+ }
+
+ if len(subTypes) == 1 {
+ return subTypes[0], nil
+ }
+
+ return "oneOf", subTypes
+}
+
+func handleAllOfAnyOfOneOf(t *testing.T, sch *base.Schema, ctx context) {
+ t.Helper()
+
+ var schemas []*base.SchemaProxy
+
+ switch {
+ case len(sch.AllOf) > 0:
+ schemas = sch.AllOf
+ case len(sch.AnyOf) > 0:
+ schemas = sch.AnyOf
+ ctx.stack = append(ctx.stack, loopFrame{Type: "anyOf", Restricted: len(sch.AnyOf) == 1})
+ case len(sch.OneOf) > 0:
+ schemas = sch.OneOf
+ ctx.stack = append(ctx.stack, loopFrame{Type: "oneOf", Restricted: len(sch.OneOf) == 1})
+ }
+
+ for _, s := range schemas {
+ handleSchema(t, s, ctx)
+ }
+}
+
+func handleArray(t *testing.T, sch *base.Schema, ctx context) {
+ t.Helper()
+
+ ctx.stack = append(ctx.stack, loopFrame{Type: "array", Restricted: sch.MinItems != nil && *sch.MinItems > 0})
+
+ if sch.Items != nil && sch.Items.IsA() {
+ handleSchema(t, sch.Items.A, ctx)
+ }
+
+ if sch.Contains != nil {
+ handleSchema(t, sch.Contains, ctx)
+ }
+
+ if sch.PrefixItems != nil {
+ for _, s := range sch.PrefixItems {
+ handleSchema(t, s, ctx)
+ }
+ }
+}
+
+func handleObject(t *testing.T, sch *base.Schema, ctx context) {
+ t.Helper()
+
+ for pair := orderedmap.First(sch.Properties); pair != nil; pair = pair.Next() {
+ ctx.stack = append(ctx.stack, loopFrame{Type: "object", Restricted: slices.Contains(sch.Required, pair.Key())})
+ handleSchema(t, pair.Value(), ctx)
+ }
+
+ if sch.AdditionalProperties != nil && sch.AdditionalProperties.IsA() {
+ handleSchema(t, sch.AdditionalProperties.A, ctx)
+ }
+}
+
+func checkCircularReference(t *testing.T, ctx *context, schProxy *base.SchemaProxy) bool {
+ loopRef := getSimplifiedRef(schProxy.GetReference())
+
+ if loopRef != "" {
+ if slices.Contains(ctx.visited, loopRef) {
+ isRestricted := true
+ containsObject := false
+
+ for _, v := range ctx.stack {
+ if v.Type == "object" {
+ containsObject = true
+ }
+
+ if v.Type == "array" && !v.Restricted {
+ isRestricted = false
+ } else if !v.Restricted {
+ isRestricted = false
+ }
+ }
+
+ if !containsObject {
+ isRestricted = true
+ }
+
+ require.False(t, isRestricted, "circular reference: %s", append(ctx.visited, loopRef))
+ return true
+ }
+
+ ctx.visited = append(ctx.visited, loopRef)
+ }
+
+ return false
+}
+
+// getSimplifiedRef returns the reference without the preceding file path.
+// The caveat is that if a spec includes the same ref in two different files, this may identify them
+// incorrectly. That is currently a problem anyway, because when libopenapi returns references from an
+// external file it won't include the file path for a reference local to that file, so we might fail to
+// distinguish between them either way.
+// The fix needed is for libopenapi to also track which file each reference is in, so we can always
+// prefix it with the file path.
+func getSimplifiedRef(ref string) string {
+ if ref == "" {
+ return ""
+ }
+
+ refParts := strings.Split(ref, "#/")
+ return "#/" + refParts[len(refParts)-1]
+}
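
A hypothetical in-package test (using the same imports as the file above, and not part of this patch) makes the caveat concrete: the file portion is discarded, so references from different files can collapse to the same key.

```go
func TestGetSimplifiedRef_CollapsesFilePaths(t *testing.T) {
	a := getSimplifiedRef("schemas/pets.yaml#/components/schemas/Pet")
	b := getSimplifiedRef("#/components/schemas/Pet")
	require.Equal(t, "#/components/schemas/Pet", a)
	require.Equal(t, a, b) // two different source files, one key
}
```
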
diff --git a/document_test.go b/document_test.go
index 0a34730..b6644b9 100644
--- a/document_test.go
+++ b/document_test.go
@@ -4,6 +4,7 @@ package libopenapi
import (
"fmt"
+ "log/slog"
"os"
"strconv"
"strings"
@@ -19,7 +20,6 @@ import (
)
func TestLoadDocument_Simple_V2(t *testing.T) {
-
yml := `swagger: 2.0.1`
doc, err := NewDocument([]byte(yml))
assert.NoError(t, err)
@@ -31,11 +31,9 @@ func TestLoadDocument_Simple_V2(t *testing.T) {
assert.NotNil(t, doc.GetSpecInfo())
fmt.Print()
-
}
func TestLoadDocument_Simple_V2_Error(t *testing.T) {
-
yml := `swagger: 2.0`
doc, err := NewDocument([]byte(yml))
assert.NoError(t, err)
@@ -46,7 +44,6 @@ func TestLoadDocument_Simple_V2_Error(t *testing.T) {
}
func TestLoadDocument_Simple_V2_Error_BadSpec(t *testing.T) {
-
yml := `swagger: 2.0
definitions:
thing:
@@ -55,12 +52,11 @@ definitions:
assert.NoError(t, err)
v2Doc, docErr := doc.BuildV2Model()
- assert.Len(t, docErr, 2)
+ assert.Len(t, docErr, 3)
assert.Nil(t, v2Doc)
}
func TestLoadDocument_Simple_V3_Error(t *testing.T) {
-
yml := `openapi: 3.0.1`
doc, err := NewDocument([]byte(yml))
assert.NoError(t, err)
@@ -71,14 +67,12 @@ func TestLoadDocument_Simple_V3_Error(t *testing.T) {
}
func TestLoadDocument_Error_V2NoSpec(t *testing.T) {
-
doc := new(document) // not how this should be instantiated.
_, err := doc.BuildV2Model()
assert.Len(t, err, 1)
}
func TestLoadDocument_Error_V3NoSpec(t *testing.T) {
-
doc := new(document) // not how this should be instantiated.
_, err := doc.BuildV3Model()
assert.Len(t, err, 1)
@@ -91,7 +85,6 @@ func TestLoadDocument_Empty(t *testing.T) {
}
func TestLoadDocument_Simple_V3(t *testing.T) {
-
yml := `openapi: 3.0.1`
doc, err := NewDocument([]byte(yml))
assert.NoError(t, err)
@@ -102,8 +95,7 @@ func TestLoadDocument_Simple_V3(t *testing.T) {
assert.NotNil(t, v3Doc)
}
-func TestLoadDocument_Simple_V3_Error_BadSpec(t *testing.T) {
-
+func TestLoadDocument_Simple_V3_Error_BadSpec_BuildModel(t *testing.T) {
yml := `openapi: 3.0
paths:
"/some":
@@ -111,9 +103,9 @@ paths:
doc, err := NewDocument([]byte(yml))
assert.NoError(t, err)
- v3Doc, docErr := doc.BuildV3Model()
- assert.Len(t, docErr, 2)
- assert.Nil(t, v3Doc)
+ doc.BuildV3Model()
+ rolo := doc.GetRolodex()
+ assert.Len(t, rolo.GetCaughtErrors(), 1)
}
func TestDocument_Serialize_Error(t *testing.T) {
@@ -123,7 +115,6 @@ func TestDocument_Serialize_Error(t *testing.T) {
}
func TestDocument_Serialize(t *testing.T) {
-
yml := `openapi: 3.0
info:
title: The magic API
@@ -135,7 +126,6 @@ info:
}
func TestDocument_Serialize_Modified(t *testing.T) {
-
yml := `openapi: 3.0
info:
title: The magic API
@@ -157,7 +147,6 @@ info:
}
func TestDocument_RenderAndReload_ChangeCheck_Burgershop(t *testing.T) {
-
bs, _ := os.ReadFile("test_specs/burgershop.openapi.yaml")
doc, _ := NewDocument(bs)
doc.BuildV3Model()
@@ -171,11 +160,9 @@ func TestDocument_RenderAndReload_ChangeCheck_Burgershop(t *testing.T) {
assert.Nil(t, errs)
assert.NotNil(t, rend)
assert.Nil(t, compReport)
-
}
func TestDocument_RenderAndReload_ChangeCheck_Stripe(t *testing.T) {
-
bs, _ := os.ReadFile("test_specs/stripe.yaml")
doc, _ := NewDocument(bs)
doc.BuildV3Model()
@@ -204,11 +191,9 @@ func TestDocument_RenderAndReload_ChangeCheck_Stripe(t *testing.T) {
// there should be no other changes than the 519 descriptions.
assert.Equal(t, 0, len(filtered))
-
}
func TestDocument_RenderAndReload_ChangeCheck_Asana(t *testing.T) {
-
bs, _ := os.ReadFile("test_specs/asana.yaml")
doc, _ := NewDocument(bs)
doc.BuildV3Model()
@@ -228,17 +213,14 @@ func TestDocument_RenderAndReload_ChangeCheck_Asana(t *testing.T) {
// there are some properties re-rendered that trigger changes.
assert.Equal(t, 21, len(flatChanges))
-
}
func TestDocument_RenderAndReload(t *testing.T) {
-
// load an OpenAPI 3 specification from bytes
petstore, _ := os.ReadFile("test_specs/petstorev3.json")
// create a new document from specification bytes
doc, err := NewDocument(petstore)
-
// if anything went wrong, an error is thrown
if err != nil {
panic(fmt.Sprintf("cannot create new document: %e", err))
@@ -251,8 +233,7 @@ func TestDocument_RenderAndReload(t *testing.T) {
h := m.Model
h.Paths.PathItems.GetOrZero("/pet/findByStatus").Get.OperationId = "findACakeInABakery"
h.Paths.PathItems.GetOrZero("/pet/findByStatus").Get.Responses.Codes.GetOrZero("400").Description = "a nice bucket of mice"
- h.Paths.PathItems.GetOrZero("/pet/findByTags").Get.Tags =
- append(h.Paths.PathItems.GetOrZero("/pet/findByTags").Get.Tags, "gurgle", "giggle")
+ h.Paths.PathItems.GetOrZero("/pet/findByTags").Get.Tags = append(h.Paths.PathItems.GetOrZero("/pet/findByTags").Get.Tags, "gurgle", "giggle")
h.Paths.PathItems.GetOrZero("/pet/{petId}").Delete.Security = append(h.Paths.PathItems.GetOrZero("/pet/{petId}").Delete.Security,
&base.SecurityRequirement{Requirements: orderedmap.ToOrderedMap(map[string][]string{
@@ -284,13 +265,11 @@ func TestDocument_RenderAndReload(t *testing.T) {
}
func TestDocument_Render(t *testing.T) {
-
// load an OpenAPI 3 specification from bytes
petstore, _ := os.ReadFile("test_specs/petstorev3.json")
// create a new document from specification bytes
doc, err := NewDocument(petstore)
-
// if anything went wrong, an error is thrown
if err != nil {
panic(fmt.Sprintf("cannot create new document: %e", err))
@@ -304,8 +283,7 @@ func TestDocument_Render(t *testing.T) {
h.Paths.PathItems.GetOrZero("/pet/findByStatus").Get.OperationId = "findACakeInABakery"
h.Paths.PathItems.GetOrZero("/pet/findByStatus").
Get.Responses.Codes.GetOrZero("400").Description = "a nice bucket of mice"
- h.Paths.PathItems.GetOrZero("/pet/findByTags").Get.Tags =
- append(h.Paths.PathItems.GetOrZero("/pet/findByTags").Get.Tags, "gurgle", "giggle")
+ h.Paths.PathItems.GetOrZero("/pet/findByTags").Get.Tags = append(h.Paths.PathItems.GetOrZero("/pet/findByTags").Get.Tags, "gurgle", "giggle")
h.Paths.PathItems.GetOrZero("/pet/{petId}").Delete.Security = append(h.Paths.PathItems.GetOrZero("/pet/{petId}").Delete.Security,
&base.SecurityRequirement{Requirements: orderedmap.ToOrderedMap(map[string][]string{
@@ -344,7 +322,6 @@ func TestDocument_Render(t *testing.T) {
}
func TestDocument_RenderWithLargeIndention(t *testing.T) {
-
json := `{
"openapi": "3.0"
}`
@@ -356,7 +333,6 @@ func TestDocument_RenderWithLargeIndention(t *testing.T) {
}
func TestDocument_Render_ChangeCheck_Burgershop(t *testing.T) {
-
bs, _ := os.ReadFile("test_specs/burgershop.openapi.yaml")
doc, _ := NewDocument(bs)
doc.BuildV3Model()
@@ -372,7 +348,6 @@ func TestDocument_Render_ChangeCheck_Burgershop(t *testing.T) {
assert.Nil(t, errs)
assert.NotNil(t, rend)
assert.Nil(t, compReport)
-
}
func TestDocument_RenderAndReload_Swagger(t *testing.T) {
@@ -383,7 +358,6 @@ func TestDocument_RenderAndReload_Swagger(t *testing.T) {
_, _, _, e := doc.RenderAndReload()
assert.Len(t, e, 1)
assert.Equal(t, "this method only supports OpenAPI 3 documents, not Swagger", e[0].Error())
-
}
func TestDocument_Render_Swagger(t *testing.T) {
@@ -394,7 +368,6 @@ func TestDocument_Render_Swagger(t *testing.T) {
_, e := doc.Render()
assert.Error(t, e)
assert.Equal(t, "this method only supports OpenAPI 3 documents, not Swagger", e.Error())
-
}
func TestDocument_BuildModelPreBuild(t *testing.T) {
@@ -424,21 +397,18 @@ func TestDocument_AnyDocWithConfig(t *testing.T) {
func TestDocument_BuildModelCircular(t *testing.T) {
petstore, _ := os.ReadFile("test_specs/circular-tests.yaml")
doc, _ := NewDocument(petstore)
- m, e := doc.BuildV3Model()
- assert.NotNil(t, m)
- assert.Len(t, e, 3)
+ doc.BuildV3Model()
+ assert.Len(t, doc.GetRolodex().GetCaughtErrors(), 3)
}
func TestDocument_BuildModelBad(t *testing.T) {
petstore, _ := os.ReadFile("test_specs/badref-burgershop.openapi.yaml")
doc, _ := NewDocument(petstore)
- m, e := doc.BuildV3Model()
- assert.Nil(t, m)
- assert.Len(t, e, 9)
+ doc.BuildV3Model()
+ assert.Len(t, doc.GetRolodex().GetCaughtErrors(), 6)
}
func TestDocument_Serialize_JSON_Modified(t *testing.T) {
-
json := `{ 'openapi': '3.0',
'info': {
'title': 'The magic API'
@@ -463,7 +433,7 @@ func TestDocument_Serialize_JSON_Modified(t *testing.T) {
}
func TestExtractReference(t *testing.T) {
- var data = `
+ data := `
openapi: "3.1"
components:
parameters:
@@ -500,36 +470,31 @@ func TestDocument_BuildModel_CompareDocsV3_LeftError(t *testing.T) {
originalDoc, _ := NewDocument(burgerShopOriginal)
updatedDoc, _ := NewDocument(burgerShopUpdated)
changes, errors := CompareDocuments(originalDoc, updatedDoc)
- assert.Len(t, errors, 9)
+ assert.Len(t, errors, 6)
assert.Nil(t, changes)
}
func TestDocument_BuildModel_CompareDocsV3_RightError(t *testing.T) {
-
burgerShopOriginal, _ := os.ReadFile("test_specs/badref-burgershop.openapi.yaml")
burgerShopUpdated, _ := os.ReadFile("test_specs/burgershop.openapi-modified.yaml")
originalDoc, _ := NewDocument(burgerShopOriginal)
updatedDoc, _ := NewDocument(burgerShopUpdated)
changes, errors := CompareDocuments(updatedDoc, originalDoc)
- assert.Len(t, errors, 9)
+ assert.Len(t, errors, 6)
assert.Nil(t, changes)
-
}
func TestDocument_BuildModel_CompareDocsV2_Error(t *testing.T) {
-
burgerShopOriginal, _ := os.ReadFile("test_specs/petstorev2-badref.json")
burgerShopUpdated, _ := os.ReadFile("test_specs/petstorev2-badref.json")
originalDoc, _ := NewDocument(burgerShopOriginal)
updatedDoc, _ := NewDocument(burgerShopUpdated)
changes, errors := CompareDocuments(updatedDoc, originalDoc)
- assert.Len(t, errors, 2)
+ assert.Len(t, errors, 14)
assert.Nil(t, changes)
-
}
func TestDocument_BuildModel_CompareDocsV2V3Mix_Error(t *testing.T) {
-
burgerShopOriginal, _ := os.ReadFile("test_specs/petstorev2.json")
burgerShopUpdated, _ := os.ReadFile("test_specs/petstorev3.json")
originalDoc, _ := NewDocument(burgerShopOriginal)
@@ -537,7 +502,6 @@ func TestDocument_BuildModel_CompareDocsV2V3Mix_Error(t *testing.T) {
changes, errors := CompareDocuments(updatedDoc, originalDoc)
assert.Len(t, errors, 1)
assert.Nil(t, changes)
-
}
func TestSchemaRefIsFollowed(t *testing.T) {
@@ -545,7 +509,6 @@ func TestSchemaRefIsFollowed(t *testing.T) {
// create a new document from specification bytes
document, err := NewDocument(petstore)
-
// if anything went wrong, an error is thrown
if err != nil {
panic(fmt.Sprintf("cannot create new document: %e", err))
@@ -583,7 +546,7 @@ func TestSchemaRefIsFollowed(t *testing.T) {
}
func TestDocument_ParamsAndRefsRender(t *testing.T) {
- var d = `openapi: "3.1"
+ d := `openapi: "3.1"
components:
parameters:
limit:
@@ -642,7 +605,7 @@ paths:
// parameters:
// - $ref: "https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml#/components/parameters/crs"`
//
-// config := datamodel.NewOpenDocumentConfiguration()
+// config := datamodel.NewDocumentConfiguration()
//
// doc, err := NewDocumentWithConfiguration([]byte(spec), config)
// if err != nil {
@@ -658,7 +621,7 @@ paths:
//}
func TestDocument_ExampleMap(t *testing.T) {
- var d = `openapi: "3.1"
+ d := `openapi: "3.1"
components:
schemas:
ProjectRequest:
@@ -692,25 +655,31 @@ components:
}
func TestDocument_OperationsAsRefs(t *testing.T) {
-
ae := `operationId: thisIsAnOperationId
summary: a test thing
description: this is a test, that does a test.`
- _ = os.WriteFile("test-operation.yaml", []byte(ae), 0644)
+ _ = os.WriteFile("test-operation.yaml", []byte(ae), 0o644)
defer os.Remove("test-operation.yaml")
- var d = `openapi: "3.1"
+ d := `openapi: "3.1"
paths:
/an/operation:
get:
$ref: test-operation.yaml`
- doc, err := NewDocumentWithConfiguration([]byte(d), datamodel.NewOpenDocumentConfiguration())
+ cf := datamodel.NewDocumentConfiguration()
+ cf.BasePath = "."
+ cf.FileFilter = []string{"test-operation.yaml"}
+
+ doc, err := NewDocumentWithConfiguration([]byte(d), cf)
if err != nil {
panic(err)
}
+ assert.NotNil(t, doc.GetConfiguration())
+ assert.Equal(t, doc.GetConfiguration(), cf)
+
result, errs := doc.BuildV3Model()
if len(errs) > 0 {
panic(errs)
@@ -723,8 +692,7 @@ paths:
}
func TestDocument_InputAsJSON(t *testing.T) {
-
- var d = `{
+ d := `{
"openapi": "3.1",
"paths": {
"/an/operation": {
@@ -735,7 +703,7 @@ func TestDocument_InputAsJSON(t *testing.T) {
}
}`
- doc, err := NewDocumentWithConfiguration([]byte(d), datamodel.NewOpenDocumentConfiguration())
+ doc, err := NewDocumentWithConfiguration([]byte(d), datamodel.NewDocumentConfiguration())
if err != nil {
panic(err)
}
@@ -749,8 +717,7 @@ func TestDocument_InputAsJSON(t *testing.T) {
}
func TestDocument_InputAsJSON_LargeIndent(t *testing.T) {
-
- var d = `{
+ d := `{
"openapi": "3.1",
"paths": {
"/an/operation": {
@@ -761,7 +728,7 @@ func TestDocument_InputAsJSON_LargeIndent(t *testing.T) {
}
}`
- doc, err := NewDocumentWithConfiguration([]byte(d), datamodel.NewOpenDocumentConfiguration())
+ doc, err := NewDocumentWithConfiguration([]byte(d), datamodel.NewDocumentConfiguration())
if err != nil {
panic(err)
}
@@ -775,7 +742,6 @@ func TestDocument_InputAsJSON_LargeIndent(t *testing.T) {
}
func TestDocument_RenderWithIndention(t *testing.T) {
-
spec := `openapi: "3.1.0"
info:
title: Test
@@ -785,7 +751,7 @@ paths:
get:
operationId: 'test'`
- config := datamodel.NewOpenDocumentConfiguration()
+ config := datamodel.NewDocumentConfiguration()
doc, err := NewDocumentWithConfiguration([]byte(spec), config)
if err != nil {
@@ -800,8 +766,7 @@ paths:
}
func TestDocument_IgnorePolymorphicCircularReferences(t *testing.T) {
-
- var d = `openapi: 3.1.0
+ d := `openapi: 3.1.0
components:
schemas:
ProductCategory:
@@ -818,7 +783,7 @@ components:
- "name"
- "children"`
- config := datamodel.NewClosedDocumentConfiguration()
+ config := datamodel.NewDocumentConfiguration()
config.IgnorePolymorphicCircularReferences = true
doc, err := NewDocumentWithConfiguration([]byte(d), config)
@@ -830,12 +795,11 @@ components:
assert.Len(t, errs, 0)
assert.Len(t, m.Index.GetCircularReferences(), 0)
-
+ assert.Len(t, m.Index.GetResolver().GetIgnoredCircularPolyReferences(), 1)
}
func TestDocument_IgnoreArrayCircularReferences(t *testing.T) {
-
- var d = `openapi: 3.1.0
+ d := `openapi: 3.1.0
components:
schemas:
ProductCategory:
@@ -852,7 +816,7 @@ components:
- "name"
- "children"`
- config := datamodel.NewClosedDocumentConfiguration()
+ config := datamodel.NewDocumentConfiguration()
config.IgnoreArrayCircularReferences = true
doc, err := NewDocumentWithConfiguration([]byte(d), config)
@@ -864,7 +828,66 @@ components:
assert.Len(t, errs, 0)
assert.Len(t, m.Index.GetCircularReferences(), 0)
+ assert.Len(t, m.Index.GetResolver().GetIgnoredCircularArrayReferences(), 1)
+}
+func TestDocument_TestMixedReferenceOrigin(t *testing.T) {
+ bs, _ := os.ReadFile("test_specs/mixedref-burgershop.openapi.yaml")
+
+ config := datamodel.NewDocumentConfiguration()
+ config.AllowRemoteReferences = true
+ config.AllowFileReferences = true
+ config.SkipCircularReferenceCheck = true
+ config.BasePath = "test_specs"
+
+ config.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelDebug,
+ }))
+
+ doc, _ := NewDocumentWithConfiguration(bs, config)
+ m, _ := doc.BuildV3Model()
+
+ // extract something that can only exist after being located by the rolodex.
+ mediaType := m.Model.Paths.PathItems.GetOrZero("/burgers/{burgerId}/dressings").
+ Get.Responses.Codes.GetOrZero("200").Content.GetOrZero("application/json").Schema.Schema().Items
+
+ items := mediaType.A.Schema()
+
+ origin := items.ParentProxy.GetReferenceOrigin()
+ assert.NotNil(t, origin)
+ assert.True(t, strings.HasSuffix(origin.AbsoluteLocation, "test_specs/burgershop.openapi.yaml"))
+}
+
+func BenchmarkReferenceOrigin(b *testing.B) {
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+
+ bs, _ := os.ReadFile("test_specs/mixedref-burgershop.openapi.yaml")
+
+ config := datamodel.NewDocumentConfiguration()
+ config.AllowRemoteReferences = true
+ config.AllowFileReferences = true
+ config.BasePath = "test_specs"
+ config.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug}))
+
+ doc, _ := NewDocumentWithConfiguration(bs, config)
+ m, _ := doc.BuildV3Model()
+
+ // extract something that can only exist after being located by the rolodex.
+ mediaType := m.Model.Paths.PathItems.GetOrZero("/burgers/{burgerId}/dressings").
+ Get.Responses.Codes.GetOrZero("200").Content.GetOrZero("application/json").Schema.Schema().Items
+
+ items := mediaType.A.Schema()
+
+ origin := items.ParentProxy.GetReferenceOrigin()
+ if origin == nil {
+ // fmt.Println("nil origin")
+ } else {
+ // fmt.Println(origin.AbsoluteLocation)
+ }
+ assert.NotNil(b, origin)
+ assert.True(b, strings.HasSuffix(origin.AbsoluteLocation, "test_specs/burgershop.openapi.yaml"))
+ }
}
// Ensure document ordering is preserved after building, rendering, and reloading.
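
A sketch of the error-reporting pattern these updated tests rely on: indexing and resolving errors are collected by the rolodex and can be inspected there even when `BuildV3Model` no longer returns them. The file name is a placeholder and the program is not part of this patch.

```go
package main

import (
	"fmt"
	"os"

	"github.com/pb33f/libopenapi"
)

func main() {
	spec, _ := os.ReadFile("spec-with-bad-refs.yaml")
	doc, err := libopenapi.NewDocument(spec)
	if err != nil {
		panic(err)
	}
	doc.BuildV3Model()
	// errors caught during indexing and resolving are held by the rolodex.
	for _, caught := range doc.GetRolodex().GetCaughtErrors() {
		fmt.Println("caught:", caught)
	}
}
```
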
diff --git a/go.mod b/go.mod
index 5f0a20a..fb34b34 100644
--- a/go.mod
+++ b/go.mod
@@ -1,6 +1,6 @@
module github.com/pb33f/libopenapi
-go 1.20
+go 1.21
require (
github.com/lucasjones/reggen v0.0.0-20200904144131-37ba4fa293bb
diff --git a/go.sum b/go.sum
index ec2a67b..75099b7 100644
--- a/go.sum
+++ b/go.sum
@@ -108,6 +108,7 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U=
+golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
diff --git a/index/circular_reference_result.go b/index/circular_reference_result.go
index feec176..6538b57 100644
--- a/index/circular_reference_result.go
+++ b/index/circular_reference_result.go
@@ -14,6 +14,7 @@ type CircularReferenceResult struct {
IsInfiniteLoop bool // if all the definitions in the reference loop are marked as required, this is an infinite circular reference, thus is not allowed.
}
+// GenerateJourneyPath generates a string representation of the journey taken to find the circular reference.
func (c *CircularReferenceResult) GenerateJourneyPath() string {
buf := strings.Builder{}
for i, ref := range c.Journey {
@@ -22,9 +23,6 @@ func (c *CircularReferenceResult) GenerateJourneyPath() string {
}
buf.WriteString(ref.Name)
- // buf.WriteString(" (")
- // buf.WriteString(ref.Definition)
- // buf.WriteString(")")
}
return buf.String()
diff --git a/index/extract_refs.go b/index/extract_refs.go
index ec1ab19..5cbf73c 100644
--- a/index/extract_refs.go
+++ b/index/extract_refs.go
@@ -6,6 +6,8 @@ package index
import (
"errors"
"fmt"
+ "net/url"
+ "path/filepath"
"strings"
"github.com/pb33f/libopenapi/utils"
@@ -23,7 +25,6 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
if len(node.Content) > 0 {
var prev, polyName string
for i, n := range node.Content {
-
if utils.IsNodeMap(n) || utils.IsNodeArray(n) {
level++
// check if we're using polymorphic values. These tend to create rabbit warrens of circular
@@ -44,9 +45,22 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
// https://github.com/pb33f/libopenapi/issues/76
schemaContainingNodes := []string{"schema", "items", "additionalProperties", "contains", "not", "unevaluatedItems", "unevaluatedProperties"}
if i%2 == 0 && slices.Contains(schemaContainingNodes, n.Value) && !utils.IsNodeArray(node) && (i+1 < len(node.Content)) {
+
+ var jsonPath, definitionPath, fullDefinitionPath string
+
+ if len(seenPath) > 0 || n.Value != "" {
+ loc := append(seenPath, n.Value)
+ // create definition and full definition paths
+ definitionPath = fmt.Sprintf("#/%s", strings.Join(loc, "/"))
+ fullDefinitionPath = fmt.Sprintf("%s#/%s", index.specAbsolutePath, strings.Join(loc, "/"))
+ _, jsonPath = utils.ConvertComponentIdIntoFriendlyPathSearch(definitionPath)
+ }
ref := &Reference{
- Node: node.Content[i+1],
- Path: fmt.Sprintf("$.%s.%s", strings.Join(seenPath, "."), n.Value),
+ FullDefinition: fullDefinitionPath,
+ Definition: definitionPath,
+ Node: node.Content[i+1],
+ Path: jsonPath,
+ Index: index,
}
isRef, _, _ := utils.IsNodeRefValue(node.Content[i+1])
@@ -86,9 +100,19 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
label = prop.Value
continue
}
+ var jsonPath, definitionPath, fullDefinitionPath string
+ if len(seenPath) > 0 || n.Value != "" && label != "" {
+ loc := append(seenPath, n.Value, label)
+ definitionPath = fmt.Sprintf("#/%s", strings.Join(loc, "/"))
+ fullDefinitionPath = fmt.Sprintf("%s#/%s", index.specAbsolutePath, strings.Join(loc, "/"))
+ _, jsonPath = utils.ConvertComponentIdIntoFriendlyPathSearch(definitionPath)
+ }
ref := &Reference{
- Node: prop,
- Path: fmt.Sprintf("$.%s.%s.%s", strings.Join(seenPath, "."), n.Value, label),
+ FullDefinition: fullDefinitionPath,
+ Definition: definitionPath,
+ Node: prop,
+ Path: jsonPath,
+ Index: index,
}
isRef, _, _ := utils.IsNodeRefValue(prop)
@@ -116,9 +140,25 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
if i%2 == 0 && slices.Contains(arrayOfSchemaContainingNodes, n.Value) && !utils.IsNodeArray(node) && (i+1 < len(node.Content)) {
// for each element in the array, add it to our schema definitions
for h, element := range node.Content[i+1].Content {
+
+ var jsonPath, definitionPath, fullDefinitionPath string
+ if len(seenPath) > 0 {
+ loc := append(seenPath, n.Value, fmt.Sprintf("%d", h))
+ definitionPath = fmt.Sprintf("#/%s", strings.Join(loc, "/"))
+ fullDefinitionPath = fmt.Sprintf("%s#/%s", index.specAbsolutePath, strings.Join(loc, "/"))
+ _, jsonPath = utils.ConvertComponentIdIntoFriendlyPathSearch(definitionPath)
+ } else {
+ definitionPath = fmt.Sprintf("#/%s", n.Value)
+ fullDefinitionPath = fmt.Sprintf("%s#/%s", index.specAbsolutePath, n.Value)
+ _, jsonPath = utils.ConvertComponentIdIntoFriendlyPathSearch(definitionPath)
+ }
+
ref := &Reference{
- Node: element,
- Path: fmt.Sprintf("$.%s.%s[%d]", strings.Join(seenPath, "."), n.Value, h),
+ FullDefinition: fullDefinitionPath,
+ Definition: definitionPath,
+ Node: element,
+ Path: jsonPath,
+ Index: index,
}
isRef, _, _ := utils.IsNodeRefValue(element)
@@ -149,20 +189,121 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
index.linesWithRefs[n.Line] = true
fp := make([]string, len(seenPath))
- for x, foundPathNode := range seenPath {
- fp[x] = foundPathNode
- }
+ copy(fp, seenPath)
value := node.Content[i+1].Value
-
segs := strings.Split(value, "/")
name := segs[len(segs)-1]
- _, p := utils.ConvertComponentIdIntoFriendlyPathSearch(value)
+ uri := strings.Split(value, "#/")
+
+ // determine absolute path to this definition
+ var defRoot string
+ if strings.HasPrefix(index.specAbsolutePath, "http") {
+ defRoot = index.specAbsolutePath
+ } else {
+ defRoot = filepath.Dir(index.specAbsolutePath)
+ }
+
+ var componentName string
+ var fullDefinitionPath string
+ if len(uri) == 2 {
+ if uri[0] == "" {
+ fullDefinitionPath = fmt.Sprintf("%s#/%s", index.specAbsolutePath, uri[1])
+ componentName = value
+ } else {
+
+ if strings.HasPrefix(uri[0], "http") {
+ fullDefinitionPath = value
+ componentName = fmt.Sprintf("#/%s", uri[1])
+ } else {
+ if filepath.IsAbs(uri[0]) {
+ fullDefinitionPath = value
+ componentName = fmt.Sprintf("#/%s", uri[1])
+ } else {
+
+ // if the index has a base path, use that to resolve the path
+ if index.config.BasePath != "" && index.config.BaseURL == nil {
+ abs, _ := filepath.Abs(filepath.Join(index.config.BasePath, uri[0]))
+ if abs != defRoot {
+ abs, _ = filepath.Abs(filepath.Join(defRoot, uri[0]))
+ }
+ fullDefinitionPath = fmt.Sprintf("%s#/%s", abs, uri[1])
+ componentName = fmt.Sprintf("#/%s", uri[1])
+ } else {
+ // if the index has a base URL, use that to resolve the path.
+ if index.config.BaseURL != nil && !filepath.IsAbs(defRoot) {
+ u := *index.config.BaseURL
+ abs, _ := filepath.Abs(filepath.Join(u.Path, uri[0]))
+ u.Path = abs
+ fullDefinitionPath = fmt.Sprintf("%s#/%s", u.String(), uri[1])
+ componentName = fmt.Sprintf("#/%s", uri[1])
+
+ } else {
+
+ abs, _ := filepath.Abs(filepath.Join(defRoot, uri[0]))
+ fullDefinitionPath = fmt.Sprintf("%s#/%s", abs, uri[1])
+ componentName = fmt.Sprintf("#/%s", uri[1])
+ }
+ }
+ }
+ }
+ }
+
+ } else {
+ if strings.HasPrefix(uri[0], "http") {
+ fullDefinitionPath = value
+ } else {
+ // is it a relative file include?
+ if !strings.Contains(uri[0], "#") {
+
+ if strings.HasPrefix(defRoot, "http") {
+ if !filepath.IsAbs(uri[0]) {
+ u, _ := url.Parse(defRoot)
+ pathDir := filepath.Dir(u.Path)
+ pathAbs, _ := filepath.Abs(filepath.Join(pathDir, uri[0]))
+ u.Path = pathAbs
+ fullDefinitionPath = u.String()
+ }
+ } else {
+ if !filepath.IsAbs(uri[0]) {
+ // if the index has a base path, use that to resolve the path
+ if index.config.BasePath != "" {
+ abs, _ := filepath.Abs(filepath.Join(index.config.BasePath, uri[0]))
+ if abs != defRoot {
+ abs, _ = filepath.Abs(filepath.Join(defRoot, uri[0]))
+ }
+ fullDefinitionPath = abs
+ componentName = uri[0]
+ } else {
+ // if the index has a base URL, use that to resolve the path.
+ if index.config.BaseURL != nil {
+
+ u := *index.config.BaseURL
+ abs := filepath.Join(u.Path, uri[0])
+ u.Path = abs
+ fullDefinitionPath = u.String()
+ componentName = uri[0]
+ } else {
+ abs, _ := filepath.Abs(filepath.Join(defRoot, uri[0]))
+ fullDefinitionPath = abs
+ componentName = uri[0]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ _, p := utils.ConvertComponentIdIntoFriendlyPathSearch(componentName)
+
ref := &Reference{
- Definition: value,
- Name: name,
- Node: node,
- Path: p,
+ FullDefinition: fullDefinitionPath,
+ Definition: componentName,
+ Name: name,
+ Node: node,
+ Path: p,
+ Index: index,
}
// add to raw sequenced refs
@@ -184,10 +325,12 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
if len(node.Content) > 2 {
copiedNode := *node
copied := Reference{
- Definition: ref.Definition,
- Name: ref.Name,
- Node: &copiedNode,
- Path: p,
+ FullDefinition: fullDefinitionPath,
+ Definition: ref.Definition,
+ Name: ref.Name,
+ Node: &copiedNode,
+ Path: p,
+ Index: index,
}
// protect this data using a copy, prevent the resolver from destroying things.
index.refsWithSiblings[value] = copied
@@ -232,7 +375,7 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
continue
}
- index.allRefs[value] = ref
+ index.allRefs[fullDefinitionPath] = ref
found = append(found, ref)
}
@@ -332,9 +475,12 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
if len(seenPath) > 0 {
lastItem := seenPath[len(seenPath)-1]
if lastItem == "properties" {
+ seenPath = append(seenPath, strings.ReplaceAll(n.Value, "/", "~1"))
+ prev = n.Value
continue
}
}
+
// all enums need to have a type, extract the type from the node where the enum was found.
_, enumKeyValueNode := utils.FindKeyNodeTop("type", node.Content)
@@ -378,7 +524,8 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
}
}
- seenPath = append(seenPath, n.Value)
+ seenPath = append(seenPath, strings.ReplaceAll(n.Value, "/", "~1"))
+ //seenPath = append(seenPath, n.Value)
prev = n.Value
}
@@ -391,13 +538,6 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
}
}
}
- if len(seenPath) > 0 {
- seenPath = seenPath[:len(seenPath)-1]
- }
-
- }
- if len(seenPath) > 0 {
- seenPath = seenPath[:len(seenPath)-1]
}
index.refCount = len(index.allRefs)
@@ -414,18 +554,24 @@ func (index *SpecIndex) ExtractComponentsFromRefs(refs []*Reference) []*Referenc
c := make(chan bool)
locate := func(ref *Reference, refIndex int, sequence []*ReferenceMapped) {
- located := index.FindComponent(ref.Definition, ref.Node)
+ located := index.FindComponent(ref.FullDefinition)
if located != nil {
+
+ // have we already mapped this?
index.refLock.Lock()
- if index.allMappedRefs[ref.Definition] == nil {
+ if index.allMappedRefs[ref.FullDefinition] == nil {
found = append(found, located)
- index.allMappedRefs[ref.Definition] = located
- sequence[refIndex] = &ReferenceMapped{
- Reference: located,
- Definition: ref.Definition,
+ index.allMappedRefs[located.FullDefinition] = located
+ rm := &ReferenceMapped{
+ OriginalReference: ref,
+ Reference: located,
+ Definition: located.Definition,
+ FullDefinition: located.FullDefinition,
}
+ sequence[refIndex] = rm
}
index.refLock.Unlock()
+
} else {
_, path := utils.ConvertComponentIdIntoFriendlyPathSearch(ref.Definition)
@@ -463,15 +609,13 @@ func (index *SpecIndex) ExtractComponentsFromRefs(refs []*Reference) []*Referenc
for r := range refsToCheck {
// expand our index of all mapped refs
go locate(refsToCheck[r], r, mappedRefsInSequence)
- // locate(refsToCheck[r], r, mappedRefsInSequence) // used for sync testing.
+ //locate(refsToCheck[r], r, mappedRefsInSequence) // used for sync testing.
}
completedRefs := 0
for completedRefs < len(refsToCheck) {
- select {
- case <-c:
- completedRefs++
- }
+ <-c
+ completedRefs++
}
for m := range mappedRefsInSequence {
if mappedRefsInSequence[m] != nil {
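
The loop change at the end of this hunk is the plain "count completions on a channel" pattern; a standalone sketch of the same idea (independent of the index package):

```go
package main

import "fmt"

// start one goroutine per job and drain one acknowledgement per job from the channel
// before moving on, mirroring the completedRefs loop above.
func main() {
	c := make(chan bool)
	jobs := 5
	for i := 0; i < jobs; i++ {
		go func(n int) {
			// ... locate(...) would run here ...
			fmt.Println("finished job", n)
			c <- true
		}(i)
	}
	completed := 0
	for completed < jobs {
		<-c
		completed++
	}
}
```
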
diff --git a/index/extract_refs_test.go b/index/extract_refs_test.go
index 41b9ce0..672a7ea 100644
--- a/index/extract_refs_test.go
+++ b/index/extract_refs_test.go
@@ -114,7 +114,6 @@ components:
// https://github.com/pb33f/libopenapi/issues/112
func TestSpecIndex_ExtractRefs_CheckReferencesWithBracketsInName(t *testing.T) {
-
yml := `openapi: 3.0.0
components:
schemas:
@@ -137,7 +136,6 @@ components:
// https://github.com/daveshanley/vacuum/issues/339
func TestSpecIndex_ExtractRefs_CheckEnumNotPropertyCalledEnum(t *testing.T) {
-
yml := `openapi: 3.0.0
components:
schemas:
@@ -164,11 +162,22 @@ components:
example:
- yo
- hello
+ Schema2:
+ type: object
+ properties:
+ enumRef:
+ $ref: '#/components/schemas/enum'
+ enum:
+ type: string
+ enum: [big, small]
+ nullable: true
+ enum:
+ type: [string, null]
+ enum: [big, small]
`
var rootNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &rootNode)
c := CreateOpenAPIIndexConfig()
idx := NewSpecIndexWithConfig(&rootNode, c)
- assert.Len(t, idx.allEnums, 1)
-
+ assert.Len(t, idx.allEnums, 3)
}
diff --git a/index/find_component.go b/index/find_component.go
index e3664cf..d553d80 100644
--- a/index/find_component.go
+++ b/index/find_component.go
@@ -5,13 +5,9 @@ package index
import (
"fmt"
- "io"
- "net/http"
"net/url"
- "os"
"path/filepath"
"strings"
- "time"
"github.com/pb33f/libopenapi/utils"
"github.com/vmware-labs/yaml-jsonpath/pkg/yamlpath"
@@ -21,435 +17,174 @@ import (
// FindComponent will locate a component by its reference, returns nil if nothing is found.
// This method will recurse through remote, local and file references. For each new external reference
// a new index will be created. These indexes can then be traversed recursively.
-func (index *SpecIndex) FindComponent(componentId string, parent *yaml.Node) *Reference {
+func (index *SpecIndex) FindComponent(componentId string) *Reference {
if index.root == nil {
return nil
}
- remoteLookup := func(id string) (*yaml.Node, *yaml.Node, error) {
- if index.config.AllowRemoteLookup {
- return index.lookupRemoteReference(id)
+ uri := strings.Split(componentId, "#/")
+ if len(uri) == 2 {
+ if uri[0] != "" {
+ if index.specAbsolutePath == uri[0] {
+ return index.FindComponentInRoot(fmt.Sprintf("#/%s", uri[1]))
+ } else {
+ return index.lookupRolodex(uri)
+ }
} else {
- return nil, nil, fmt.Errorf("remote lookups are not permitted, " +
- "please set AllowRemoteLookup to true in the configuration")
+ return index.FindComponentInRoot(fmt.Sprintf("#/%s", uri[1]))
}
- }
+ } else {
- fileLookup := func(id string) (*yaml.Node, *yaml.Node, error) {
- if index.config.AllowFileLookup {
- return index.lookupFileReference(id)
- } else {
- return nil, nil, fmt.Errorf("local lookups are not permitted, " +
- "please set AllowFileLookup to true in the configuration")
+ // does it contain a file extension?
+ fileExt := filepath.Ext(componentId)
+ if fileExt != "" {
+ return index.lookupRolodex(uri)
}
- }
- switch DetermineReferenceResolveType(componentId) {
- case LocalResolve: // ideally, every single ref in every single spec is local. however, this is not the case.
+ // root search
return index.FindComponentInRoot(componentId)
+ }
+}
- case HttpResolve:
- uri := strings.Split(componentId, "#")
- if len(uri) >= 2 {
- return index.performExternalLookup(uri, componentId, remoteLookup, parent)
- }
- if len(uri) == 1 {
- // if there is no reference, second segment is empty / has no name
- // this means there is no component to look-up and the entire file should be pulled in.
- // to stop all the other code from breaking (that is expecting a component), let's just post-pend
- // a hash to the end of the componentId and ensure the uri slice is as expected.
- // described in https://github.com/pb33f/libopenapi/issues/37
- componentId = fmt.Sprintf("%s#", componentId)
- uri = append(uri, "")
- return index.performExternalLookup(uri, componentId, remoteLookup, parent)
- }
+func FindComponent(root *yaml.Node, componentId, absoluteFilePath string, index *SpecIndex) *Reference {
+ // check component for url encoding.
+ if strings.Contains(componentId, "%") {
+ // decode the url.
+ componentId, _ = url.QueryUnescape(componentId)
+ }
- case FileResolve:
- uri := strings.Split(componentId, "#")
- if len(uri) == 2 {
- return index.performExternalLookup(uri, componentId, fileLookup, parent)
- }
- if len(uri) == 1 {
- // if there is no reference, second segment is empty / has no name
- // this means there is no component to look-up and the entire file should be pulled in.
- // to stop all the other code from breaking (that is expecting a component), let's just post-pend
- // a hash to the end of the componentId and ensure the uri slice is as expected.
- // described in https://github.com/pb33f/libopenapi/issues/37
- //
- // ^^ this same issue was re-reported in file based lookups in vacuum.
- // more info here: https://github.com/daveshanley/vacuum/issues/225
- componentId = fmt.Sprintf("%s#", componentId)
- uri = append(uri, "")
- return index.performExternalLookup(uri, componentId, fileLookup, parent)
+ name, friendlySearch := utils.ConvertComponentIdIntoFriendlyPathSearch(componentId)
+ if friendlySearch == "$." {
+ friendlySearch = "$"
+ }
+ path, err := yamlpath.NewPath(friendlySearch)
+ if path == nil || err != nil {
+ return nil // no component found
+ }
+ res, _ := path.Find(root)
+
+ if len(res) == 1 {
+ resNode := res[0]
+ fullDef := fmt.Sprintf("%s%s", absoluteFilePath, componentId)
+ // extract properties
+ ref := &Reference{
+ FullDefinition: fullDef,
+ Definition: componentId,
+ Name: name,
+ Node: resNode,
+ Path: friendlySearch,
+ RemoteLocation: absoluteFilePath,
+ Index: index,
+ RequiredRefProperties: extractDefinitionRequiredRefProperties(resNode, map[string][]string{}, fullDef, index),
}
+ return ref
}
return nil
}
-var httpClient = &http.Client{Timeout: time.Duration(60) * time.Second}
-
-type RemoteURLHandler = func(url string) (*http.Response, error)
-
-func getRemoteDoc(g RemoteURLHandler, u string, d chan []byte, e chan error) {
- resp, err := g(u)
- if err != nil {
- e <- err
- close(e)
- close(d)
- return
- }
- var body []byte
- body, _ = io.ReadAll(resp.Body)
- d <- body
- close(e)
- close(d)
-}
-
-func (index *SpecIndex) lookupRemoteReference(ref string) (*yaml.Node, *yaml.Node, error) {
- // split string to remove file reference
- uri := strings.Split(ref, "#")
-
- // have we already seen this remote source?
- var parsedRemoteDocument *yaml.Node
- alreadySeen, foundDocument := index.CheckForSeenRemoteSource(uri[0])
-
- if alreadySeen {
- parsedRemoteDocument = foundDocument
- } else {
-
- d := make(chan bool)
- var body []byte
- var err error
-
- go func(uri string) {
- bc := make(chan []byte)
- ec := make(chan error)
- var getter RemoteURLHandler = httpClient.Get
- if index.config != nil && index.config.RemoteURLHandler != nil {
- getter = index.config.RemoteURLHandler
- }
-
- // if we have a remote handler, use it instead of the default.
- if index.config != nil && index.config.FSHandler != nil {
- go func() {
- remoteFS := index.config.FSHandler
- remoteFile, rErr := remoteFS.Open(uri)
- if rErr != nil {
- e := fmt.Errorf("unable to open remote file: %s", rErr)
- ec <- e
- return
- }
- b, ioErr := io.ReadAll(remoteFile)
- if ioErr != nil {
- e := fmt.Errorf("unable to read remote file bytes: %s", ioErr)
- ec <- e
- return
- }
- bc <- b
- }()
- } else {
- go getRemoteDoc(getter, uri, bc, ec)
- }
- select {
- case v := <-bc:
- body = v
- break
- case er := <-ec:
- err = er
- break
- }
- if len(body) > 0 {
- var remoteDoc yaml.Node
- er := yaml.Unmarshal(body, &remoteDoc)
- if er != nil {
- err = er
- d <- true
- return
- }
- parsedRemoteDocument = &remoteDoc
- if index.config != nil {
- index.config.seenRemoteSources.Store(uri, &remoteDoc)
- }
- }
- d <- true
- }(uri[0])
-
- // wait for double go fun.
- <-d
- if err != nil {
- // no bueno.
- return nil, nil, err
- }
- }
-
- // lookup item from reference by using a path query.
- var query string
- if len(uri) >= 2 {
- query = fmt.Sprintf("$%s", strings.ReplaceAll(uri[1], "/", "."))
- } else {
- query = "$"
- }
-
- query, err := url.PathUnescape(query)
- if err != nil {
- return nil, nil, err
- }
-
- // remove any URL encoding
- query = strings.Replace(query, "~1", "./", 1)
- query = strings.ReplaceAll(query, "~1", "/")
-
- path, err := yamlpath.NewPath(query)
- if err != nil {
- return nil, nil, err
- }
- result, _ := path.Find(parsedRemoteDocument)
- if len(result) == 1 {
- return result[0], parsedRemoteDocument, nil
- }
- return nil, nil, nil
-}
-
-func (index *SpecIndex) lookupFileReference(ref string) (*yaml.Node, *yaml.Node, error) {
- // split string to remove file reference
- uri := strings.Split(ref, "#")
- file := strings.ReplaceAll(uri[0], "file:", "")
- filePath := filepath.Dir(file)
- fileName := filepath.Base(file)
-
- var parsedRemoteDocument *yaml.Node
-
- if index.seenRemoteSources[file] != nil {
- parsedRemoteDocument = index.seenRemoteSources[file]
- } else {
-
- base := index.config.BasePath
- fileToRead := filepath.Join(base, filePath, fileName)
- var body []byte
- var err error
-
- // if we have an FS handler, use it instead of the default behavior
- if index.config != nil && index.config.FSHandler != nil {
- remoteFS := index.config.FSHandler
- remoteFile, rErr := remoteFS.Open(fileToRead)
- if rErr != nil {
- e := fmt.Errorf("unable to open file: %s", rErr)
- return nil, nil, e
- }
- body, err = io.ReadAll(remoteFile)
- if err != nil {
- e := fmt.Errorf("unable to read file bytes: %s", err)
- return nil, nil, e
- }
-
- } else {
-
- // try and read the file off the local file system, if it fails
- // check for a baseURL and then ask our remote lookup function to go try and get it.
- body, err = os.ReadFile(fileToRead)
-
- if err != nil {
-
- // if we have a baseURL, then we can try and get the file from there.
- if index.config != nil && index.config.BaseURL != nil {
-
- u := index.config.BaseURL
- remoteRef := GenerateCleanSpecConfigBaseURL(u, ref, true)
- a, b, e := index.lookupRemoteReference(remoteRef)
- if e != nil {
- // give up, we can't find the file, not locally, not remotely. It's toast.
- return nil, nil, e
- }
- return a, b, nil
-
- } else {
- // no baseURL? then we can't do anything, give up.
- return nil, nil, err
- }
- }
- }
- var remoteDoc yaml.Node
- err = yaml.Unmarshal(body, &remoteDoc)
- if err != nil {
- return nil, nil, err
- }
- parsedRemoteDocument = &remoteDoc
- if index.seenLocalSources != nil {
- index.sourceLock.Lock()
- index.seenLocalSources[file] = &remoteDoc
- index.sourceLock.Unlock()
- }
- }
-
- // lookup item from reference by using a path query.
- var query string
- if len(uri) >= 2 {
- query = fmt.Sprintf("$%s", strings.ReplaceAll(uri[1], "/", "."))
- } else {
- query = "$"
- }
-
- query, err := url.PathUnescape(query)
- if err != nil {
- return nil, nil, err
- }
-
- // remove any URL encoding
- query = strings.Replace(query, "~1", "./", 1)
- query = strings.ReplaceAll(query, "~1", "/")
-
- path, err := yamlpath.NewPath(query)
- if err != nil {
- return nil, nil, err
- }
- result, _ := path.Find(parsedRemoteDocument)
- if len(result) == 1 {
- return result[0], parsedRemoteDocument, nil
- }
-
- return nil, parsedRemoteDocument, nil
-}
-
func (index *SpecIndex) FindComponentInRoot(componentId string) *Reference {
if index.root != nil {
-
- // check component for url encoding.
- if strings.Contains(componentId, "%") {
- // decode the url.
- componentId, _ = url.QueryUnescape(componentId)
- }
-
- name, friendlySearch := utils.ConvertComponentIdIntoFriendlyPathSearch(componentId)
- path, err := yamlpath.NewPath(friendlySearch)
- if path == nil || err != nil {
- return nil // no component found
- }
- res, _ := path.Find(index.root)
-
- if len(res) == 1 {
- resNode := res[0]
- if res[0].Kind == yaml.DocumentNode {
- resNode = res[0].Content[0]
- }
- ref := &Reference{
- Definition: componentId,
- Name: name,
- Node: resNode,
- Path: friendlySearch,
- RequiredRefProperties: index.extractDefinitionRequiredRefProperties(res[0], map[string][]string{}),
- }
-
- return ref
- }
+ return FindComponent(index.root, componentId, index.specAbsolutePath, index)
}
return nil
}
-func (index *SpecIndex) performExternalLookup(uri []string, componentId string,
- lookupFunction ExternalLookupFunction, parent *yaml.Node) *Reference {
- if len(uri) > 0 {
- index.externalLock.RLock()
- externalSpecIndex := index.externalSpecIndex[uri[0]]
- index.externalLock.RUnlock()
+func (index *SpecIndex) lookupRolodex(uri []string) *Reference {
+ if index.rolodex == nil {
+ return nil
+ }
- if externalSpecIndex == nil {
- _, newRoot, err := lookupFunction(componentId)
- if err != nil {
- indexError := &IndexingError{
- Err: err,
- Node: parent,
- Path: componentId,
+ if len(uri) > 0 {
+
+ // split string to remove file reference
+ file := strings.ReplaceAll(uri[0], "file:", "")
+
+ var absoluteFileLocation, fileName string
+
+ // is this a local or a remote file?
+ fileName = filepath.Base(file)
+ if filepath.IsAbs(file) || strings.HasPrefix(file, "http") {
+ absoluteFileLocation = file
+ } else {
+ if index.specAbsolutePath != "" {
+ if index.config.BaseURL == nil {
+
+ // consider the file local
+ dir := filepath.Dir(index.config.SpecAbsolutePath)
+ absoluteFileLocation, _ = filepath.Abs(filepath.Join(dir, file))
}
- index.errorLock.Lock()
- index.refErrors = append(index.refErrors, indexError)
- index.errorLock.Unlock()
+ }
+ }
+
+ // if the absolute file location has no file ext, then get the rolodex root.
+ ext := filepath.Ext(absoluteFileLocation)
+ var parsedDocument *yaml.Node
+ var err error
+
+ idx := index
+ if ext != "" {
+ // extract the document from the rolodex.
+ rFile, rError := index.rolodex.Open(absoluteFileLocation)
+
+ if rError != nil {
+ index.logger.Error("unable to open the rolodex file, check specification references and base path",
+ "file", absoluteFileLocation, "error", rError)
return nil
}
- // cool, cool, lets index this spec also. This is a recursive action and will keep going
- // until all remote references have been found.
- var bp *url.URL
- var bd string
-
- if index.config.BaseURL != nil {
- bp = index.config.BaseURL
+ if rFile == nil {
+ index.logger.Error("cannot locate file in the rolodex, check specification references and base path",
+ "file", absoluteFileLocation)
+ return nil
}
- if index.config.BasePath != "" {
- bd = index.config.BasePath
+ if rFile.GetIndex() != nil {
+ idx = rFile.GetIndex()
}
- var path, newBasePath string
- var newUrl *url.URL
-
- if bp != nil {
- path = GenerateCleanSpecConfigBaseURL(bp, uri[0], false)
- newUrl, _ = url.Parse(path)
- newBasePath = filepath.Dir(filepath.Join(index.config.BasePath, filepath.Dir(newUrl.Path)))
- }
- if bd != "" {
- if len(uri[0]) > 0 {
- // if there is no base url defined, but we can know we have been requested remotely,
- // set the base url to the remote url base path.
- // first check if the first param is actually a URL
- io, er := url.ParseRequestURI(uri[0])
- if er != nil {
- newBasePath = filepath.Dir(filepath.Join(bd, uri[0]))
- } else {
- if newUrl == nil || newUrl.String() != io.String() {
- newUrl, _ = url.Parse(fmt.Sprintf("%s://%s%s", io.Scheme, io.Host, filepath.Dir(io.Path)))
- }
- newBasePath = filepath.Dir(filepath.Join(bd, uri[1]))
- }
- } else {
- newBasePath = filepath.Dir(filepath.Join(bd, uri[0]))
- }
- }
-
- if newUrl != nil || newBasePath != "" {
- newConfig := &SpecIndexConfig{
- BaseURL: newUrl,
- BasePath: newBasePath,
- AllowRemoteLookup: index.config.AllowRemoteLookup,
- AllowFileLookup: index.config.AllowFileLookup,
- ParentIndex: index,
- seenRemoteSources: index.config.seenRemoteSources,
- remoteLock: index.config.remoteLock,
- uri: uri,
- }
-
- var newIndex *SpecIndex
- seen := index.SearchAncestryForSeenURI(uri[0])
- if seen == nil {
-
- newIndex = NewSpecIndexWithConfig(newRoot, newConfig)
- index.refLock.Lock()
- index.externalLock.Lock()
- index.externalSpecIndex[uri[0]] = newIndex
- index.externalLock.Unlock()
- newIndex.relativePath = path
- newIndex.parentIndex = index
- index.AddChild(newIndex)
- index.refLock.Unlock()
- externalSpecIndex = newIndex
- } else {
- externalSpecIndex = seen
- }
+ parsedDocument, err = rFile.GetContentAsYAMLNode()
+ if err != nil {
+ index.logger.Error("unable to parse rolodex file", "file", absoluteFileLocation, "error", err)
+ return nil
}
+ } else {
+ parsedDocument = index.root
}
- if externalSpecIndex != nil {
- foundRef := externalSpecIndex.FindComponentInRoot(uri[1])
+ wholeFile := false
+ query := ""
+ if len(uri) < 2 {
+ wholeFile = true
+ } else {
+ query = fmt.Sprintf("#/%s", strings.Replace(uri[1], "~1", "./", 1))
+ query = strings.ReplaceAll(query, "~1", "/")
+ }
+
+ // check if there is a component we want to suck in, or if the
+ // entire root needs to come in.
+ var foundRef *Reference
+ if wholeFile {
+ if parsedDocument.Kind == yaml.DocumentNode {
+ parsedDocument = parsedDocument.Content[0]
+ }
+
+ foundRef = &Reference{
+ FullDefinition: absoluteFileLocation,
+ Definition: fileName,
+ Name: fileName,
+ Index: idx,
+ Node: parsedDocument,
+ IsRemote: true,
+ RemoteLocation: absoluteFileLocation,
+ Path: "$",
+ RequiredRefProperties: extractDefinitionRequiredRefProperties(parsedDocument, map[string][]string{}, absoluteFileLocation, index),
+ }
+ return foundRef
+ } else {
+ foundRef = FindComponent(parsedDocument, query, absoluteFileLocation, index)
if foundRef != nil {
- nameSegs := strings.Split(uri[1], "/")
- ref := &Reference{
- Definition: componentId,
- Name: nameSegs[len(nameSegs)-1],
- Node: foundRef.Node,
- IsRemote: true,
- RemoteLocation: componentId,
- Path: foundRef.Path,
- }
- return ref
+ foundRef.IsRemote = true
+ foundRef.RemoteLocation = absoluteFileLocation
+ return foundRef
}
}
}
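
A note on the rewritten lookup above (illustrative only, not part of this change): a reference with no fragment pulls in an entire external document, while a reference with a fragment pulls in a single component. Assuming a file named schemas.yaml sits next to the root specification:

    // $ref: 'schemas.yaml'
    //   -> the returned *Reference wraps the document root; Path is "$" and
    //      FullDefinition/RemoteLocation hold the absolute path to schemas.yaml.
    //
    // $ref: 'schemas.yaml#/components/schemas/Thing'
    //   -> FindComponent locates just the named node, and the returned *Reference
    //      is marked IsRemote with RemoteLocation set to the absolute file path.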
diff --git a/index/find_component_test.go b/index/find_component_test.go
index 872abc1..919c675 100644
--- a/index/find_component_test.go
+++ b/index/find_component_test.go
@@ -4,18 +4,10 @@
package index
import (
- "errors"
- "fmt"
- "io"
- "io/fs"
- "net/http"
- "net/http/httptest"
- "os"
- "reflect"
- "testing"
-
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
+ "os"
+ "testing"
)
func TestSpecIndex_performExternalLookup(t *testing.T) {
@@ -36,19 +28,142 @@ func TestSpecIndex_performExternalLookup(t *testing.T) {
}
func TestSpecIndex_CheckCircularIndex(t *testing.T) {
- yml, _ := os.ReadFile("../test_specs/first.yaml")
+
+ cFile := "../test_specs/first.yaml"
+ yml, _ := os.ReadFile(cFile)
var rootNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &rootNode)
- c := CreateOpenAPIIndexConfig()
- c.BasePath = "../test_specs"
- index := NewSpecIndexWithConfig(&rootNode, c)
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "../test_specs"
+
+ rolo := NewRolodex(cf)
+ rolo.SetRootNode(&rootNode)
+ cf.Rolodex = rolo
+
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"first.yaml", "second.yaml", "third.yaml", "fourth.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+
+ assert.NoError(t, err)
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+
+ indexedErr := rolo.IndexTheRolodex()
+ rolo.BuildIndexes()
+
+ assert.NoError(t, indexedErr)
+
+ index := rolo.GetRootIndex()
+
assert.Nil(t, index.uri)
- assert.NotNil(t, index.children[0].uri)
- assert.NotNil(t, index.children[0].children[0].uri)
- assert.NotNil(t, index.SearchIndexForReference("second.yaml#/properties/property2"))
- assert.NotNil(t, index.SearchIndexForReference("second.yaml"))
- assert.Nil(t, index.SearchIndexForReference("fourth.yaml"))
+
+ a, _ := index.SearchIndexForReference("second.yaml#/properties/property2")
+ b, _ := index.SearchIndexForReference("second.yaml")
+ c, _ := index.SearchIndexForReference("fourth.yaml")
+
+ assert.NotNil(t, a)
+ assert.NotNil(t, b)
+ assert.Nil(t, c)
+}
+
+func TestSpecIndex_CheckCircularIndex_NoDirFS(t *testing.T) {
+
+ cFile := "../test_specs/first.yaml"
+ yml, _ := os.ReadFile(cFile)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &rootNode)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "../test_specs"
+
+ rolo := NewRolodex(cf)
+ rolo.SetRootNode(&rootNode)
+ cf.Rolodex = rolo
+
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ IndexConfig: cf,
+ }
+
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+
+ assert.NoError(t, err)
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+
+ indexedErr := rolo.IndexTheRolodex()
+ rolo.BuildIndexes()
+
+ assert.NoError(t, indexedErr)
+
+ index := rolo.GetRootIndex()
+
+ assert.Nil(t, index.uri)
+
+ a, _ := index.SearchIndexForReference("second.yaml#/properties/property2")
+ b, _ := index.SearchIndexForReference("second.yaml")
+ c, _ := index.SearchIndexForReference("fourth.yaml")
+
+ assert.NotNil(t, a)
+ assert.NotNil(t, b)
+ assert.Nil(t, c)
+}
+
+func TestFindComponent_RolodexFileParseError(t *testing.T) {
+
+ badData := "I cannot be parsed: \"I am not a YAML file or a JSON file"
+ _ = os.WriteFile("bad.yaml", []byte(badData), 0644)
+ defer os.Remove("bad.yaml")
+
+ badRef := `openapi: 3.1.0
+components:
+ schemas:
+ thing:
+ type: object
+ properties:
+ thong:
+ $ref: 'bad.yaml'
+`
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(badRef), &rootNode)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "."
+
+ rolo := NewRolodex(cf)
+ rolo.SetRootNode(&rootNode)
+ cf.Rolodex = rolo
+
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"bad.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+
+ assert.NoError(t, err)
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+
+ indexedErr := rolo.IndexTheRolodex()
+ rolo.BuildIndexes()
+
+ // should error
+ assert.Error(t, indexedErr)
+
+ index := rolo.GetRootIndex()
+
+ assert.Nil(t, index.uri)
+
+ // can't be found.
+ a, _ := index.SearchIndexForReference("bad.yaml")
+ assert.Nil(t, a)
}
func TestSpecIndex_performExternalLookup_invalidURL(t *testing.T) {
@@ -64,16 +179,16 @@ components:
c := CreateOpenAPIIndexConfig()
index := NewSpecIndexWithConfig(&rootNode, c)
- assert.Len(t, index.GetReferenceIndexErrors(), 2)
+ assert.Len(t, index.GetReferenceIndexErrors(), 1)
}
func TestSpecIndex_FindComponentInRoot(t *testing.T) {
yml := `openapi: 3.1.0
components:
- schemas:
- thing:
- properties:
- thong: hi!`
+ schemas:
+ thing:
+ properties:
+ thong: hi!`
var rootNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &rootNode)
@@ -85,75 +200,15 @@ components:
assert.Len(t, index.GetReferenceIndexErrors(), 0)
}
-func TestSpecIndex_FailLookupRemoteComponent_badPath(t *testing.T) {
- yml := `openapi: 3.1.0
-components:
- schemas:
- thing:
- properties:
- thong:
- $ref: 'https://pb33f.io/site.webmanifest#/....$.ok../oh#/$$_-'`
+func TestSpecIndex_FailFindComponentInRoot(t *testing.T) {
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(yml), &rootNode)
+ index := &SpecIndex{}
+ assert.Nil(t, index.FindComponentInRoot("does it even matter? of course not. no"))
- c := CreateOpenAPIIndexConfig()
- index := NewSpecIndexWithConfig(&rootNode, c)
-
- thing := index.FindComponentInRoot("#/$splish/$.../slash#$///./")
- assert.Nil(t, thing)
- assert.Len(t, index.GetReferenceIndexErrors(), 2)
}
-func TestSpecIndex_FailLookupRemoteComponent_Ok_butNotFound(t *testing.T) {
- yml := `openapi: 3.1.0
-components:
- schemas:
- thing:
- properties:
- thong:
- $ref: 'https://pb33f.io/site.webmanifest#/valid-but-missing'`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(yml), &rootNode)
-
- c := CreateOpenAPIIndexConfig()
- index := NewSpecIndexWithConfig(&rootNode, c)
-
- thing := index.FindComponentInRoot("#/valid-but-missing")
- assert.Nil(t, thing)
- assert.Len(t, index.GetReferenceIndexErrors(), 1)
-}
-
-// disabled test because remote host is flaky.
-//func TestSpecIndex_LocateRemoteDocsWithNoBaseURLSupplied(t *testing.T) {
-// // This test will push the index to do try and locate remote references that use relative references
-// spec := `openapi: 3.0.2
-//info:
-// title: Test
-// version: 1.0.0
-//paths:
-// /test:
-// get:
-// parameters:
-// - $ref: "https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml#/components/parameters/crs"`
-//
-// var rootNode yaml.Node
-// _ = yaml.Unmarshal([]byte(spec), &rootNode)
-//
-// c := CreateOpenAPIIndexConfig()
-// index := NewSpecIndexWithConfig(&rootNode, c)
-//
-// // extract crs param from index
-// crsParam := index.GetMappedReferences()["https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml#/components/parameters/crs"]
-// assert.NotNil(t, crsParam)
-// assert.True(t, crsParam.IsRemote)
-// assert.Equal(t, "crs", crsParam.Node.Content[1].Value)
-// assert.Equal(t, "query", crsParam.Node.Content[3].Value)
-// assert.Equal(t, "form", crsParam.Node.Content[9].Value)
-//}
-
func TestSpecIndex_LocateRemoteDocsWithRemoteURLHandler(t *testing.T) {
+
// This test will push the index to do try and locate remote references that use relative references
spec := `openapi: 3.0.2
info:
@@ -168,10 +223,29 @@ paths:
var rootNode yaml.Node
_ = yaml.Unmarshal([]byte(spec), &rootNode)
- c := CreateOpenAPIIndexConfig()
- c.RemoteURLHandler = httpClient.Get
+ // create a new config that allows remote lookups.
+ cf := &SpecIndexConfig{}
+ cf.AllowRemoteLookup = true
+ cf.AvoidCircularReferenceCheck = true
- index := NewSpecIndexWithConfig(&rootNode, c)
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // create a new remote fs and set the config for indexing.
+ remoteFS, _ := NewRemoteFSWithConfig(cf)
+
+ // add remote filesystem
+ rolo.AddRemoteFS("", remoteFS)
+
+ // index the rolodex.
+ indexedErr := rolo.IndexTheRolodex()
+
+ assert.NoError(t, indexedErr)
+
+ index := rolo.GetRootIndex()
// extract crs param from index
crsParam := index.GetMappedReferences()["https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml#/components/parameters/crs"]
@@ -197,12 +271,10 @@ paths:
_ = yaml.Unmarshal([]byte(spec), &rootNode)
c := CreateOpenAPIIndexConfig()
- c.RemoteURLHandler = httpClient.Get
index := NewSpecIndexWithConfig(&rootNode, c)
- assert.Len(t, index.GetReferenceIndexErrors(), 2)
- assert.Equal(t, `invalid URL escape "%$p"`, index.GetReferenceIndexErrors()[0].Error())
- assert.Equal(t, "component 'https://petstore3.swagger.io/api/v3/openapi.yaml#/paths/~1pet~1%$petId%7D/get/parameters' does not exist in the specification", index.GetReferenceIndexErrors()[1].Error())
+ assert.Len(t, index.GetReferenceIndexErrors(), 1)
+ assert.Equal(t, "component '#/paths/~1pet~1%$petId%7D/get/parameters' does not exist in the specification", index.GetReferenceIndexErrors()[0].Error())
}
func TestSpecIndex_LocateRemoteDocsWithEscapedCharacters(t *testing.T) {
@@ -220,287 +292,91 @@ paths:
_ = yaml.Unmarshal([]byte(spec), &rootNode)
c := CreateOpenAPIIndexConfig()
- c.RemoteURLHandler = httpClient.Get
index := NewSpecIndexWithConfig(&rootNode, c)
- assert.Len(t, index.GetReferenceIndexErrors(), 0)
+ assert.Len(t, index.GetReferenceIndexErrors(), 1)
}
-func TestGetRemoteDoc(t *testing.T) {
- // Mock HTTP server
- server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
- rw.Write([]byte(`OK`))
- }))
- // Close the server when test finishes
- defer server.Close()
+func TestFindComponent_LookupRolodex_GrabRoot(t *testing.T) {
- // Channel for data and error
- dataChan := make(chan []byte)
- errorChan := make(chan error)
-
- go getRemoteDoc(http.Get, server.URL, dataChan, errorChan)
-
- data := <-dataChan
- err := <-errorChan
-
- if err != nil {
- t.Errorf("Expected no error, got %v", err)
- }
-
- expectedData := []byte(`OK`)
- if !reflect.DeepEqual(data, expectedData) {
- t.Errorf("Expected %v, got %v", expectedData, data)
- }
-}
-
-type FS struct{}
-type FSBadOpen struct{}
-type FSBadRead struct{}
-
-type file struct {
- name string
- data string
-}
-
-type openFile struct {
- f *file
- offset int64
-}
-
-func (f *openFile) Close() error { return nil }
-func (f *openFile) Stat() (fs.FileInfo, error) { return nil, nil }
-func (f *openFile) Read(b []byte) (int, error) {
- if f.offset >= int64(len(f.f.data)) {
- return 0, io.EOF
- }
- if f.offset < 0 {
- return 0, &fs.PathError{Op: "read", Path: f.f.name, Err: fs.ErrInvalid}
- }
- n := copy(b, f.f.data[f.offset:])
- f.offset += int64(n)
- return n, nil
-}
-
-type badFileOpen struct{}
-
-func (f *badFileOpen) Close() error { return errors.New("bad file close") }
-func (f *badFileOpen) Stat() (fs.FileInfo, error) { return nil, errors.New("bad file stat") }
-func (f *badFileOpen) Read(b []byte) (int, error) {
- return 0, nil
-}
-
-type badFileRead struct {
- f *file
- offset int64
-}
-
-func (f *badFileRead) Close() error { return errors.New("bad file close") }
-func (f *badFileRead) Stat() (fs.FileInfo, error) { return nil, errors.New("bad file stat") }
-func (f *badFileRead) Read(b []byte) (int, error) {
- return 0, fmt.Errorf("bad file read")
-}
-
-func (f FS) Open(name string) (fs.File, error) {
-
- data := `type: string
-name: something
-in: query`
-
- return &openFile{&file{"test.yaml", data}, 0}, nil
-}
-
-func (f FSBadOpen) Open(name string) (fs.File, error) {
- return nil, errors.New("bad file open")
-}
-
-func (f FSBadRead) Open(name string) (fs.File, error) {
- return &badFileRead{&file{}, 0}, nil
-}
-
-func TestSpecIndex_UseRemoteHandler(t *testing.T) {
-
- spec := `openapi: 3.1.0
+ spec := `openapi: 3.0.2
info:
- title: Test Remote Handler
+ title: Test
version: 1.0.0
-paths:
- /test:
- get:
- parameters:
- - $ref: "https://i-dont-exist-but-it-does-not-matter.com/some-place/some-file.yaml"`
+components:
+ schemas:
+ thang:
+ type: object
+`
var rootNode yaml.Node
_ = yaml.Unmarshal([]byte(spec), &rootNode)
c := CreateOpenAPIIndexConfig()
- c.FSHandler = FS{}
index := NewSpecIndexWithConfig(&rootNode, c)
+ r := NewRolodex(c)
+ index.rolodex = r
+
+ n := index.lookupRolodex([]string{"bingobango"})
+
+ // if the reference is not found, it should return the root.
+ assert.NotNil(t, n)
- // extract crs param from index
- crsParam := index.GetMappedReferences()["https://i-dont-exist-but-it-does-not-matter.com/some-place/some-file.yaml"]
- assert.NotNil(t, crsParam)
- assert.True(t, crsParam.IsRemote)
- assert.Equal(t, "string", crsParam.Node.Content[1].Value)
- assert.Equal(t, "something", crsParam.Node.Content[3].Value)
- assert.Equal(t, "query", crsParam.Node.Content[5].Value)
}
-func TestSpecIndex_UseFileHandler(t *testing.T) {
+func TestFindComponentInRoot_GrabDocRoot(t *testing.T) {
- spec := `openapi: 3.1.0
+ spec := `openapi: 3.0.2
info:
- title: Test Remote Handler
+ title: Test
version: 1.0.0
-paths:
- /test:
- get:
- parameters:
- - $ref: "some-file-that-does-not-exist.yaml"`
+components:
+ schemas:
+ thang:
+ type: object
+`
var rootNode yaml.Node
_ = yaml.Unmarshal([]byte(spec), &rootNode)
c := CreateOpenAPIIndexConfig()
- c.FSHandler = FS{}
index := NewSpecIndexWithConfig(&rootNode, c)
+ r := NewRolodex(c)
+ index.rolodex = r
+
+ n := index.FindComponentInRoot("#/")
+
+ // if the reference is not found, it should return the root.
+ assert.NotNil(t, n)
- // extract crs param from index
- crsParam := index.GetMappedReferences()["some-file-that-does-not-exist.yaml"]
- assert.NotNil(t, crsParam)
- assert.True(t, crsParam.IsRemote)
- assert.Equal(t, "string", crsParam.Node.Content[1].Value)
- assert.Equal(t, "something", crsParam.Node.Content[3].Value)
- assert.Equal(t, "query", crsParam.Node.Content[5].Value)
}
-func TestSpecIndex_UseRemoteHandler_Error_Open(t *testing.T) {
+func TestFindComponent_LookupRolodex_NoURL(t *testing.T) {
- spec := `openapi: 3.1.0
+ spec := `openapi: 3.0.2
info:
- title: Test Remote Handler
+ title: Test
version: 1.0.0
-paths:
- /test:
- get:
- parameters:
- - $ref: "https://-i-cannot-be-opened.com"`
+components:
+ schemas:
+ thang:
+ type: object
+`
var rootNode yaml.Node
_ = yaml.Unmarshal([]byte(spec), &rootNode)
c := CreateOpenAPIIndexConfig()
- c.FSHandler = FSBadOpen{}
- c.RemoteURLHandler = httpClient.Get
index := NewSpecIndexWithConfig(&rootNode, c)
+ r := NewRolodex(c)
+ index.rolodex = r
+
+ n := index.lookupRolodex(nil)
+
+ // no url, no ref.
+ assert.Nil(t, n)
- assert.Len(t, index.GetReferenceIndexErrors(), 2)
- assert.Equal(t, "unable to open remote file: bad file open", index.GetReferenceIndexErrors()[0].Error())
- assert.Equal(t, "component 'https://-i-cannot-be-opened.com' does not exist in the specification", index.GetReferenceIndexErrors()[1].Error())
-}
-
-func TestSpecIndex_UseFileHandler_Error_Open(t *testing.T) {
-
- spec := `openapi: 3.1.0
-info:
- title: Test File Handler
- version: 1.0.0
-paths:
- /test:
- get:
- parameters:
- - $ref: "I-can-never-be-opened.yaml"`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(spec), &rootNode)
-
- c := CreateOpenAPIIndexConfig()
- c.FSHandler = FSBadOpen{}
- c.RemoteURLHandler = httpClient.Get
-
- index := NewSpecIndexWithConfig(&rootNode, c)
-
- assert.Len(t, index.GetReferenceIndexErrors(), 2)
- assert.Equal(t, "unable to open file: bad file open", index.GetReferenceIndexErrors()[0].Error())
- assert.Equal(t, "component 'I-can-never-be-opened.yaml' does not exist in the specification", index.GetReferenceIndexErrors()[1].Error())
-}
-
-func TestSpecIndex_UseRemoteHandler_Error_Read(t *testing.T) {
-
- spec := `openapi: 3.1.0
-info:
- title: Test Remote Handler
- version: 1.0.0
-paths:
- /test:
- get:
- parameters:
- - $ref: "https://-i-cannot-be-opened.com"`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(spec), &rootNode)
-
- c := CreateOpenAPIIndexConfig()
- c.FSHandler = FSBadRead{}
- c.RemoteURLHandler = httpClient.Get
-
- index := NewSpecIndexWithConfig(&rootNode, c)
-
- assert.Len(t, index.GetReferenceIndexErrors(), 2)
- assert.Equal(t, "unable to read remote file bytes: bad file read", index.GetReferenceIndexErrors()[0].Error())
- assert.Equal(t, "component 'https://-i-cannot-be-opened.com' does not exist in the specification", index.GetReferenceIndexErrors()[1].Error())
-}
-
-func TestSpecIndex_UseFileHandler_Error_Read(t *testing.T) {
-
- spec := `openapi: 3.1.0
-info:
- title: Test File Handler
- version: 1.0.0
-paths:
- /test:
- get:
- parameters:
- - $ref: "I-am-impossible-to-open-forever.yaml"`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(spec), &rootNode)
-
- c := CreateOpenAPIIndexConfig()
- c.FSHandler = FSBadRead{}
- c.RemoteURLHandler = httpClient.Get
-
- index := NewSpecIndexWithConfig(&rootNode, c)
-
- assert.Len(t, index.GetReferenceIndexErrors(), 2)
- assert.Equal(t, "unable to read file bytes: bad file read", index.GetReferenceIndexErrors()[0].Error())
- assert.Equal(t, "component 'I-am-impossible-to-open-forever.yaml' does not exist in the specification", index.GetReferenceIndexErrors()[1].Error())
-}
-
-func TestSpecIndex_UseFileHandler_ErrorReference(t *testing.T) {
-
- spec := `openapi: 3.1.0
-info:
- title: Test File Handler
- version: 1.0.0
-paths:
- /test:
- get:
- parameters:
- - $ref: "exisiting.yaml#/paths/~1pet~1%$petId%7D/get/parameters"`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(spec), &rootNode)
-
- c := CreateOpenAPIIndexConfig()
- c.FSHandler = FS{}
- c.RemoteURLHandler = httpClient.Get
-
- index := NewSpecIndexWithConfig(&rootNode, c)
-
- assert.Len(t, index.GetReferenceIndexErrors(), 2)
- assert.Equal(t, `invalid URL escape "%$p"`, index.GetReferenceIndexErrors()[0].Error())
- assert.Equal(t, "component 'exisiting.yaml#/paths/~1pet~1%$petId%7D/get/parameters' does not exist in the specification", index.GetReferenceIndexErrors()[1].Error())
}
diff --git a/index/index_model.go b/index/index_model.go
index 08f629b..c0c4f14 100644
--- a/index/index_model.go
+++ b/index/index_model.go
@@ -4,13 +4,14 @@
package index
import (
+ "github.com/pb33f/libopenapi/datamodel"
+ "golang.org/x/sync/syncmap"
"io/fs"
+ "log/slog"
"net/http"
"net/url"
- "os"
"sync"
- "golang.org/x/sync/syncmap"
"gopkg.in/yaml.v3"
)
@@ -24,15 +25,18 @@ const (
// Reference is a wrapper around *yaml.Node results to make things more manageable when performing
// algorithms on data models. the *yaml.Node def is just a bit too low level for tracking state.
type Reference struct {
+ FullDefinition string
Definition string
Name string
Node *yaml.Node
ParentNode *yaml.Node
- ParentNodeSchemaType string // used to determine if the parent node is an array or not.
+ ParentNodeSchemaType string // used to determine if the parent node is an array or not.
+ ParentNodeTypes []string // used to capture deep journeys, if any item is an array, we need to know.
Resolved bool
Circular bool
Seen bool
IsRemote bool
+ Index *SpecIndex // index that contains this reference.
RemoteLocation string
Path string // this won't always be available.
RequiredRefProperties map[string][]string // definition names (eg, #/definitions/One) to a list of required properties on this definition which reference that definition
@@ -40,8 +44,10 @@ type Reference struct {
// ReferenceMapped is a helper struct for mapped references put into sequence (we lose the key)
type ReferenceMapped struct {
- Reference *Reference
- Definition string
+ OriginalReference *Reference
+ Reference *Reference
+ Definition string
+ FullDefinition string
}
// SpecIndexConfig is a configuration struct for the SpecIndex introduced in 0.6.0 that provides an expandable
@@ -66,6 +72,7 @@ type SpecIndexConfig struct {
// If resolving remotely, the RemoteURLHandler will be used to fetch the remote document.
// If not set, the default http client will be used.
// Resolves [#132]: https://github.com/pb33f/libopenapi/issues/132
+	// Deprecated: Use the Rolodex instead.
RemoteURLHandler func(url string) (*http.Response, error)
// FSHandler is an entity that implements the `fs.FS` interface that will be used to fetch local or remote documents.
@@ -80,6 +87,7 @@ type SpecIndexConfig struct {
// it also overrides the RemoteURLHandler if set.
//
// Resolves[#85] https://github.com/pb33f/libopenapi/issues/85
+	// Deprecated: Use the Rolodex instead.
FSHandler fs.FS
// If resolving locally, the BasePath will be the root from which relative references will be resolved from
@@ -94,21 +102,53 @@ type SpecIndexConfig struct {
AllowRemoteLookup bool // Allow remote lookups for references. Defaults to false
AllowFileLookup bool // Allow file lookups for references. Defaults to false
- // ParentIndex allows the index to be created with knowledge of a parent, before being parsed. This allows
- // a breakglass to be used to prevent loops, checking the tree before recursing down.
- ParentIndex *SpecIndex
-
// If set to true, the index will not be built out, which means only the foundational elements will be
// parsed and added to the index. This is useful to avoid building out an index if the specification is
- // broken up into references and you want it fully resolved.
+	// broken up into references and you want it fully resolved first.
//
// Use the `BuildIndex()` method on the index to build it out once resolved/ready.
AvoidBuildIndex bool
+	// If set to true, the index will not check for circular references automatically. The check should be triggered
+	// manually, otherwise resolving may explode.
+ AvoidCircularReferenceCheck bool
+
+ // Logger is a logger that will be used for logging errors and warnings. If not set, the default logger
+ // will be used, set to the Error level.
+ Logger *slog.Logger
+
+ // SpecInfo is a pointer to the SpecInfo struct that contains the root node and the spec version. It's the
+ // struct that was used to create this index.
+ SpecInfo *datamodel.SpecInfo
+
+ // Rolodex is what provides all file and remote based lookups. Without the rolodex, no remote or file lookups
+	// can be used. Normally you won't need to worry about setting this, as each root document gets a rolodex
+ // of its own automatically.
+ Rolodex *Rolodex
+
+	// The absolute path to the spec file for the index, either an http link or a file path.
+	// If the index is for a single-file spec, then the root will be empty.
+ SpecAbsolutePath string
+
+ // IgnorePolymorphicCircularReferences will skip over checking for circular references in polymorphic schemas.
+	// A polymorphic schema is any schema that is composed of other schemas using references via `oneOf`, `anyOf` or `allOf`.
+ // This is disabled by default, which means polymorphic circular references will be checked.
+ IgnorePolymorphicCircularReferences bool
+
+ // IgnoreArrayCircularReferences will skip over checking for circular references in arrays. Sometimes a circular
+ // reference is required to describe a data-shape correctly. Often those shapes are valid circles if the
+ // type of the schema implementing the loop is an array. An empty array would technically break the loop.
+ // So if libopenapi is returning circular references for this use case, then this option should be enabled.
+ // this is disabled by default, which means array circular references will be checked.
+ IgnoreArrayCircularReferences bool
+
+ // SkipDocumentCheck will skip the document check when building the index. A document check will look for an 'openapi'
+ // or 'swagger' node in the root of the document. If it's not found, then the document is not a valid OpenAPI or
+ // the file is a JSON Schema. To allow JSON Schema files to be included set this to true.
+ SkipDocumentCheck bool
+
// private fields
- seenRemoteSources *syncmap.Map
- remoteLock *sync.Mutex
- uri []string
+ uri []string
}
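
A rough sketch of combining the new configuration fields above (illustrative only, not part of this change; rootNode is assumed to be an already parsed *yaml.Node, and the snippet assumes "log/slog" and "os" are imported):

    cfg := CreateOpenAPIIndexConfig()
    cfg.AvoidCircularReferenceCheck = true         // run the circular check manually later
    cfg.IgnorePolymorphicCircularReferences = true // don't flag oneOf/anyOf/allOf loops
    cfg.IgnoreArrayCircularReferences = true       // terminable array loops are acceptable
    cfg.SkipDocumentCheck = true                   // allow plain JSON Schema files
    cfg.Logger = slog.New(slog.NewTextHandler(os.Stderr, nil))
    idx := NewSpecIndexWithConfig(&rootNode, cfg)
    _ = idx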
// CreateOpenAPIIndexConfig is a helper function to create a new SpecIndexConfig with the AllowRemoteLookup and
@@ -116,12 +156,9 @@ type SpecIndexConfig struct {
//
// The default BasePath is the current working directory.
func CreateOpenAPIIndexConfig() *SpecIndexConfig {
- cw, _ := os.Getwd()
return &SpecIndexConfig{
- BasePath: cw,
AllowRemoteLookup: true,
AllowFileLookup: true,
- seenRemoteSources: &syncmap.Map{},
}
}
@@ -130,19 +167,15 @@ func CreateOpenAPIIndexConfig() *SpecIndexConfig {
//
// The default BasePath is the current working directory.
func CreateClosedAPIIndexConfig() *SpecIndexConfig {
- cw, _ := os.Getwd()
- return &SpecIndexConfig{
- BasePath: cw,
- AllowRemoteLookup: false,
- AllowFileLookup: false,
- seenRemoteSources: &syncmap.Map{},
- }
+ return &SpecIndexConfig{}
}
// SpecIndex is a complete pre-computed index of the entire specification. Numbers are pre-calculated and
// quick direct access to paths, operations, tags are all available. No need to walk the entire node tree in rules,
// everything is pre-walked if you need it.
type SpecIndex struct {
+ specAbsolutePath string
+ rolodex *Rolodex // the rolodex is used to fetch remote and file based documents.
allRefs map[string]*Reference // all (deduplicated) refs
rawSequencedRefs []*Reference // all raw references in sequence as they are scanned, not deduped.
linesWithRefs map[int]bool // lines that link to references.
@@ -178,7 +211,7 @@ type SpecIndex struct {
rootSecurity []*Reference // root security definitions.
rootSecurityNode *yaml.Node // root security node.
refsWithSiblings map[string]Reference // references with sibling elements next to them
- pathRefsLock sync.Mutex // create lock for all refs maps, we want to build data as fast as we can
+ pathRefsLock sync.RWMutex // create lock for all refs maps, we want to build data as fast as we can
externalDocumentsCount int // number of externalDocument nodes found
operationTagsCount int // number of unique tags in operations
globalTagsCount int // number of global tags defined
@@ -228,36 +261,49 @@ type SpecIndex struct {
enumCount int
descriptionCount int
summaryCount int
- seenRemoteSources map[string]*yaml.Node
- seenLocalSources map[string]*yaml.Node
refLock sync.Mutex
- sourceLock sync.Mutex
componentLock sync.RWMutex
- externalLock sync.RWMutex
errorLock sync.RWMutex
circularReferences []*CircularReferenceResult // only available when the resolver has been used.
allowCircularReferences bool // decide if you want to error out, or allow circular references, default is false.
- relativePath string // relative path of the spec file.
config *SpecIndexConfig // configuration for the index
- httpClient *http.Client
componentIndexChan chan bool
polyComponentIndexChan chan bool
-
- // when things get complex (looking at you digital ocean) then we need to know
- // what we have seen across indexes, so we need to be able to travel back up to the root
- // cto avoid re-downloading sources.
- parentIndex *SpecIndex
- uri []string
- children []*SpecIndex
+ resolver *Resolver
+ cache *syncmap.Map
+ built bool
+ uri []string
+ logger *slog.Logger
+ nodeMap map[int]map[int]*yaml.Node
+ nodeMapCompleted chan bool
}
-func (index *SpecIndex) AddChild(child *SpecIndex) {
- index.children = append(index.children, child)
+// GetResolver returns the resolver for this index.
+func (index *SpecIndex) GetResolver() *Resolver {
+ return index.resolver
}
-// GetChildren returns the children of this index.
-func (index *SpecIndex) GetChildren() []*SpecIndex {
- return index.children
+// GetConfig returns the SpecIndexConfig for this index.
+func (index *SpecIndex) GetConfig() *SpecIndexConfig {
+ return index.config
+}
+
+func (index *SpecIndex) SetCache(sync *syncmap.Map) {
+ index.cache = sync
+}
+
+func (index *SpecIndex) GetCache() *syncmap.Map {
+ return index.cache
+}
+
+// SetAbsolutePath sets the absolute path to the spec file for the index, either an http link or a file path.
+func (index *SpecIndex) SetAbsolutePath(absolutePath string) {
+ index.specAbsolutePath = absolutePath
+}
+
+// GetSpecAbsolutePath returns the absolute path to the spec file for the index, either an http link or a file path.
+func (index *SpecIndex) GetSpecAbsolutePath() string {
+ return index.specAbsolutePath
}
// ExternalLookupFunction is for lookup functions that take a JSONSchema reference and tries to find that node in the
diff --git a/index/index_model_test.go b/index/index_model_test.go
index 3bd8e93..4eafefc 100644
--- a/index/index_model_test.go
+++ b/index/index_model_test.go
@@ -8,18 +8,9 @@ import (
"testing"
)
-func TestSpecIndex_Children(t *testing.T) {
+func TestSpecIndex_GetConfig(t *testing.T) {
idx1 := new(SpecIndex)
- idx2 := new(SpecIndex)
- idx3 := new(SpecIndex)
- idx4 := new(SpecIndex)
- idx5 := new(SpecIndex)
- idx1.AddChild(idx2)
- idx1.AddChild(idx3)
- idx3.AddChild(idx4)
- idx4.AddChild(idx5)
- assert.Equal(t, 2, len(idx1.GetChildren()))
- assert.Equal(t, 1, len(idx3.GetChildren()))
- assert.Equal(t, 1, len(idx4.GetChildren()))
- assert.Equal(t, 0, len(idx5.GetChildren()))
+ c := SpecIndexConfig{}
+ idx1.config = &c
+ assert.Equal(t, &c, idx1.GetConfig())
}
diff --git a/index/index_utils.go b/index/index_utils.go
index e74400c..8703c43 100644
--- a/index/index_utils.go
+++ b/index/index_utils.go
@@ -5,9 +5,7 @@ package index
import (
"gopkg.in/yaml.v3"
- "net/http"
"strings"
- "time"
)
func isHttpMethod(val string) bool {
@@ -30,21 +28,6 @@ func isHttpMethod(val string) bool {
return false
}
-func DetermineReferenceResolveType(ref string) int {
- if ref != "" && ref[0] == '#' {
- return LocalResolve
- }
- if ref != "" && len(ref) >= 5 && (ref[:5] == "https" || ref[:5] == "http:") {
- return HttpResolve
- }
- if strings.Contains(ref, ".json") ||
- strings.Contains(ref, ".yaml") ||
- strings.Contains(ref, ".yml") {
- return FileResolve
- }
- return -1
-}
-
func boostrapIndexCollections(rootNode *yaml.Node, index *SpecIndex) {
index.root = rootNode
index.allRefs = make(map[string]*Reference)
@@ -82,10 +65,7 @@ func boostrapIndexCollections(rootNode *yaml.Node, index *SpecIndex) {
index.securityRequirementRefs = make(map[string]map[string][]*Reference)
index.polymorphicRefs = make(map[string]*Reference)
index.refsWithSiblings = make(map[string]Reference)
- index.seenRemoteSources = make(map[string]*yaml.Node)
- index.seenLocalSources = make(map[string]*yaml.Node)
index.opServersRefs = make(map[string]map[string][]*Reference)
- index.httpClient = &http.Client{Timeout: time.Duration(5) * time.Second}
index.componentIndexChan = make(chan bool)
index.polyComponentIndexChan = make(chan bool)
}
diff --git a/index/map_index_nodes.go b/index/map_index_nodes.go
new file mode 100644
index 0000000..cdf6260
--- /dev/null
+++ b/index/map_index_nodes.go
@@ -0,0 +1,92 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "gopkg.in/yaml.v3"
+)
+
+type nodeMap struct {
+ line int
+ column int
+ node *yaml.Node
+}
+
+// NodeOrigin represents where a node has come from within a specification. This is not useful for single file specs,
+// but becomes very, very important when dealing with exploded specifications, and we need to know where in the mass
+// of files a node has come from.
+type NodeOrigin struct {
+ // Node is the node in question
+ Node *yaml.Node `json:"-"`
+
+	// Line is the original line where the node was found in the original file
+ Line int `json:"line" yaml:"line"`
+
+	// Column is the original column where the node was found in the original file
+ Column int `json:"column" yaml:"column"`
+
+	// AbsoluteLocation is the absolute path the reference was extracted from.
+ // This can either be an absolute path to a file, or a URL.
+ AbsoluteLocation string `json:"absolute_location" yaml:"absolute_location"`
+
+	// Index is the index in which the node was located.
+ Index *SpecIndex `json:"-" yaml:"-"`
+}
+
+// GetNode returns a node from the spec based on a line and column. The second return value is true
+// if the node was found, false if not.
+func (index *SpecIndex) GetNode(line int, column int) (*yaml.Node, bool) {
+ if index.nodeMap[line] == nil {
+ return nil, false
+ }
+ node := index.nodeMap[line][column]
+ return node, node != nil
+}
+
+// MapNodes maps all nodes in the document to a map of line/column to node.
+func (index *SpecIndex) MapNodes(rootNode *yaml.Node) {
+ cruising := make(chan bool)
+ nodeChan := make(chan *nodeMap)
+ go func(nodeChan chan *nodeMap) {
+ for {
+ select {
+ case node, ok := <-nodeChan:
+ if !ok {
+ cruising <- true
+ return
+ }
+ if index.nodeMap[node.line] == nil {
+ index.nodeMap[node.line] = make(map[int]*yaml.Node)
+ }
+ index.nodeMap[node.line][node.column] = node.node
+ }
+ }
+ }(nodeChan)
+ go enjoyALuxuryCruise(rootNode, nodeChan, true)
+ <-cruising
+ close(cruising)
+ index.nodeMapCompleted <- true
+ close(index.nodeMapCompleted)
+}
+
+func enjoyALuxuryCruise(node *yaml.Node, nodeChan chan *nodeMap, root bool) {
+ if len(node.Content) > 0 {
+ for _, child := range node.Content {
+ nodeChan <- &nodeMap{
+ line: child.Line,
+ column: child.Column,
+ node: child,
+ }
+ enjoyALuxuryCruise(child, nodeChan, false)
+ }
+ }
+ nodeChan <- &nodeMap{
+ line: node.Line,
+ column: node.Column,
+ node: node,
+ }
+ if root {
+ close(nodeChan)
+ }
+}
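
A quick sketch of the node map in use (illustrative only; it reads the package-private nodeMapCompleted channel, so it only works inside the index package, just as the test below does, and it assumes rootNode is a parsed *yaml.Node):

    idx := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
    <-idx.nodeMapCompleted // wait for the background mapping goroutine to finish
    if node, ok := idx.GetNode(10, 3); ok {
        fmt.Println(node.Value) // the exact *yaml.Node parsed at line 10, column 3
    }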
diff --git a/index/map_index_nodes_test.go b/index/map_index_nodes_test.go
new file mode 100644
index 0000000..62f74fb
--- /dev/null
+++ b/index/map_index_nodes_test.go
@@ -0,0 +1,86 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "github.com/pb33f/libopenapi/utils"
+ "github.com/stretchr/testify/assert"
+ "github.com/vmware-labs/yaml-jsonpath/pkg/yamlpath"
+ "gopkg.in/yaml.v3"
+ "os"
+ "reflect"
+ "testing"
+)
+
+func TestSpecIndex_MapNodes(t *testing.T) {
+
+ petstore, _ := os.ReadFile("../test_specs/petstorev3.json")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(petstore, &rootNode)
+
+ index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
+
+ <-index.nodeMapCompleted
+
+ // look up a node and make sure they match exactly (same pointer)
+ path, _ := yamlpath.NewPath("$.paths./pet.put")
+ nodes, _ := path.Find(&rootNode)
+
+ keyNode, valueNode := utils.FindKeyNodeTop("operationId", nodes[0].Content)
+ mappedKeyNode, _ := index.GetNode(keyNode.Line, keyNode.Column)
+ mappedValueNode, _ := index.GetNode(valueNode.Line, valueNode.Column)
+
+ assert.Equal(t, keyNode, mappedKeyNode)
+ assert.Equal(t, valueNode, mappedValueNode)
+
+ // make sure the pointers are the same
+ p1 := reflect.ValueOf(keyNode).Pointer()
+ p2 := reflect.ValueOf(mappedKeyNode).Pointer()
+ assert.Equal(t, p1, p2)
+
+ // check missing line
+ var ok bool
+ mappedKeyNode, ok = index.GetNode(999, 999)
+ assert.False(t, ok)
+ assert.Nil(t, mappedKeyNode)
+
+ mappedKeyNode, ok = index.GetNode(12, 999)
+ assert.False(t, ok)
+ assert.Nil(t, mappedKeyNode)
+
+ index.nodeMap[15] = nil
+ mappedKeyNode, ok = index.GetNode(15, 999)
+ assert.False(t, ok)
+ assert.Nil(t, mappedKeyNode)
+}
+
+func BenchmarkSpecIndex_MapNodes(b *testing.B) {
+
+ petstore, _ := os.ReadFile("../test_specs/petstorev3.json")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(petstore, &rootNode)
+ path, _ := yamlpath.NewPath("$.paths./pet.put")
+
+ for i := 0; i < b.N; i++ {
+
+ index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
+
+ <-index.nodeMapCompleted
+
+ // look up a node and make sure they match exactly (same pointer)
+ nodes, _ := path.Find(&rootNode)
+
+ keyNode, valueNode := utils.FindKeyNodeTop("operationId", nodes[0].Content)
+ mappedKeyNode, _ := index.GetNode(keyNode.Line, keyNode.Column)
+ mappedValueNode, _ := index.GetNode(valueNode.Line, valueNode.Column)
+
+ assert.Equal(b, keyNode, mappedKeyNode)
+ assert.Equal(b, valueNode, mappedValueNode)
+
+ // make sure the pointers are the same
+ p1 := reflect.ValueOf(keyNode).Pointer()
+ p2 := reflect.ValueOf(mappedKeyNode).Pointer()
+ assert.Equal(b, p1, p2)
+ }
+}
diff --git a/index/resolver.go b/index/resolver.go
new file mode 100644
index 0000000..2ab9e96
--- /dev/null
+++ b/index/resolver.go
@@ -0,0 +1,814 @@
+// Copyright 2022 Dave Shanley / Quobix
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "errors"
+ "fmt"
+ "github.com/pb33f/libopenapi/utils"
+ "golang.org/x/exp/slices"
+ "gopkg.in/yaml.v3"
+ "net/url"
+ "path/filepath"
+ "strings"
+)
+
+// ResolvingError represents an issue the resolver had trying to stitch the tree together.
+type ResolvingError struct {
+ // ErrorRef is the error thrown by the resolver
+ ErrorRef error
+
+ // Node is the *yaml.Node reference that contains the resolving error
+ Node *yaml.Node
+
+ // Path is the shortened journey taken by the resolver
+ Path string
+
+	// CircularReference is set if the error relates to a circular reference.
+ CircularReference *CircularReferenceResult
+}
+
+func (r *ResolvingError) Error() string {
+ errs := utils.UnwrapErrors(r.ErrorRef)
+ var msgs []string
+ for _, e := range errs {
+ var idxErr *IndexingError
+ if errors.As(e, &idxErr) {
+ msgs = append(msgs, fmt.Sprintf("%s: %s [%d:%d]", idxErr.Error(),
+ idxErr.Path, idxErr.Node.Line, idxErr.Node.Column))
+ } else {
+ var l, c int
+ if r.Node != nil {
+ l = r.Node.Line
+ c = r.Node.Column
+ }
+ msgs = append(msgs, fmt.Sprintf("%s: %s [%d:%d]", e.Error(),
+ r.Path, l, c))
+ }
+ }
+ return strings.Join(msgs, "\n")
+}
+
+// Resolver will use a *index.SpecIndex to stitch together a resolved root tree using all the discovered
+// references in the doc.
+type Resolver struct {
+ specIndex *SpecIndex
+ resolvedRoot *yaml.Node
+ resolvingErrors []*ResolvingError
+ circularReferences []*CircularReferenceResult
+ ignoredPolyReferences []*CircularReferenceResult
+ ignoredArrayReferences []*CircularReferenceResult
+ referencesVisited int
+ indexesVisited int
+ journeysTaken int
+ relativesSeen int
+ IgnorePoly bool
+ IgnoreArray bool
+}
+
+// NewResolver will create a new resolver from a *index.SpecIndex
+func NewResolver(index *SpecIndex) *Resolver {
+ if index == nil {
+ return nil
+ }
+ r := &Resolver{
+ specIndex: index,
+ resolvedRoot: index.GetRootNode(),
+ }
+ index.resolver = r
+ return r
+}
+
+// GetIgnoredCircularPolyReferences returns all ignored circular references that are polymorphic
+func (resolver *Resolver) GetIgnoredCircularPolyReferences() []*CircularReferenceResult {
+ return resolver.ignoredPolyReferences
+}
+
+// GetIgnoredCircularArrayReferences returns all ignored circular references that are arrays
+func (resolver *Resolver) GetIgnoredCircularArrayReferences() []*CircularReferenceResult {
+ return resolver.ignoredArrayReferences
+}
+
+// GetResolvingErrors returns all errors found during resolving
+func (resolver *Resolver) GetResolvingErrors() []*ResolvingError {
+ return resolver.resolvingErrors
+}
+
+func (resolver *Resolver) GetCircularReferences() []*CircularReferenceResult {
+ return resolver.GetSafeCircularReferences()
+}
+
+// GetSafeCircularReferences returns all circular references found that are not infinite loops.
+func (resolver *Resolver) GetSafeCircularReferences() []*CircularReferenceResult {
+ var refs []*CircularReferenceResult
+ for _, ref := range resolver.circularReferences {
+ if !ref.IsInfiniteLoop {
+ refs = append(refs, ref)
+ }
+ }
+ return refs
+}
+
+// GetInfiniteCircularReferences returns all circular reference errors found that are infinite / unrecoverable
+func (resolver *Resolver) GetInfiniteCircularReferences() []*CircularReferenceResult {
+ var refs []*CircularReferenceResult
+ for _, ref := range resolver.circularReferences {
+ if ref.IsInfiniteLoop {
+ refs = append(refs, ref)
+ }
+ }
+ return refs
+}
+
+// GetPolymorphicCircularErrors returns all circular errors that stem from polymorphism
+func (resolver *Resolver) GetPolymorphicCircularErrors() []*CircularReferenceResult {
+ var res []*CircularReferenceResult
+ for i := range resolver.circularReferences {
+ if !resolver.circularReferences[i].IsInfiniteLoop {
+ continue
+ }
+ if !resolver.circularReferences[i].IsPolymorphicResult {
+ continue
+ }
+ res = append(res, resolver.circularReferences[i])
+ }
+ return res
+}
+
+// GetNonPolymorphicCircularErrors returns all circular errors that DO NOT stem from polymorphism
+func (resolver *Resolver) GetNonPolymorphicCircularErrors() []*CircularReferenceResult {
+ var res []*CircularReferenceResult
+ for i := range resolver.circularReferences {
+ if !resolver.circularReferences[i].IsInfiniteLoop {
+ continue
+ }
+
+ if !resolver.circularReferences[i].IsPolymorphicResult {
+ res = append(res, resolver.circularReferences[i])
+ }
+ }
+ return res
+}
+
+// IgnorePolymorphicCircularReferences will ignore any circular references that are polymorphic (oneOf, anyOf, allOf)
+// This must be set before any resolving is done.
+func (resolver *Resolver) IgnorePolymorphicCircularReferences() {
+ resolver.IgnorePoly = true
+}
+
+// IgnoreArrayCircularReferences will ignore any circular references that stem from arrays. This must be set before
+// any resolving is done.
+func (resolver *Resolver) IgnoreArrayCircularReferences() {
+ resolver.IgnoreArray = true
+}
+
+// GetJourneysTaken returns the number of journeys taken by the resolver
+func (resolver *Resolver) GetJourneysTaken() int {
+ return resolver.journeysTaken
+}
+
+// GetReferenceVisited returns the number of references visited by the resolver
+func (resolver *Resolver) GetReferenceVisited() int {
+ return resolver.referencesVisited
+}
+
+// GetIndexesVisited returns the number of indexes visited by the resolver
+func (resolver *Resolver) GetIndexesVisited() int {
+ return resolver.indexesVisited
+}
+
+// GetRelativesSeen returns the number of siblings (nodes at the same level) seen for each reference found.
+func (resolver *Resolver) GetRelativesSeen() int {
+ return resolver.relativesSeen
+}
+
+// Resolve will resolve the specification; everything that is not polymorphic and not circular will be resolved.
+// The resolved data can get big, as it results in a massive duplication of data. This is a destructive method and will permanently
+// re-organize the node tree. Make sure you have copied your original tree before running this (if you want to preserve
+// the original data).
+func (resolver *Resolver) Resolve() []*ResolvingError {
+
+ visitIndex(resolver, resolver.specIndex)
+
+ for _, circRef := range resolver.circularReferences {
+ // If the circular reference is not required, we can ignore it, as it's a terminable loop rather than an infinite one
+ if !circRef.IsInfiniteLoop {
+ continue
+ }
+
+ resolver.resolvingErrors = append(resolver.resolvingErrors, &ResolvingError{
+ ErrorRef: fmt.Errorf("infinite circular reference detected: %s", circRef.Start.Name),
+ Node: circRef.LoopPoint.Node,
+ Path: circRef.GenerateJourneyPath(),
+ })
+ }
+
+ return resolver.resolvingErrors
+}
+
+// CheckForCircularReferences checks for circular references without resolving anything; a non-destructive run.
+func (resolver *Resolver) CheckForCircularReferences() []*ResolvingError {
+ visitIndexWithoutDamagingIt(resolver, resolver.specIndex)
+ for _, circRef := range resolver.circularReferences {
+ // If the circular reference is not required, we can ignore it, as it's a terminable loop rather than an infinite one
+ if !circRef.IsInfiniteLoop {
+ continue
+ }
+
+ resolver.resolvingErrors = append(resolver.resolvingErrors, &ResolvingError{
+ ErrorRef: fmt.Errorf("infinite circular reference detected: %s", circRef.Start.Name),
+ Node: circRef.LoopPoint.Node,
+ Path: circRef.GenerateJourneyPath(),
+ CircularReference: circRef,
+ })
+ }
+ // update our index with any circular refs we found.
+ resolver.specIndex.SetCircularReferences(resolver.circularReferences)
+ return resolver.resolvingErrors
+}
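
A non-destructive circular reference check, roughly as the resolver above is intended to be driven (illustrative only; idx is assumed to be an already constructed *SpecIndex and "fmt" is imported):

    resolver := NewResolver(idx)
    resolver.IgnoreArrayCircularReferences()      // treat terminable array loops as safe
    errs := resolver.CheckForCircularReferences() // nothing is resolved or mutated
    for _, e := range errs {
        fmt.Println(e.Error())
    }
    fmt.Println("infinite loops:", len(resolver.GetInfiniteCircularReferences()))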
+
+func visitIndexWithoutDamagingIt(res *Resolver, idx *SpecIndex) {
+ mapped := idx.GetMappedReferencesSequenced()
+ mappedIndex := idx.GetMappedReferences()
+ res.indexesVisited++
+ for _, ref := range mapped {
+ seenReferences := make(map[string]bool)
+ var journey []*Reference
+ res.journeysTaken++
+ res.VisitReference(ref.Reference, seenReferences, journey, false)
+ }
+ schemas := idx.GetAllComponentSchemas()
+ for s, schemaRef := range schemas {
+ if mappedIndex[s] == nil {
+ seenReferences := make(map[string]bool)
+ var journey []*Reference
+ res.journeysTaken++
+ res.VisitReference(schemaRef, seenReferences, journey, false)
+ }
+ }
+}
+
+func visitIndex(res *Resolver, idx *SpecIndex) {
+ mapped := idx.GetMappedReferencesSequenced()
+ mappedIndex := idx.GetMappedReferences()
+ res.indexesVisited++
+ for _, ref := range mapped {
+ seenReferences := make(map[string]bool)
+ var journey []*Reference
+ res.journeysTaken++
+ if ref != nil && ref.Reference != nil {
+ n := res.VisitReference(ref.Reference, seenReferences, journey, true)
+ ref.Reference.Node.Content = n
+ if !ref.Reference.Circular {
+ ref.OriginalReference.Node.Content = n
+ }
+ }
+ }
+
+ schemas := idx.GetAllComponentSchemas()
+ for s, schemaRef := range schemas {
+ if mappedIndex[s] == nil {
+ seenReferences := make(map[string]bool)
+ var journey []*Reference
+ res.journeysTaken++
+ schemaRef.Node.Content = res.VisitReference(schemaRef, seenReferences, journey, true)
+ }
+ }
+
+ schemas = idx.GetAllSecuritySchemes()
+ for s, schemaRef := range schemas {
+ if mappedIndex[s] == nil {
+ seenReferences := make(map[string]bool)
+ var journey []*Reference
+ res.journeysTaken++
+ schemaRef.Node.Content = res.VisitReference(schemaRef, seenReferences, journey, true)
+ }
+ }
+
+ // map everything
+ for _, sequenced := range idx.GetAllSequencedReferences() {
+ locatedDef := mappedIndex[sequenced.Definition]
+ if locatedDef != nil {
+ if !locatedDef.Circular && locatedDef.Seen {
+ sequenced.Node.Content = locatedDef.Node.Content
+ }
+ }
+ }
+}
+
+// VisitReference will visit a reference as part of a journey and will return resolved nodes.
+func (resolver *Resolver) VisitReference(ref *Reference, seen map[string]bool, journey []*Reference, resolve bool) []*yaml.Node {
+ resolver.referencesVisited++
+ if resolve && ref.Seen {
+ if ref.Resolved {
+ return ref.Node.Content
+ }
+ }
+ if !resolve && ref.Seen {
+ return ref.Node.Content
+ }
+
+ journey = append(journey, ref)
+ relatives := resolver.extractRelatives(ref, ref.Node, nil, seen, journey, resolve)
+
+ seen = make(map[string]bool)
+
+ seen[ref.Definition] = true
+ for _, r := range relatives {
+		// check if we have seen this on the journey before; if so, it's circular
+ skip := false
+ for i, j := range journey {
+ if j.FullDefinition == r.FullDefinition {
+
+ var foundDup *Reference
+ foundRef, _ := resolver.specIndex.SearchIndexForReferenceByReference(r)
+ if foundRef != nil {
+ foundDup = foundRef
+ }
+
+ var circRef *CircularReferenceResult
+ if !foundDup.Circular {
+ loop := append(journey, foundDup)
+
+ visitedDefinitions := make(map[string]bool)
+ isInfiniteLoop, _ := resolver.isInfiniteCircularDependency(foundDup, visitedDefinitions, nil)
+
+ isArray := false
+ if r.ParentNodeSchemaType == "array" || slices.Contains(r.ParentNodeTypes, "array") {
+ isArray = true
+ }
+ circRef = &CircularReferenceResult{
+ Journey: loop,
+ Start: foundDup,
+ LoopIndex: i,
+ LoopPoint: foundDup,
+ IsArrayResult: isArray,
+ IsInfiniteLoop: isInfiniteLoop,
+ }
+
+ if resolver.IgnoreArray && isArray {
+ resolver.ignoredArrayReferences = append(resolver.ignoredArrayReferences, circRef)
+ } else {
+ resolver.circularReferences = append(resolver.circularReferences, circRef)
+ }
+
+ foundDup.Seen = true
+ foundDup.Circular = true
+ }
+ skip = true
+ }
+ }
+
+ if !skip {
+ var original *Reference
+ foundRef, _ := resolver.specIndex.SearchIndexForReferenceByReference(r)
+ if foundRef != nil {
+ original = foundRef
+ }
+ resolved := resolver.VisitReference(original, seen, journey, resolve)
+ if resolve && !original.Circular {
+ ref.Resolved = true
+ r.Resolved = true
+ r.Node.Content = resolved // this is where we perform the actual resolving.
+ }
+ r.Seen = true
+ ref.Seen = true
+ }
+ }
+
+ ref.Seen = true
+
+ return ref.Node.Content
+}
+
+func (resolver *Resolver) isInfiniteCircularDependency(ref *Reference, visitedDefinitions map[string]bool, initialRef *Reference) (bool, map[string]bool) {
+ if ref == nil {
+ return false, visitedDefinitions
+ }
+
+ for refDefinition := range ref.RequiredRefProperties {
+ r, _ := resolver.specIndex.SearchIndexForReference(refDefinition)
+ if initialRef != nil && initialRef.FullDefinition == r.FullDefinition {
+ return true, visitedDefinitions
+ }
+ if len(visitedDefinitions) > 0 && ref.FullDefinition == r.FullDefinition {
+ return true, visitedDefinitions
+ }
+
+ if visitedDefinitions[r.FullDefinition] {
+ continue
+ }
+
+ visitedDefinitions[r.Definition] = true
+
+ ir := initialRef
+ if ir == nil {
+ ir = ref
+ }
+
+ var isChildICD bool
+ isChildICD, visitedDefinitions = resolver.isInfiniteCircularDependency(r, visitedDefinitions, ir)
+ if isChildICD {
+ return true, visitedDefinitions
+ }
+ }
+
+ return false, visitedDefinitions
+}
+
+func (resolver *Resolver) extractRelatives(ref *Reference, node, parent *yaml.Node,
+ foundRelatives map[string]bool,
+ journey []*Reference, resolve bool) []*Reference {
+
+ if len(journey) > 100 {
+ return nil
+ }
+
+ var found []*Reference
+
+ if len(node.Content) > 0 {
+ for i, n := range node.Content {
+ if utils.IsNodeMap(n) || utils.IsNodeArray(n) {
+
+ found = append(found, resolver.extractRelatives(ref, n, node, foundRelatives, journey, resolve)...)
+ }
+
+ if i%2 == 0 && n.Value == "$ref" {
+
+ if !utils.IsNodeStringValue(node.Content[i+1]) {
+ continue
+ }
+
+ value := node.Content[i+1].Value
+ var locatedRef *Reference
+ var fullDef string
+ var definition string
+
+ // explode value
+ exp := strings.Split(value, "#/")
+ if len(exp) == 2 {
+ definition = fmt.Sprintf("#/%s", exp[1])
+ if exp[0] != "" {
+
+ if strings.HasPrefix(exp[0], "http") {
+ fullDef = value
+ } else {
+ if filepath.IsAbs(exp[0]) {
+ fullDef = value
+ } else {
+ if strings.HasPrefix(ref.FullDefinition, "http") {
+
+ // split the http URI into parts
+ httpExp := strings.Split(ref.FullDefinition, "#/")
+
+ u, _ := url.Parse(httpExp[0])
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(u.Path), exp[0]))
+ u.Path = abs
+ u.Fragment = ""
+ fullDef = fmt.Sprintf("%s#/%s", u.String(), exp[1])
+
+ } else {
+
+ // split the referring ref full def into parts
+ fileDef := strings.Split(ref.FullDefinition, "#/")
+
+ // extract the location of the ref and build a full def path.
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(fileDef[0]), exp[0]))
+ fullDef = fmt.Sprintf("%s#/%s", abs, exp[1])
+
+ }
+ }
+ }
+ } else {
+ // local component, full def is based on passed in ref
+ if strings.HasPrefix(ref.FullDefinition, "http") {
+
+ // split the http URI into parts
+ httpExp := strings.Split(ref.FullDefinition, "#/")
+
+ // parse a URL from the full def
+ u, _ := url.Parse(httpExp[0])
+
+ // extract the location of the ref and build a full def path.
+ fullDef = fmt.Sprintf("%s#/%s", u.String(), exp[1])
+
+ } else {
+ // split the full def into parts
+ fileDef := strings.Split(ref.FullDefinition, "#/")
+ fullDef = fmt.Sprintf("%s#/%s", fileDef[0], exp[1])
+ }
+ }
+ } else {
+
+ definition = value
+
+ // if the reference is a http link
+ if strings.HasPrefix(value, "http") {
+ fullDef = value
+ } else {
+
+ if filepath.IsAbs(value) {
+ fullDef = value
+ } else {
+
+ // split the full def into parts
+ fileDef := strings.Split(ref.FullDefinition, "#/")
+
+ // is the file def a http link?
+ if strings.HasPrefix(fileDef[0], "http") {
+ u, _ := url.Parse(fileDef[0])
+ path, _ := filepath.Abs(filepath.Join(filepath.Dir(u.Path), exp[0]))
+ u.Path = path
+ fullDef = u.String()
+
+ } else {
+ fullDef, _ = filepath.Abs(filepath.Join(filepath.Dir(fileDef[0]), exp[0]))
+ }
+ }
+ }
+ }
+
+ searchRef := &Reference{
+ Definition: definition,
+ FullDefinition: fullDef,
+ RemoteLocation: ref.RemoteLocation,
+ IsRemote: true,
+ }
+
+ locatedRef, _ = resolver.specIndex.SearchIndexForReferenceByReference(searchRef)
+
+ if locatedRef == nil {
+ _, path := utils.ConvertComponentIdIntoFriendlyPathSearch(value)
+ err := &ResolvingError{
+ ErrorRef: fmt.Errorf("cannot resolve reference `%s`, it's missing", value),
+ Node: n,
+ Path: path,
+ }
+ resolver.resolvingErrors = append(resolver.resolvingErrors, err)
+ continue
+ }
+
+ if resolve {
+ // if this is a reference also, we want to resolve it.
+ if ok, _, _ := utils.IsNodeRefValue(ref.Node); ok {
+ ref.Node.Content = locatedRef.Node.Content
+ ref.Resolved = true
+ }
+ }
+
+ schemaType := ""
+ if parent != nil {
+ _, arrayTypevn := utils.FindKeyNodeTop("type", parent.Content)
+ if arrayTypevn != nil {
+ if arrayTypevn.Value == "array" {
+ schemaType = "array"
+ }
+ }
+ }
+ if ref.ParentNodeSchemaType != "" {
+ locatedRef.ParentNodeTypes = append(locatedRef.ParentNodeTypes, ref.ParentNodeSchemaType)
+ }
+ locatedRef.ParentNodeSchemaType = schemaType
+ found = append(found, locatedRef)
+ foundRelatives[value] = true
+ }
+
+ if i%2 == 0 && n.Value != "$ref" && n.Value != "" {
+
+ if n.Value == "allOf" ||
+ n.Value == "oneOf" ||
+ n.Value == "anyOf" {
+
+ // if this is a polymorphic link, we want to follow it and see if it becomes circular
+ if utils.IsNodeMap(node.Content[i+1]) { // check for nested items
+ // check if items is present, to indicate an array
+ if _, v := utils.FindKeyNodeTop("items", node.Content[i+1].Content); v != nil {
+ if utils.IsNodeMap(v) {
+ if d, _, l := utils.IsNodeRefValue(v); d {
+
+ // create full definition lookup based on ref.
+ def := l
+ exp := strings.Split(l, "#/")
+ if len(exp) == 2 {
+ if exp[0] != "" {
+ if !strings.HasPrefix(exp[0], "http") {
+ if !filepath.IsAbs(exp[0]) {
+ if strings.HasPrefix(ref.FullDefinition, "http") {
+ u, _ := url.Parse(ref.FullDefinition)
+ p, _ := filepath.Abs(filepath.Join(filepath.Dir(u.Path), exp[0]))
+ u.Path = p
+ u.Fragment = ""
+ def = fmt.Sprintf("%s#/%s", u.String(), exp[1])
+ } else {
+ fd := strings.Split(ref.FullDefinition, "#/")
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(fd[0]), exp[0]))
+ def = fmt.Sprintf("%s#/%s", abs, exp[1])
+ }
+ }
+ } else {
+ if len(exp[1]) > 0 {
+ def = l
+ } else {
+ def = exp[0]
+ }
+ }
+ } else {
+ if strings.HasPrefix(ref.FullDefinition, "http") {
+ u, _ := url.Parse(ref.FullDefinition)
+ u.Fragment = ""
+ def = fmt.Sprintf("%s#/%s", u.String(), exp[1])
+
+ } else {
+ if strings.HasPrefix(ref.FullDefinition, "#/") {
+ def = fmt.Sprintf("#/%s", exp[1])
+ } else {
+ fdexp := strings.Split(ref.FullDefinition, "#/")
+ def = fmt.Sprintf("%s#/%s", fdexp[0], exp[1])
+ }
+ }
+ }
+ } else {
+
+ if strings.HasPrefix(l, "http") {
+ def = l
+ } else {
+ if filepath.IsAbs(l) {
+ def = l
+ } else {
+
+									// check if we're dealing with a remote file
+ if strings.HasPrefix(ref.FullDefinition, "http") {
+
+ // split the url.
+ u, _ := url.Parse(ref.FullDefinition)
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(u.Path), l))
+ u.Path = abs
+ u.Fragment = ""
+ def = u.String()
+ } else {
+ lookupRef := strings.Split(ref.FullDefinition, "#/")
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(lookupRef[0]), l))
+ def = abs
+ }
+ }
+ }
+ }
+
+ mappedRefs, _ := resolver.specIndex.SearchIndexForReference(def)
+ if mappedRefs != nil && !mappedRefs.Circular {
+ circ := false
+ for f := range journey {
+ if journey[f].FullDefinition == mappedRefs.FullDefinition {
+ circ = true
+ break
+ }
+ }
+ if !circ {
+ resolver.VisitReference(mappedRefs, foundRelatives, journey, resolve)
+ } else {
+ loop := append(journey, mappedRefs)
+ circRef := &CircularReferenceResult{
+ Journey: loop,
+ Start: mappedRefs,
+ LoopIndex: i,
+ LoopPoint: mappedRefs,
+ PolymorphicType: n.Value,
+ IsPolymorphicResult: true,
+ }
+
+ mappedRefs.Seen = true
+ mappedRefs.Circular = true
+ if resolver.IgnorePoly {
+ resolver.ignoredPolyReferences = append(resolver.ignoredPolyReferences, circRef)
+ } else {
+ resolver.circularReferences = append(resolver.circularReferences, circRef)
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ // for array based polymorphic items
+ if utils.IsNodeArray(node.Content[i+1]) { // check for nested items
+ // check if items is present, to indicate an array
+ for q := range node.Content[i+1].Content {
+ v := node.Content[i+1].Content[q]
+ if utils.IsNodeMap(v) {
+ if d, _, l := utils.IsNodeRefValue(v); d {
+
+ // create full definition lookup based on ref.
+ def := l
+ exp := strings.Split(l, "#/")
+ if len(exp) == 2 {
+ if exp[0] != "" {
+ if !strings.HasPrefix(exp[0], "http") {
+ if !filepath.IsAbs(exp[0]) {
+
+ if strings.HasPrefix(ref.FullDefinition, "http") {
+
+ u, _ := url.Parse(ref.FullDefinition)
+ p, _ := filepath.Abs(filepath.Join(filepath.Dir(u.Path), exp[0]))
+ u.Path = p
+ def = fmt.Sprintf("%s#/%s", u.String(), exp[1])
+
+ } else {
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(ref.FullDefinition), exp[0]))
+ def = fmt.Sprintf("%s#/%s", abs, exp[1])
+ }
+ }
+ } else {
+ if len(exp[1]) > 0 {
+ def = l
+ } else {
+ def = exp[0]
+ }
+ }
+
+ } else {
+ if strings.HasPrefix(ref.FullDefinition, "http") {
+ u, _ := url.Parse(ref.FullDefinition)
+ u.Fragment = ""
+ def = fmt.Sprintf("%s#/%s", u.String(), exp[1])
+
+ } else {
+ if strings.HasPrefix(ref.FullDefinition, "#/") {
+ def = fmt.Sprintf("#/%s", exp[1])
+ } else {
+ fdexp := strings.Split(ref.FullDefinition, "#/")
+ def = fmt.Sprintf("%s#/%s", fdexp[0], exp[1])
+ }
+ }
+ }
+ } else {
+
+ if strings.HasPrefix(l, "http") {
+ def = l
+ } else {
+ if filepath.IsAbs(l) {
+ def = l
+ } else {
+
+ // check if we're dealing with a remote file
+ if strings.HasPrefix(ref.FullDefinition, "http") {
+
+ // split the url.
+ u, _ := url.Parse(ref.FullDefinition)
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(u.Path), l))
+ u.Path = abs
+ u.Fragment = ""
+ def = u.String()
+ } else {
+ lookupRef := strings.Split(ref.FullDefinition, "#/")
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(lookupRef[0]), l))
+ def = abs
+ }
+ }
+ }
+ }
+
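+ // same lookup and circular check as above, for refs found inside polymorphic array items.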
+ mappedRefs, _ := resolver.specIndex.SearchIndexForReference(def)
+ if mappedRefs != nil && !mappedRefs.Circular {
+ circ := false
+ for f := range journey {
+ if journey[f].FullDefinition == mappedRefs.FullDefinition {
+ circ = true
+ break
+ }
+ }
+ if !circ {
+ resolver.VisitReference(mappedRefs, foundRelatives, journey, resolve)
+ } else {
+ loop := append(journey, mappedRefs)
+
+ circRef := &CircularReferenceResult{
+ Journey: loop,
+ Start: mappedRefs,
+ LoopIndex: i,
+ LoopPoint: mappedRefs,
+ PolymorphicType: n.Value,
+ IsPolymorphicResult: true,
+ }
+
+ mappedRefs.Seen = true
+ mappedRefs.Circular = true
+ if resolver.IgnorePoly {
+ resolver.ignoredPolyReferences = append(resolver.ignoredPolyReferences, circRef)
+ } else {
+ resolver.circularReferences = append(resolver.circularReferences, circRef)
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ break
+ }
+ }
+ }
+ }
+ resolver.relativesSeen += len(found)
+ return found
+}
diff --git a/index/resolver_test.go b/index/resolver_test.go
new file mode 100644
index 0000000..472a376
--- /dev/null
+++ b/index/resolver_test.go
@@ -0,0 +1,1092 @@
+package index
+
+import (
+ "errors"
+ "fmt"
+ "github.com/pb33f/libopenapi/datamodel"
+ "github.com/pb33f/libopenapi/utils"
+ "github.com/vmware-labs/yaml-jsonpath/pkg/yamlpath"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "gopkg.in/yaml.v3"
+)
+
+func TestNewResolver(t *testing.T) {
+ assert.Nil(t, NewResolver(nil))
+}
+
+func TestResolvingError_Error(t *testing.T) {
+
+ errs := []error{
+ &ResolvingError{
+ Path: "$.test1",
+ ErrorRef: errors.New("test1"),
+ Node: &yaml.Node{
+ Line: 1,
+ Column: 1,
+ },
+ },
+ &ResolvingError{
+ Path: "$.test2",
+ ErrorRef: errors.New("test2"),
+ Node: &yaml.Node{
+ Line: 1,
+ Column: 1,
+ },
+ },
+ }
+
+ assert.Equal(t, "test1: $.test1 [1:1]", errs[0].Error())
+ assert.Equal(t, "test2: $.test2 [1:1]", errs[1].Error())
+}
+
+func TestResolvingError_Error_Index(t *testing.T) {
+
+ errs := []error{
+ &ResolvingError{
+ ErrorRef: errors.Join(&IndexingError{
+ Path: "$.test1",
+ Err: errors.New("test1"),
+ Node: &yaml.Node{
+ Line: 1,
+ Column: 1,
+ },
+ }),
+ Node: &yaml.Node{
+ Line: 1,
+ Column: 1,
+ },
+ },
+ &ResolvingError{
+ ErrorRef: errors.Join(&IndexingError{
+ Path: "$.test2",
+ Err: errors.New("test2"),
+ Node: &yaml.Node{
+ Line: 1,
+ Column: 1,
+ },
+ }),
+ Node: &yaml.Node{
+ Line: 1,
+ Column: 1,
+ },
+ },
+ }
+
+ assert.Equal(t, "test1: $.test1 [1:1]", errs[0].Error())
+ assert.Equal(t, "test2: $.test2 [1:1]", errs[1].Error())
+}
+
+func Benchmark_ResolveDocumentStripe(b *testing.B) {
+ baseDir := "../test_specs/stripe.yaml"
+ resolveFile, _ := os.ReadFile(baseDir)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(resolveFile, &rootNode)
+
+ for n := 0; n < b.N; n++ {
+
+ cf := CreateOpenAPIIndexConfig()
+
+ rolo := NewRolodex(cf)
+ rolo.SetRootNode(&rootNode)
+
+ indexedErr := rolo.IndexTheRolodex()
+ assert.Len(b, utils.UnwrapErrors(indexedErr), 3)
+
+ }
+}
+
+func TestResolver_ResolveComponents_CircularSpec(t *testing.T) {
+ circular, _ := os.ReadFile("../test_specs/circular-tests.yaml")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ cf := CreateClosedAPIIndexConfig()
+ cf.AvoidCircularReferenceCheck = true
+ rolo := NewRolodex(cf)
+ rolo.SetRootNode(&rootNode)
+
+ indexedErr := rolo.IndexTheRolodex()
+ assert.NoError(t, indexedErr)
+
+ rolo.Resolve()
+ assert.Len(t, rolo.GetCaughtErrors(), 3)
+
+ _, err := yaml.Marshal(rolo.GetRootIndex().GetResolver().resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CheckForCircularReferences(t *testing.T) {
+ circular, _ := os.ReadFile("../test_specs/circular-tests.yaml")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ cf := CreateClosedAPIIndexConfig()
+
+ rolo := NewRolodex(cf)
+ rolo.SetRootNode(&rootNode)
+
+ indexedErr := rolo.IndexTheRolodex()
+ assert.Error(t, indexedErr)
+ assert.Len(t, utils.UnwrapErrors(indexedErr), 3)
+
+ rolo.CheckForCircularReferences()
+
+ assert.Len(t, rolo.GetCaughtErrors(), 3)
+ assert.Len(t, rolo.GetRootIndex().GetResolver().GetResolvingErrors(), 3)
+ assert.Len(t, rolo.GetRootIndex().GetResolver().GetInfiniteCircularReferences(), 3)
+
+}
+
+func TestResolver_CheckForCircularReferences_CatchArray(t *testing.T) {
+ circular := []byte(`openapi: 3.0.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "array"
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 1)
+ assert.Len(t, resolver.GetResolvingErrors(), 1) // infinite loop is a resolving error.
+ assert.Len(t, resolver.GetInfiniteCircularReferences(), 1)
+ assert.True(t, resolver.GetInfiniteCircularReferences()[0].IsArrayResult)
+
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CheckForCircularReferences_IgnoreArray(t *testing.T) {
+ circular := []byte(`openapi: 3.0.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "array"
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ resolver.IgnoreArrayCircularReferences()
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetResolvingErrors(), 0)
+ assert.Len(t, resolver.GetCircularReferences(), 0)
+
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CheckForCircularReferences_IgnorePoly_Any(t *testing.T) {
+ circular := []byte(`openapi: 3.0.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ resolver.IgnorePolymorphicCircularReferences()
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetResolvingErrors(), 0)
+ assert.Len(t, resolver.GetCircularReferences(), 0)
+
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CheckForCircularReferences_IgnorePoly_All(t *testing.T) {
+ circular := []byte(`openapi: 3.0.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ allOf:
+ - $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ resolver.IgnorePolymorphicCircularReferences()
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetResolvingErrors(), 0)
+ assert.Len(t, resolver.GetCircularReferences(), 0)
+
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CheckForCircularReferences_IgnorePoly_One(t *testing.T) {
+ circular := []byte(`openapi: 3.0.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ oneOf:
+ - $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ resolver.IgnorePolymorphicCircularReferences()
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetResolvingErrors(), 0)
+ assert.Len(t, resolver.GetCircularReferences(), 0)
+
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CheckForCircularReferences_CatchPoly_Any(t *testing.T) {
+ circular := []byte(`openapi: 3.0.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetResolvingErrors(), 0) // not an infinite loop if poly.
+ assert.Len(t, resolver.GetCircularReferences(), 1)
+ assert.Equal(t, "anyOf", resolver.GetCircularReferences()[0].PolymorphicType)
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CheckForCircularReferences_CatchPoly_All(t *testing.T) {
+ circular := []byte(`openapi: 3.0.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ allOf:
+ - $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`)
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetResolvingErrors(), 0) // not an infinite loop if poly.
+ assert.Len(t, resolver.GetCircularReferences(), 1)
+ assert.Equal(t, "allOf", resolver.GetCircularReferences()[0].PolymorphicType)
+ assert.True(t, resolver.GetCircularReferences()[0].IsPolymorphicResult)
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CircularReferencesRequiredValid(t *testing.T) {
+ circular, _ := os.ReadFile("../test_specs/swagger-valid-recursive-model.yaml")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 0)
+
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_CircularReferencesRequiredInvalid(t *testing.T) {
+ circular, _ := os.ReadFile("../test_specs/swagger-invalid-recursive-model.yaml")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(circular, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 2)
+
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_DeepJourney(t *testing.T) {
+ var journey []*Reference
+ for f := 0; f < 200; f++ {
+ journey = append(journey, nil)
+ }
+ idx := NewSpecIndexWithConfig(nil, CreateClosedAPIIndexConfig())
+ resolver := NewResolver(idx)
+ assert.Nil(t, resolver.extractRelatives(nil, nil, nil, nil, journey, false))
+}
+
+func TestResolver_ResolveComponents_Stripe_NoRolodex(t *testing.T) {
+ baseDir := "../test_specs/stripe.yaml"
+
+ resolveFile, _ := os.ReadFile(baseDir)
+
+ var stripeRoot yaml.Node
+ _ = yaml.Unmarshal(resolveFile, &stripeRoot)
+
+ info, _ := datamodel.ExtractSpecInfoWithDocumentCheck(resolveFile, true)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.SpecInfo = info
+
+ idx := NewSpecIndexWithConfig(&stripeRoot, cf)
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 3)
+
+ _, err := yaml.Marshal(resolver.resolvedRoot)
+ assert.NoError(t, err)
+}
+
+func TestResolver_ResolveComponents_Stripe(t *testing.T) {
+ baseDir := "../test_specs/stripe.yaml"
+
+ resolveFile, _ := os.ReadFile(baseDir)
+
+ var stripeRoot yaml.Node
+ _ = yaml.Unmarshal(resolveFile, &stripeRoot)
+
+ info, _ := datamodel.ExtractSpecInfoWithDocumentCheck(resolveFile, true)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.SpecInfo = info
+ cf.AvoidCircularReferenceCheck = true
+
+ rolo := NewRolodex(cf)
+ rolo.SetRootNode(&stripeRoot)
+
+ indexedErr := rolo.IndexTheRolodex()
+ assert.NoError(t, indexedErr)
+
+ // after resolving, the rolodex will have errors.
+ rolo.Resolve()
+
+ assert.Len(t, rolo.GetCaughtErrors(), 3)
+ assert.Len(t, rolo.GetRootIndex().GetResolver().GetNonPolymorphicCircularErrors(), 3)
+ assert.Len(t, rolo.GetRootIndex().GetResolver().GetPolymorphicCircularErrors(), 0)
+
+}
+
+func TestResolver_ResolveComponents_BurgerShop(t *testing.T) {
+ mixedref, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(mixedref, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.Resolve()
+ assert.Len(t, circ, 0)
+}
+
+func TestResolver_ResolveComponents_PolyNonCircRef(t *testing.T) {
+ yml := `paths:
+ /hey:
+ get:
+ responses:
+ "200":
+ $ref: '#/components/schemas/crackers'
+components:
+ schemas:
+ cheese:
+ description: cheese
+ anyOf:
+ items:
+ $ref: '#/components/schemas/crackers'
+ crackers:
+ description: crackers
+ allOf:
+ - $ref: '#/components/schemas/tea'
+ tea:
+ description: tea`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.CheckForCircularReferences()
+ assert.Len(t, circ, 0)
+}
+
+func TestResolver_ResolveComponents_PolyCircRef(t *testing.T) {
+ yml := `openapi: 3.1.0
+components:
+ schemas:
+ cheese:
+ description: cheese
+ anyOf:
+ - $ref: '#/components/schemas/crackers'
+ crackers:
+ description: crackers
+ anyOf:
+ - $ref: '#/components/schemas/cheese'
+ tea:
+ description: tea`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ _ = resolver.CheckForCircularReferences()
+ resolver.circularReferences[0].IsInfiniteLoop = true // override
+ assert.Len(t, idx.GetCircularReferences(), 1)
+ assert.Len(t, resolver.GetPolymorphicCircularErrors(), 1)
+ assert.Equal(t, 2, idx.GetCircularReferences()[0].LoopIndex)
+
+}
+
+func TestResolver_ResolveComponents_Missing(t *testing.T) {
+ yml := `paths:
+ /hey:
+ get:
+ responses:
+ "200":
+ $ref: '#/components/schemas/crackers'
+components:
+ schemas:
+ cheese:
+ description: cheese
+ properties:
+ thang:
+ $ref: '#/components/schemas/crackers'
+ crackers:
+ description: crackers
+ properties:
+ butter:
+ $ref: 'go home, I am drunk'`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ err := resolver.Resolve()
+ assert.Len(t, err, 2)
+ assert.Equal(t, "cannot resolve reference `go home, I am drunk`, it's missing: $go home, I am drunk [18:11]", err[0].Error())
+}
+
+func TestResolver_ResolveThroughPaths(t *testing.T) {
+ yml := `paths:
+ /pizza/{cake}/{pizza}/pie:
+ parameters:
+ - name: juicy
+ /companies/{companyId}/data/payments/{paymentId}:
+ get:
+ tags:
+ - Accounts receivable
+ parameters:
+ - $ref: '#/paths/~1pizza~1%7Bcake%7D~1%7Bpizza%7D~1pie/parameters/0'`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ err := resolver.Resolve()
+ assert.Len(t, err, 0)
+}
+
+func TestResolver_ResolveComponents_MixedRef(t *testing.T) {
+ mixedref, _ := os.ReadFile("../test_specs/mixedref-burgershop.openapi.yaml")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(mixedref, &rootNode)
+
+ // create a test server.
+ server := test_buildMixedRefServer()
+ defer server.Close()
+
+ // create a new config that allows local and remote to be mixed up.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidBuildIndex = true
+ cf.AllowRemoteLookup = true
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "../test_specs"
+
+ // setting this baseURL will override the base path when resolving remote references.
+ cf.BaseURL, _ = url.Parse(server.URL)
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // create a new remote fs and set the config for indexing.
+ remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
+ remoteFS.SetIndexConfig(cf)
+
+ // set our remote handler func
+
+ c := http.Client{}
+
+ remoteFS.RemoteHandlerFunc = c.Get
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"burgershop.openapi.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+
+ // create a new local filesystem.
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+ assert.NoError(t, err)
+
+ // add file systems to the rolodex
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+ rolo.AddRemoteFS(server.URL, remoteFS)
+
+ // index the rolodex.
+ indexedErr := rolo.IndexTheRolodex()
+
+ assert.NoError(t, indexedErr)
+
+ rolo.Resolve()
+ index := rolo.GetRootIndex()
+ resolver := index.GetResolver()
+
+ assert.Len(t, resolver.GetCircularReferences(), 0)
+ assert.Equal(t, 2, resolver.GetIndexesVisited())
+
+ // in v0.8.2 a new check was added when indexing, to prevent re-indexing the same file multiple times.
+ assert.Equal(t, 6, resolver.GetRelativesSeen())
+ assert.Equal(t, 5, resolver.GetJourneysTaken())
+ assert.Equal(t, 7, resolver.GetReferenceVisited())
+}
+
+func TestResolver_ResolveComponents_k8s(t *testing.T) {
+ k8s, _ := os.ReadFile("../test_specs/k8s.json")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(k8s, &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.Resolve()
+ assert.Len(t, circ, 0)
+}
+
+// Example of how to resolve the Stripe OpenAPI specification, and check for circular reference errors
+func ExampleNewResolver() {
+ // create a yaml.Node reference as a root node.
+ var rootNode yaml.Node
+
+ // load in the Stripe OpenAPI spec (lots of polymorphic complexity in here)
+ stripeBytes, _ := os.ReadFile("../test_specs/stripe.yaml")
+
+ // unmarshal bytes into our rootNode.
+ _ = yaml.Unmarshal(stripeBytes, &rootNode)
+
+ // create a new spec index (resolver depends on it)
+ indexConfig := CreateClosedAPIIndexConfig()
+ idx := NewSpecIndexWithConfig(&rootNode, indexConfig)
+
+ // create a new resolver using the index.
+ resolver := NewResolver(idx)
+
+ // resolve the document; if there are circular reference errors, they are returned.
+ // WARNING: this is a destructive action and the rootNode will be PERMANENTLY altered and cannot be unresolved
+ circularErrors := resolver.Resolve()
+
+ // The Stripe API has a bunch of circular reference problems, mainly from polymorphism.
+ // So let's print them out.
+ //
+ fmt.Printf("There are %d circular reference errors, %d of them are polymorphic errors, %d are not",
+ len(circularErrors), len(resolver.GetPolymorphicCircularErrors()), len(resolver.GetNonPolymorphicCircularErrors()))
+ // Output: There are 3 circular reference errors, 0 of them are polymorphic errors, 3 are not
+}
+
+func ExampleResolvingError() {
+ re := ResolvingError{
+ ErrorRef: errors.New("je suis une erreur"),
+ Node: &yaml.Node{
+ Line: 5,
+ Column: 21,
+ },
+ Path: "#/definitions/JeSuisUneErreur",
+ CircularReference: &CircularReferenceResult{},
+ }
+
+ fmt.Printf("%s", re.Error())
+ // Output: je suis une erreur: #/definitions/JeSuisUneErreur [5:21]
+}
+
+func TestDocument_IgnoreArrayCircularReferences(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "array"
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ resolver.IgnoreArrayCircularReferences()
+ assert.NotNil(t, resolver)
+
+ circ := resolver.Resolve()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetIgnoredCircularArrayReferences(), 1)
+
+}
+
+func TestDocument_IgnorePolyCircularReferences(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ resolver.IgnorePolymorphicCircularReferences()
+ assert.NotNil(t, resolver)
+
+ circ := resolver.Resolve()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetIgnoredCircularPolyReferences(), 1)
+
+}
+
+func TestDocument_IgnorePolyCircularReferences_NoArrayForRef(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ bingo:
+ type: object
+ properties:
+ bango:
+ $ref: "#/components/schemas/ProductCategory"
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ items:
+ anyOf:
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ resolver.IgnorePolymorphicCircularReferences()
+ assert.NotNil(t, resolver)
+
+ circ := resolver.Resolve()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetIgnoredCircularPolyReferences(), 1)
+
+}
+
+func TestResolver_isInfiniteCircularDep_NoRef(t *testing.T) {
+ resolver := NewResolver(nil)
+ a, b := resolver.isInfiniteCircularDependency(nil, nil, nil)
+ assert.False(t, a)
+ assert.Nil(t, b)
+}
+
+func TestResolver_AllowedCircle(t *testing.T) {
+
+ d := `openapi: 3.1.0
+paths:
+ /test:
+ get:
+ responses:
+ '200':
+ description: OK
+components:
+ schemas:
+ Obj:
+ type: object
+ properties:
+ other:
+ $ref: '#/components/schemas/Obj2'
+ Obj2:
+ type: object
+ properties:
+ other:
+ $ref: '#/components/schemas/Obj'
+ required:
+ - other`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.Resolve()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetInfiniteCircularReferences(), 0)
+ assert.Len(t, resolver.GetSafeCircularReferences(), 1)
+
+}
+
+func TestResolver_AllowedCircle_Array(t *testing.T) {
+
+ d := `openapi: 3.1.0
+components:
+ schemas:
+ Obj:
+ type: object
+ properties:
+ other:
+ $ref: '#/components/schemas/Obj2'
+ required:
+ - other
+ Obj2:
+ type: object
+ properties:
+ children:
+ type: array
+ items:
+ $ref: '#/components/schemas/Obj'
+ required:
+ - children`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ cf := CreateClosedAPIIndexConfig()
+ cf.IgnoreArrayCircularReferences = true
+
+ idx := NewSpecIndexWithConfig(&rootNode, cf)
+
+ resolver := NewResolver(idx)
+ resolver.IgnoreArrayCircularReferences()
+ assert.NotNil(t, resolver)
+
+ circ := resolver.Resolve()
+ assert.Len(t, circ, 0)
+ assert.Len(t, resolver.GetInfiniteCircularReferences(), 0)
+ assert.Len(t, resolver.GetSafeCircularReferences(), 0)
+ assert.Len(t, resolver.GetIgnoredCircularArrayReferences(), 1)
+
+}
+
+func TestResolver_NotAllowedDeepCircle(t *testing.T) {
+
+ d := `openapi: 3.0
+components:
+ schemas:
+ Three:
+ description: "test three"
+ properties:
+ bester:
+ "$ref": "#/components/schemas/Seven"
+ required:
+ - bester
+ Seven:
+ properties:
+ wow:
+ "$ref": "#/components/schemas/Three"
+ required:
+ - wow`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ idx := NewSpecIndexWithConfig(&rootNode, CreateClosedAPIIndexConfig())
+
+ resolver := NewResolver(idx)
+ assert.NotNil(t, resolver)
+
+ circ := resolver.Resolve()
+ assert.Len(t, circ, 1)
+ assert.Len(t, resolver.GetInfiniteCircularReferences(), 1)
+ assert.Len(t, resolver.GetSafeCircularReferences(), 0)
+
+}
+
+func TestLocateRefEnd_WithResolve(t *testing.T) {
+
+ yml, _ := os.ReadFile("../../test_specs/first.yaml")
+ var bsn yaml.Node
+ _ = yaml.Unmarshal(yml, &bsn)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = "../test_specs"
+
+ localFSConfig := &LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"first.yaml", "second.yaml", "third.yaml", "fourth.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+ localFs, _ := NewLocalFSWithConfig(localFSConfig)
+ rolo := NewRolodex(cf)
+ rolo.AddLocalFS(cf.BasePath, localFs)
+ rolo.SetRootNode(&bsn)
+ rolo.IndexTheRolodex()
+
+ wd, _ := os.Getwd()
+ cp, _ := filepath.Abs(filepath.Join(wd, "../test_specs/third.yaml"))
+ third := localFs.GetFiles()[cp]
+ refs := third.GetIndex().GetMappedReferences()
+ fullDef := fmt.Sprintf("%s#/properties/property/properties/statistics", cp)
+ ref := refs[fullDef]
+
+ assert.Equal(t, "statistics", ref.Name)
+ isRef, _, _ := utils.IsNodeRefValue(ref.Node)
+ assert.True(t, isRef)
+
+ // resolve the stack; it should convert the ref to a node.
+ rolo.Resolve()
+
+ isRef, _, _ = utils.IsNodeRefValue(ref.Node)
+ assert.False(t, isRef)
+}
+
+func TestResolveDoc_Issue195(t *testing.T) {
+
+ spec := `openapi: 3.0.1
+info:
+ title: Some Example!
+paths:
+ "/pet/findByStatus":
+ get:
+ responses:
+ default:
+ content:
+ application/json:
+ schema:
+ "$ref": https://raw.githubusercontent.com/OAI/OpenAPI-Specification/main/examples/v3.0/petstore.yaml#/components/schemas/Error`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(spec), &rootNode)
+
+ // create an index config
+ config := CreateOpenAPIIndexConfig()
+
+ // the rolodex will automatically try and check for circular references, you don't want to do this
+ // if you're resolving the spec, as the node tree is marked as 'seen' and you won't be able to resolve
+ // correctly.
+ config.AvoidCircularReferenceCheck = true
+
+ // new in 0.13+ is the ability to add remote and local file systems to the index. This
+ // requires a new part, the rolodex. It holds all the indexes and knows where to find
+ // every reference across local and remote files.
+ rolodex := NewRolodex(config)
+
+ // add a new remote file system.
+ remoteFS, _ := NewRemoteFSWithConfig(config)
+
+ // add the remote file system to the rolodex
+ rolodex.AddRemoteFS("", remoteFS)
+
+ // set the root node of the rolodex, this is your spec.
+ rolodex.SetRootNode(&rootNode)
+
+ // index the rolodex
+ indexingError := rolodex.IndexTheRolodex()
+ if indexingError != nil {
+ panic(indexingError)
+ }
+
+ // resolve the rolodex
+ rolodex.Resolve()
+
+ // there should be no errors at this point
+ resolvingErrors := rolodex.GetCaughtErrors()
+ if resolvingErrors != nil {
+ panic(resolvingErrors)
+ }
+
+ // perform some lookups.
+ var nodes []*yaml.Node
+
+ // pull out schema type
+ path, _ := yamlpath.NewPath("$.paths./pet/findByStatus.get.responses.default.content['application/json'].schema.type")
+ nodes, _ = path.Find(&rootNode)
+ assert.Equal(t, nodes[0].Value, "object")
+
+ // pull out required array
+ path, _ = yamlpath.NewPath("$.paths./pet/findByStatus.get.responses.default.content['application/json'].schema.required")
+ nodes, _ = path.Find(&rootNode)
+ assert.Equal(t, nodes[0].Content[0].Value, "code")
+ assert.Equal(t, nodes[0].Content[1].Value, "message")
+
+}
diff --git a/index/rolodex.go b/index/rolodex.go
new file mode 100644
index 0000000..4f4513b
--- /dev/null
+++ b/index/rolodex.go
@@ -0,0 +1,625 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "errors"
+ "fmt"
+ "gopkg.in/yaml.v3"
+ "io"
+ "io/fs"
+ "log/slog"
+ "math"
+ "net/url"
+ "os"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "sync"
+ "time"
+)
+
+// CanBeIndexed is an interface that allows a file to be indexed.
+type CanBeIndexed interface {
+ Index(config *SpecIndexConfig) (*SpecIndex, error)
+}
+
+// RolodexFile is an interface that represents a file in the rolodex. It combines multiple `fs` interfaces
+// like `fs.FileInfo` and `fs.File` into one interface, so the same struct can be used for everything.
+type RolodexFile interface {
+ GetContent() string
+ GetFileExtension() FileExtension
+ GetFullPath() string
+ GetErrors() []error
+ GetContentAsYAMLNode() (*yaml.Node, error)
+ GetIndex() *SpecIndex
+ Name() string
+ ModTime() time.Time
+ IsDir() bool
+ Sys() any
+ Size() int64
+ Mode() os.FileMode
+}
+
+// RolodexFS is an interface that represents a rolodex file system. It is the same interface as `fs.FS`, except it
+// also exposes a GetFiles() signature, to extract all files in the FS.
+type RolodexFS interface {
+ Open(name string) (fs.File, error)
+ GetFiles() map[string]RolodexFile
+}
+
+// Rolodex is a file system abstraction that allows for the indexing of multiple file systems
+// and the ability to resolve references across those file systems. It is used to hold references to external
+// files, and the indexes they hold. The rolodex is the master lookup for all references.
+type Rolodex struct {
+ localFS map[string]fs.FS
+ remoteFS map[string]fs.FS
+ indexed bool
+ built bool
+ manualBuilt bool
+ resolved bool
+ circChecked bool
+ indexConfig *SpecIndexConfig
+ indexingDuration time.Duration
+ indexes []*SpecIndex
+ indexMap map[string]*SpecIndex
+ indexLock sync.Mutex
+ rootIndex *SpecIndex
+ rootNode *yaml.Node
+ caughtErrors []error
+ safeCircularReferences []*CircularReferenceResult
+ infiniteCircularReferences []*CircularReferenceResult
+ ignoredCircularReferences []*CircularReferenceResult
+ logger *slog.Logger
+ rolodex *Rolodex
+}
+
+// NewRolodex creates a new rolodex with the provided index configuration.
+func NewRolodex(indexConfig *SpecIndexConfig) *Rolodex {
+
+ logger := indexConfig.Logger
+ if logger == nil {
+ logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ }
+
+ r := &Rolodex{
+ indexConfig: indexConfig,
+ localFS: make(map[string]fs.FS),
+ remoteFS: make(map[string]fs.FS),
+ logger: logger,
+ indexMap: make(map[string]*SpecIndex),
+ }
+ indexConfig.Rolodex = r
+ return r
+}
+
+// GetIgnoredCircularReferences returns a list of circular references that were ignored during the indexing process.
+// These can be array or polymorphic references.
+func (r *Rolodex) GetIgnoredCircularReferences() []*CircularReferenceResult {
+ debounced := make(map[string]*CircularReferenceResult)
+ for _, c := range r.ignoredCircularReferences {
+ if _, ok := debounced[c.LoopPoint.FullDefinition]; !ok {
+ debounced[c.LoopPoint.FullDefinition] = c
+ }
+ }
+ var debouncedResults []*CircularReferenceResult
+ for _, v := range debounced {
+ debouncedResults = append(debouncedResults, v)
+ }
+ return debouncedResults
+}
+
+// GetIndexingDuration returns the duration it took to index the rolodex.
+func (r *Rolodex) GetIndexingDuration() time.Duration {
+ return r.indexingDuration
+}
+
+// GetRootIndex returns the root index of the rolodex (the entry point, the main document)
+func (r *Rolodex) GetRootIndex() *SpecIndex {
+ return r.rootIndex
+}
+
+// GetRootNode returns the root node of the rolodex (the entry point, the main document)
+func (r *Rolodex) GetRootNode() *yaml.Node {
+ return r.rootNode
+}
+
+// GetIndexes returns all the indexes in the rolodex.
+func (r *Rolodex) GetIndexes() []*SpecIndex {
+ return r.indexes
+}
+
+// GetCaughtErrors returns all the errors that were caught during the indexing process.
+func (r *Rolodex) GetCaughtErrors() []error {
+ return r.caughtErrors
+}
+
+// AddLocalFS adds a local file system to the rolodex.
+func (r *Rolodex) AddLocalFS(baseDir string, fileSystem fs.FS) {
+ absBaseDir, _ := filepath.Abs(baseDir)
+ if f, ok := fileSystem.(*LocalFS); ok {
+ f.rolodex = r
+ f.logger = r.logger
+ }
+ r.localFS[absBaseDir] = fileSystem
+}
+
+// SetRootNode sets the root node of the rolodex (the entry point, the main document)
+func (r *Rolodex) SetRootNode(node *yaml.Node) {
+ r.rootNode = node
+}
+
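+// AddExternalIndex registers an index in the rolodex's index map, keyed by its location, if not already present.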
+func (r *Rolodex) AddExternalIndex(idx *SpecIndex, location string) {
+ r.indexLock.Lock()
+ if r.indexMap[location] == nil {
+ r.indexMap[location] = idx
+ }
+ r.indexLock.Unlock()
+}
+
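+// AddIndex appends a spec index to the rolodex and registers it under its absolute spec path.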
+func (r *Rolodex) AddIndex(idx *SpecIndex) {
+ r.indexes = append(r.indexes, idx)
+ if idx != nil {
+ p := idx.specAbsolutePath
+ r.AddExternalIndex(idx, p)
+ }
+}
+
+// AddRemoteFS adds a remote file system to the rolodex.
+func (r *Rolodex) AddRemoteFS(baseURL string, fileSystem fs.FS) {
+ if f, ok := fileSystem.(*RemoteFS); ok {
+ f.rolodex = r
+ f.logger = r.logger
+ }
+ r.remoteFS[baseURL] = fileSystem
+}
+
+// IndexTheRolodex indexes the rolodex, building out the indexes for each file, and then building the root index.
+func (r *Rolodex) IndexTheRolodex() error {
+ if r.indexed {
+ return nil
+ }
+
+ var caughtErrors []error
+
+ var indexBuildQueue []*SpecIndex
+
+ indexRolodexFile := func(
+ location string, fs fs.FS,
+ doneChan chan bool,
+ errChan chan error,
+ indexChan chan *SpecIndex) {
+
+ var wg sync.WaitGroup
+
+ indexFileFunc := func(idxFile CanBeIndexed, fullPath string) {
+ defer wg.Done()
+
+ // copy the config and set the spec absolute path for this file
+ copiedConfig := *r.indexConfig
+ copiedConfig.SpecAbsolutePath = fullPath
+ copiedConfig.AvoidBuildIndex = true // we will build out everything in two steps.
+ idx, err := idxFile.Index(&copiedConfig)
+
+ if err != nil {
+ errChan <- err
+ }
+
+ if err == nil {
+ // for each index, we need a resolver
+ resolver := NewResolver(idx)
+
+ // check if the config has been set to ignore circular references in arrays and polymorphic schemas
+ if copiedConfig.IgnoreArrayCircularReferences {
+ resolver.IgnoreArrayCircularReferences()
+ }
+ if copiedConfig.IgnorePolymorphicCircularReferences {
+ resolver.IgnorePolymorphicCircularReferences()
+ }
+ indexChan <- idx
+ }
+
+ }
+
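+ // walk every file in this file system and index each one concurrently.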
+ if lfs, ok := fs.(RolodexFS); ok {
+ wait := false
+ for _, f := range lfs.GetFiles() {
+ if idxFile, ko := f.(CanBeIndexed); ko {
+ wg.Add(1)
+ wait = true
+ go indexFileFunc(idxFile, f.GetFullPath())
+ }
+ }
+ if wait {
+ wg.Wait()
+ }
+ doneChan <- true
+ return
+ } else {
+ errChan <- errors.New("rolodex file system is not a RolodexFS")
+ doneChan <- true
+ }
+ }
+
+ indexingCompleted := 0
+ totalToIndex := len(r.localFS) + len(r.remoteFS)
+ doneChan := make(chan bool)
+ errChan := make(chan error)
+ indexChan := make(chan *SpecIndex)
+
+ // run through every file system and index every file, fanning out as many goroutines as possible.
+ started := time.Now()
+ for k, v := range r.localFS {
+ go indexRolodexFile(k, v, doneChan, errChan, indexChan)
+ }
+ for k, v := range r.remoteFS {
+ go indexRolodexFile(k, v, doneChan, errChan, indexChan)
+ }
+
+ for indexingCompleted < totalToIndex {
+ select {
+ case <-doneChan:
+ indexingCompleted++
+ case err := <-errChan:
+ indexingCompleted++
+ caughtErrors = append(caughtErrors, err)
+ case idx := <-indexChan:
+ indexBuildQueue = append(indexBuildQueue, idx)
+ }
+ }
+
+ // now that we have indexed all the files, we can build out each index.
+ r.indexes = indexBuildQueue
+
+ sort.Slice(indexBuildQueue, func(i, j int) bool {
+ return indexBuildQueue[i].specAbsolutePath < indexBuildQueue[j].specAbsolutePath
+ })
+
+ for _, idx := range indexBuildQueue {
+ idx.BuildIndex()
+ if r.indexConfig.AvoidCircularReferenceCheck {
+ continue
+ }
+ errs := idx.resolver.CheckForCircularReferences()
+ for e := range errs {
+ caughtErrors = append(caughtErrors, errs[e])
+ }
+ if len(idx.resolver.GetIgnoredCircularPolyReferences()) > 0 {
+ r.ignoredCircularReferences = append(r.ignoredCircularReferences, idx.resolver.GetIgnoredCircularPolyReferences()...)
+ }
+ if len(idx.resolver.GetIgnoredCircularArrayReferences()) > 0 {
+ r.ignoredCircularReferences = append(r.ignoredCircularReferences, idx.resolver.GetIgnoredCircularArrayReferences()...)
+ }
+ }
+
+ // having indexed and built every supporting file, we can now build the root index (our entry point)
+ if r.rootNode != nil {
+
+ // if there is a base path, then we need to set the root spec config to point to a theoretical root.yaml
+ // which does not exist, but is used to formulate the absolute path to root references correctly.
+ if r.indexConfig.BasePath != "" && r.indexConfig.BaseURL == nil {
+
+ basePath := r.indexConfig.BasePath
+ if !filepath.IsAbs(basePath) {
+ basePath, _ = filepath.Abs(basePath)
+ }
+
+ if len(r.localFS) > 0 || len(r.remoteFS) > 0 {
+ r.indexConfig.SpecAbsolutePath = filepath.Join(basePath, "root.yaml")
+ }
+ }
+
+ index := NewSpecIndexWithConfig(r.rootNode, r.indexConfig)
+ resolver := NewResolver(index)
+
+ if r.indexConfig.IgnoreArrayCircularReferences {
+ resolver.IgnoreArrayCircularReferences()
+ }
+ if r.indexConfig.IgnorePolymorphicCircularReferences {
+ resolver.IgnorePolymorphicCircularReferences()
+ }
+
+ r.logger.Debug("[rolodex] starting root index build")
+ index.BuildIndex()
+ r.logger.Debug("[rolodex] root index build completed")
+
+ if !r.indexConfig.AvoidCircularReferenceCheck {
+ resolvingErrors := resolver.CheckForCircularReferences()
+ r.circChecked = true
+ for e := range resolvingErrors {
+ caughtErrors = append(caughtErrors, resolvingErrors[e])
+ }
+ if len(resolver.GetIgnoredCircularPolyReferences()) > 0 {
+ r.ignoredCircularReferences = append(r.ignoredCircularReferences, resolver.GetIgnoredCircularPolyReferences()...)
+ }
+ if len(resolver.GetIgnoredCircularArrayReferences()) > 0 {
+ r.ignoredCircularReferences = append(r.ignoredCircularReferences, resolver.GetIgnoredCircularArrayReferences()...)
+ }
+ }
+ r.rootIndex = index
+ if len(index.refErrors) > 0 {
+ caughtErrors = append(caughtErrors, index.refErrors...)
+ }
+ }
+ r.indexingDuration = time.Since(started)
+ r.indexed = true
+ r.caughtErrors = caughtErrors
+ r.built = true
+ return errors.Join(caughtErrors...)
+
+}
+
+// CheckForCircularReferences checks for circular references in the rolodex.
+func (r *Rolodex) CheckForCircularReferences() {
+ if !r.circChecked {
+ if r.rootIndex != nil && r.rootIndex.resolver != nil {
+ resolvingErrors := r.rootIndex.resolver.CheckForCircularReferences()
+ for e := range resolvingErrors {
+ r.caughtErrors = append(r.caughtErrors, resolvingErrors[e])
+ }
+ if len(r.rootIndex.resolver.ignoredPolyReferences) > 0 {
+ r.ignoredCircularReferences = append(r.ignoredCircularReferences, r.rootIndex.resolver.ignoredPolyReferences...)
+ }
+ if len(r.rootIndex.resolver.ignoredArrayReferences) > 0 {
+ r.ignoredCircularReferences = append(r.ignoredCircularReferences, r.rootIndex.resolver.ignoredArrayReferences...)
+ }
+ r.safeCircularReferences = append(r.safeCircularReferences, r.rootIndex.resolver.GetSafeCircularReferences()...)
+ r.infiniteCircularReferences = append(r.infiniteCircularReferences, r.rootIndex.resolver.GetInfiniteCircularReferences()...)
+ }
+ r.circChecked = true
+ }
+}
+
+// Resolve resolves references in the rolodex.
+func (r *Rolodex) Resolve() {
+
+ var resolvers []*Resolver
+ if r.rootIndex != nil && r.rootIndex.resolver != nil {
+ resolvers = append(resolvers, r.rootIndex.resolver)
+ }
+ for _, idx := range r.indexes {
+ if idx.resolver != nil {
+ resolvers = append(resolvers, idx.resolver)
+ }
+ }
+ for _, res := range resolvers {
+ resolvingErrors := res.Resolve()
+ for e := range resolvingErrors {
+ r.caughtErrors = append(r.caughtErrors, resolvingErrors[e])
+ }
+ if r.rootIndex != nil && len(r.rootIndex.resolver.ignoredPolyReferences) > 0 {
+ r.ignoredCircularReferences = append(r.ignoredCircularReferences, res.ignoredPolyReferences...)
+ }
+ if r.rootIndex != nil && len(r.rootIndex.resolver.ignoredArrayReferences) > 0 {
+ r.ignoredCircularReferences = append(r.ignoredCircularReferences, res.ignoredArrayReferences...)
+ }
+ r.safeCircularReferences = append(r.safeCircularReferences, res.GetSafeCircularReferences()...)
+ r.infiniteCircularReferences = append(r.infiniteCircularReferences, res.GetInfiniteCircularReferences()...)
+ }
+ r.resolved = true
+}
+
+// BuildIndexes builds the indexes in the rolodex, this is generally not required unless manually building a rolodex.
+func (r *Rolodex) BuildIndexes() {
+ if r.manualBuilt {
+ return
+ }
+ for _, idx := range r.indexes {
+ idx.BuildIndex()
+ }
+ if r.rootIndex != nil {
+ r.rootIndex.BuildIndex()
+ }
+ r.manualBuilt = true
+}
+
+// Open opens a file in the rolodex, and returns a RolodexFile.
+func (r *Rolodex) Open(location string) (RolodexFile, error) {
+ if r == nil {
+ return nil, fmt.Errorf("rolodex has not been initialized, cannot open file '%s'", location)
+ }
+
+ if len(r.localFS) <= 0 && len(r.remoteFS) <= 0 {
+ return nil, fmt.Errorf("rolodex has no file systems configured, cannot open '%s'. Add a BaseURL or BasePath to your configuration so the rolodex knows how to resolve references", location)
+ }
+
+ var errorStack []error
+ var localFile *LocalFile
+ var remoteFile *RemoteFile
+ fileLookup := location
+ isUrl := false
+ u, _ := url.Parse(location)
+ if u != nil && u.Scheme != "" {
+ isUrl = true
+ }
+
+ if !isUrl {
+ for k, v := range r.localFS {
+
+ // check if this is a URL or an abs/rel reference.
+ if !filepath.IsAbs(location) {
+ fileLookup, _ = filepath.Abs(filepath.Join(k, location))
+ }
+
+ f, err := v.Open(fileLookup)
+ if err != nil {
+ // try a lookup that is not absolute, but relative
+ f, err = v.Open(location)
+ if err != nil {
+ errorStack = append(errorStack, err)
+ continue
+ }
+ }
+ // check if this is a native rolodex FS; if so, the work is done.
+ if lf, ko := interface{}(f).(*LocalFile); ko {
+ localFile = lf
+ break
+ } else {
+ // not a native FS, so we need to read the file and create a local file.
+ bytes, rErr := io.ReadAll(f)
+ if rErr != nil {
+ errorStack = append(errorStack, rErr)
+ continue
+ }
+ s, sErr := f.Stat()
+ if sErr != nil {
+ errorStack = append(errorStack, sErr)
+ continue
+ }
+ if len(bytes) > 0 {
+ localFile = &LocalFile{
+ filename: filepath.Base(fileLookup),
+ name: filepath.Base(fileLookup),
+ extension: ExtractFileType(fileLookup),
+ data: bytes,
+ fullPath: fileLookup,
+ lastModified: s.ModTime(),
+ index: r.rootIndex,
+ }
+ break
+ }
+ }
+ }
+
+ if localFile == nil {
+
+ // if there was no file found locally, then search the remote FS.
+ for _, v := range r.remoteFS {
+ f, err := v.Open(location)
+ if err != nil {
+ errorStack = append(errorStack, err)
+ continue
+ }
+ if f != nil {
+ return f.(*RemoteFile), nil
+ }
+ }
+ }
+
+ } else {
+
+ if !r.indexConfig.AllowRemoteLookup {
+ return nil, fmt.Errorf("remote lookup for '%s' not allowed, please set the index configuration to "+
+ "AllowRemoteLookup to true", fileLookup)
+ }
+
+ for _, v := range r.remoteFS {
+ f, err := v.Open(fileLookup)
+ if err == nil {
+
+ if rf, ok := interface{}(f).(*RemoteFile); ok {
+ remoteFile = rf
+ break
+ } else {
+
+ bytes, rErr := io.ReadAll(f)
+ if rErr != nil {
+ errorStack = append(errorStack, rErr)
+ continue
+ }
+ s, sErr := f.Stat()
+ if sErr != nil {
+ errorStack = append(errorStack, sErr)
+ continue
+ }
+ if len(bytes) > 0 {
+ remoteFile = &RemoteFile{
+ filename: filepath.Base(fileLookup),
+ name: filepath.Base(fileLookup),
+ extension: ExtractFileType(fileLookup),
+ data: bytes,
+ fullPath: fileLookup,
+ lastModified: s.ModTime(),
+ index: r.rootIndex,
+ }
+ break
+ }
+ }
+ }
+ }
+ }
+
+ if localFile != nil {
+ return &rolodexFile{
+ rolodex: r,
+ location: localFile.fullPath,
+ localFile: localFile,
+ }, errors.Join(errorStack...)
+ }
+
+ if remoteFile != nil {
+ return &rolodexFile{
+ rolodex: r,
+ location: remoteFile.fullPath,
+ remoteFile: remoteFile,
+ }, errors.Join(errorStack...)
+ }
+
+ return nil, errors.Join(errorStack...)
+}
+
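+// suffixes are the units used by HumanFileSize when rendering a byte count.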
+var suffixes = []string{"B", "KB", "MB", "GB", "TB"}
+
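+// Round rounds val to 'places' decimal places, rounding up when the remaining fraction meets or exceeds roundOn.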
+func Round(val float64, roundOn float64, places int) (newVal float64) {
+ var round float64
+ pow := math.Pow(10, float64(places))
+ digit := pow * val
+ _, div := math.Modf(digit)
+ if div >= roundOn {
+ round = math.Ceil(digit)
+ } else {
+ round = math.Floor(digit)
+ }
+ newVal = round / pow
+ return
+}
+
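+// HumanFileSize renders a size in bytes as a human-readable string using B, KB, MB, GB or TB.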
+func HumanFileSize(size float64) string {
+ base := math.Log(size) / math.Log(1024)
+ getSize := Round(math.Pow(1024, base-math.Floor(base)), .5, 2)
+ getSuffix := suffixes[int(math.Floor(base))]
+ return strconv.FormatFloat(getSize, 'f', -1, 64) + " " + string(getSuffix)
+}
+
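+// RolodexFileSizeAsString returns the total size of all files in the rolodex as a human-readable string.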
+func (r *Rolodex) RolodexFileSizeAsString() string {
+ size := r.RolodexFileSize()
+ return HumanFileSize(float64(size))
+}
+
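+// RolodexTotalFiles returns the total number of files held across all of the rolodex's file systems.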
+func (r *Rolodex) RolodexTotalFiles() int {
+ // look through each file system and count the files
+ var total int
+ for _, v := range r.localFS {
+ if lfs, ok := v.(RolodexFS); ok {
+ total += len(lfs.GetFiles())
+ }
+ }
+ for _, v := range r.remoteFS {
+ if lfs, ok := v.(RolodexFS); ok {
+ total += len(lfs.GetFiles())
+ }
+ }
+ return total
+}
+
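+// RolodexFileSize returns the combined size, in bytes, of every file held by the rolodex's file systems.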
+func (r *Rolodex) RolodexFileSize() int64 {
+ var size int64
+ for _, v := range r.localFS {
+ if lfs, ok := v.(RolodexFS); ok {
+ for _, f := range lfs.GetFiles() {
+ size += f.Size()
+ }
+ }
+ }
+ for _, v := range r.remoteFS {
+ if lfs, ok := v.(RolodexFS); ok {
+ for _, f := range lfs.GetFiles() {
+ size += f.Size()
+ }
+ }
+ }
+ return size
+}
diff --git a/index/rolodex_file.go b/index/rolodex_file.go
new file mode 100644
index 0000000..1268c7b
--- /dev/null
+++ b/index/rolodex_file.go
@@ -0,0 +1,153 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "github.com/pb33f/libopenapi/datamodel"
+ "gopkg.in/yaml.v3"
+ "os"
+ "time"
+)
+
+type rolodexFile struct {
+ location string
+ rolodex *Rolodex
+ index *SpecIndex
+ localFile *LocalFile
+ remoteFile *RemoteFile
+}
+
+func (rf *rolodexFile) Name() string {
+ if rf.localFile != nil {
+ return rf.localFile.filename
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.filename
+ }
+ return ""
+}
+
+func (rf *rolodexFile) GetIndex() *SpecIndex {
+ if rf.localFile != nil {
+ return rf.localFile.GetIndex()
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.GetIndex()
+ }
+ return nil
+}
+
+func (rf *rolodexFile) Index(config *SpecIndexConfig) (*SpecIndex, error) {
+ if rf.index != nil {
+ return rf.index, nil
+ }
+ var content []byte
+ if rf.localFile != nil {
+ content = rf.localFile.data
+ }
+ if rf.remoteFile != nil {
+ content = rf.remoteFile.data
+ }
+
+ // first, we must parse the content of the file
+ info, err := datamodel.ExtractSpecInfoWithDocumentCheck(content, config.SkipDocumentCheck)
+ if err != nil {
+ return nil, err
+ }
+
+ // create a new index for this file and link it to this rolodex.
+ config.Rolodex = rf.rolodex
+ index := NewSpecIndexWithConfig(info.RootNode, config)
+ rf.index = index
+ return index, nil
+
+}
+
+func (rf *rolodexFile) GetContent() string {
+ if rf.localFile != nil {
+ return string(rf.localFile.data)
+ }
+ if rf.remoteFile != nil {
+ return string(rf.remoteFile.data)
+ }
+ return ""
+}
+
+func (rf *rolodexFile) GetContentAsYAMLNode() (*yaml.Node, error) {
+ if rf.localFile != nil {
+ return rf.localFile.GetContentAsYAMLNode()
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.GetContentAsYAMLNode()
+ }
+ return nil, nil
+}
+
+func (rf *rolodexFile) GetFileExtension() FileExtension {
+ if rf.localFile != nil {
+ return rf.localFile.extension
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.extension
+ }
+ return UNSUPPORTED
+}
+func (rf *rolodexFile) GetFullPath() string {
+ if rf.localFile != nil {
+ return rf.localFile.fullPath
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.fullPath
+ }
+ return ""
+}
+func (rf *rolodexFile) ModTime() time.Time {
+ if rf.localFile != nil {
+ return rf.localFile.lastModified
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.lastModified
+ }
+ return time.Now()
+}
+
+func (rf *rolodexFile) Size() int64 {
+ if rf.localFile != nil {
+ return rf.localFile.Size()
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.Size()
+ }
+ return 0
+}
+
+func (rf *rolodexFile) IsDir() bool {
+ // always false.
+ return false
+}
+
+func (rf *rolodexFile) Sys() interface{} {
+ // not implemented.
+ return nil
+}
+
+func (rf *rolodexFile) Mode() os.FileMode {
+ if rf.localFile != nil {
+ return rf.localFile.Mode()
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.Mode()
+ }
+ return os.FileMode(0)
+}
+
+func (rf *rolodexFile) GetErrors() []error {
+ if rf.localFile != nil {
+ return rf.localFile.readingErrors
+ }
+ if rf.remoteFile != nil {
+ return rf.remoteFile.seekingErrors
+ }
+ return nil
+}
diff --git a/index/rolodex_file_loader.go b/index/rolodex_file_loader.go
new file mode 100644
index 0000000..2507519
--- /dev/null
+++ b/index/rolodex_file_loader.go
@@ -0,0 +1,440 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "fmt"
+ "github.com/pb33f/libopenapi/datamodel"
+ "golang.org/x/sync/syncmap"
+ "gopkg.in/yaml.v3"
+ "io"
+ "io/fs"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "slices"
+ "strings"
+ "sync"
+ "time"
+)
+
+// LocalFS is a file system that indexes local files.
+type LocalFS struct {
+ fsConfig *LocalFSConfig
+ indexConfig *SpecIndexConfig
+ entryPointDirectory string
+ baseDirectory string
+ Files syncmap.Map
+ extractedFiles map[string]RolodexFile
+ logger *slog.Logger
+ fileLock sync.Mutex
+ readingErrors []error
+ rolodex *Rolodex
+ processingFiles syncmap.Map
+ fileListeners int
+}
+
+// GetFiles returns the files that have been indexed. A map of RolodexFile objects keyed by the full path of the file.
+func (l *LocalFS) GetFiles() map[string]RolodexFile {
+ files := make(map[string]RolodexFile)
+ l.Files.Range(func(key, value interface{}) bool {
+ files[key.(string)] = value.(*LocalFile)
+ return true
+ })
+ l.extractedFiles = files
+ return files
+}
+
+// GetErrors returns any errors that occurred during the indexing process.
+func (l *LocalFS) GetErrors() []error {
+ return l.readingErrors
+}
+
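+// waiterLocal tracks an in-flight load of a single file, so concurrent Open calls for the same path can wait on one result.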
+type waiterLocal struct {
+ f string
+ done bool
+ file *LocalFile
+ listeners int
+}
+
+// Open opens a file, returning it or an error. If the file is not found, the error is of type *PathError.
+func (l *LocalFS) Open(name string) (fs.File, error) {
+
+ if l.indexConfig != nil && !l.indexConfig.AllowFileLookup {
+ return nil, &fs.PathError{Op: "open", Path: name,
+ Err: fmt.Errorf("file lookup for '%s' not allowed, set the index configuration "+
+ "to AllowFileLookup to be true", name)}
+ }
+
+ if !filepath.IsAbs(name) {
+ name, _ = filepath.Abs(filepath.Join(l.baseDirectory, name))
+ }
+
+ if f, ok := l.Files.Load(name); ok {
+ return f.(*LocalFile), nil
+ } else {
+
+ if l.fsConfig != nil && l.fsConfig.DirFS == nil {
+
+ // if we're processing, we need to block and wait for the file to be processed
+ // try path first
+ if r, ko := l.processingFiles.Load(name); ko {
+
+ wait := r.(*waiterLocal)
+ wait.listeners++
+
+ l.logger.Debug("[rolodex file loader]: waiting for existing OS load to complete", "file", name, "listeners", wait.listeners)
+
+ for !wait.done {
+ time.Sleep(200 * time.Nanosecond) // breathe for a few nanoseconds.
+ }
+ wait.listeners--
+ l.logger.Debug("[rolodex file loader]: waiting done, OS load completed, returning file", "file", name, "listeners", wait.listeners)
+ return wait.file, nil
+ }
+
+ processingWaiter := &waiterLocal{f: name}
+
+ // add to processing
+ l.processingFiles.Store(name, processingWaiter)
+
+ var extractedFile *LocalFile
+ var extErr error
+ // attempt to open the file from the local filesystem
+ l.logger.Debug("[rolodex file loader]: extracting file from OS", "file", name)
+ extractedFile, extErr = l.extractFile(name)
+ if extErr != nil {
+ l.processingFiles.Delete(name)
+ return nil, extErr
+ }
+ if extractedFile != nil {
+
+ // in this mode, we need the index config to be set.
+ if l.indexConfig != nil {
+ copiedCfg := *l.indexConfig
+ copiedCfg.SpecAbsolutePath = name
+ copiedCfg.AvoidBuildIndex = true
+
+ idx, idxError := extractedFile.Index(&copiedCfg)
+
+ if idx != nil && l.rolodex != nil {
+ idx.rolodex = l.rolodex
+ }
+
+ if idxError != nil && idx == nil {
+ extractedFile.readingErrors = append(l.readingErrors, idxError)
+ } else {
+
+ // for each index, we need a resolver
+ resolver := NewResolver(idx)
+ idx.resolver = resolver
+ idx.BuildIndex()
+ }
+
+ if len(extractedFile.data) > 0 {
+ l.logger.Debug("[rolodex file loader]: successfully loaded and indexed file", "file", name)
+ }
+
+ // add index to rolodex indexes
+ if l.rolodex != nil {
+ l.rolodex.AddIndex(idx)
+ }
+ if processingWaiter.listeners > 0 {
+ l.logger.Debug("[rolodex file loader]: alerting file subscribers", "file", name, "subs", processingWaiter.listeners)
+ }
+ processingWaiter.file = extractedFile
+ processingWaiter.done = true
+ l.processingFiles.Delete(name)
+ return extractedFile, nil
+ }
+ }
+ }
+ }
+ return nil, &fs.PathError{Op: "open", Path: name, Err: fs.ErrNotExist}
+}
+
+// LocalFile is a file that has been indexed by the LocalFS. It implements the RolodexFile interface.
+type LocalFile struct {
+ filename string
+ name string
+ extension FileExtension
+ data []byte
+ fullPath string
+ lastModified time.Time
+ readingErrors []error
+ index *SpecIndex
+ parsed *yaml.Node
+ offset int64
+}
+
+// GetIndex returns the *SpecIndex for the file.
+func (l *LocalFile) GetIndex() *SpecIndex {
+ return l.index
+}
+
+// Index returns the *SpecIndex for the file. If the index has not been created, it will be created (indexed)
+func (l *LocalFile) Index(config *SpecIndexConfig) (*SpecIndex, error) {
+ if l.index != nil {
+ return l.index, nil
+ }
+ content := l.data
+
+ // first, we must parse the content of the file
+ info, err := datamodel.ExtractSpecInfoWithDocumentCheck(content, true)
+ if err != nil {
+ return nil, err
+ }
+
+ index := NewSpecIndexWithConfig(info.RootNode, config)
+ index.specAbsolutePath = l.fullPath
+
+ l.index = index
+ return index, nil
+
+}
+
+// GetContent returns the content of the file as a string.
+func (l *LocalFile) GetContent() string {
+ return string(l.data)
+}
+
+// GetContentAsYAMLNode returns the content of the file as a *yaml.Node. If something went wrong
+// then an error is returned.
+func (l *LocalFile) GetContentAsYAMLNode() (*yaml.Node, error) {
+ if l.parsed != nil {
+ return l.parsed, nil
+ }
+ if l.index != nil && l.index.root != nil {
+ return l.index.root, nil
+ }
+ if l.data == nil {
+ return nil, fmt.Errorf("no data to parse for file: %s", l.fullPath)
+ }
+ var root yaml.Node
+ err := yaml.Unmarshal(l.data, &root)
+ if err != nil {
+ return nil, err
+ }
+ if l.index != nil && l.index.root == nil {
+ l.index.root = &root
+ }
+ l.parsed = &root
+ return &root, nil
+}
+
+// GetFileExtension returns the FileExtension of the file.
+func (l *LocalFile) GetFileExtension() FileExtension {
+ return l.extension
+}
+
+// GetFullPath returns the full path of the file.
+func (l *LocalFile) GetFullPath() string {
+ return l.fullPath
+}
+
+// GetErrors returns any errors that occurred during the indexing process.
+func (l *LocalFile) GetErrors() []error {
+ return l.readingErrors
+}
+
+// FullPath returns the full path of the file.
+func (l *LocalFile) FullPath() string {
+ return l.fullPath
+}
+
+// Name returns the name of the file.
+func (l *LocalFile) Name() string {
+ return l.name
+}
+
+// Size returns the size of the file.
+func (l *LocalFile) Size() int64 {
+ return int64(len(l.data))
+}
+
+// Mode returns the file mode bits for the file.
+func (l *LocalFile) Mode() fs.FileMode {
+ return fs.FileMode(0)
+}
+
+// ModTime returns the modification time of the file.
+func (l *LocalFile) ModTime() time.Time {
+ return l.lastModified
+}
+
+// IsDir returns true if the file is a directory; for a LocalFile it always returns false.
+func (l *LocalFile) IsDir() bool {
+ return false
+}
+
+// Sys returns the underlying data source (always returns nil)
+func (l *LocalFile) Sys() interface{} {
+ return nil
+}
+
+// Close closes the file (doesn't do anything, returns no error)
+func (l *LocalFile) Close() error {
+ return nil
+}
+
+// Stat returns the FileInfo for the file.
+func (l *LocalFile) Stat() (fs.FileInfo, error) {
+ return l, nil
+}
+
+// Read reads the file into a byte slice, makes it compatible with io.Reader.
+func (l *LocalFile) Read(b []byte) (int, error) {
+ if l.offset >= int64(len(l.GetContent())) {
+ return 0, io.EOF
+ }
+ if l.offset < 0 {
+ return 0, &fs.PathError{Op: "read", Path: l.GetFullPath(), Err: fs.ErrInvalid}
+ }
+ n := copy(b, l.GetContent()[l.offset:])
+ l.offset += int64(n)
+ return n, nil
+}
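+
+// Note (illustrative): because LocalFile implements Read, Close and Stat, it satisfies both fs.File
+// and io.Reader, so its raw bytes can be drained with the standard library,
+// e.g. data, _ := io.ReadAll(localFile).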
+
+// LocalFSConfig is the configuration for the LocalFS.
+type LocalFSConfig struct {
+ // the base directory to index
+ BaseDirectory string
+
+ // supply your own logger
+ Logger *slog.Logger
+
+ // supply a list of specific files to index only
+ FileFilters []string
+
+ // supply a custom fs.FS to use
+ DirFS fs.FS
+
+ // supply an index configuration to use
+ IndexConfig *SpecIndexConfig
+}
+
+// NewLocalFSWithConfig creates a new LocalFS with the supplied configuration.
+func NewLocalFSWithConfig(config *LocalFSConfig) (*LocalFS, error) {
+ var allErrors []error
+
+ log := config.Logger
+ if log == nil {
+ log = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ }
+
+ // if the base directory actually points to a single file, we're just going to index that file.
+ ext := filepath.Ext(config.BaseDirectory)
+ file := filepath.Base(config.BaseDirectory)
+
+ var absBaseDir string
+ absBaseDir, _ = filepath.Abs(config.BaseDirectory)
+
+ localFS := &LocalFS{
+ indexConfig: config.IndexConfig,
+ fsConfig: config,
+ logger: log,
+ baseDirectory: absBaseDir,
+ entryPointDirectory: config.BaseDirectory,
+ }
+
+ // if a directory filesystem is supplied, use that to walk the directory and pick up everything it finds.
+ if config.DirFS != nil {
+ walkErr := fs.WalkDir(config.DirFS, ".", func(p string, d fs.DirEntry, err error) error {
+ if err != nil {
+ return err
+ }
+
+ // we don't care about directories, just read every supported file we can.
+ if d.IsDir() {
+ return nil
+ }
+ if len(ext) > 2 && p != file {
+ return nil
+ }
+ if strings.HasPrefix(p, ".") {
+ return nil
+ }
+ if len(config.FileFilters) > 0 {
+ if !slices.Contains(config.FileFilters, p) {
+ return nil
+ }
+ }
+ _, fErr := localFS.extractFile(p)
+ return fErr
+ })
+
+ if walkErr != nil {
+ return nil, walkErr
+ }
+ }
+
+ localFS.readingErrors = allErrors
+ return localFS, nil
+}
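+
+// A minimal construction sketch; the directory and file names below are hypothetical and mirror
+// the tests rather than documented usage. Supplying a DirFS makes the constructor walk it and
+// pre-load matching JSON/YAML files; without one, files are loaded lazily by Open.
+//
+//    cfg := &LocalFSConfig{
+//        BaseDirectory: "./specs",
+//        DirFS:         os.DirFS("./specs"),
+//        FileFilters:   []string{"openapi.yaml", "components.yaml"},
+//    }
+//    lfs, err := NewLocalFSWithConfig(cfg)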
+
+func (l *LocalFS) extractFile(p string) (*LocalFile, error) {
+ extension := ExtractFileType(p)
+ var readingErrors []error
+ abs := p
+ config := l.fsConfig
+ if !filepath.IsAbs(p) {
+ if config != nil && config.BaseDirectory != "" {
+ abs, _ = filepath.Abs(filepath.Join(config.BaseDirectory, p))
+ } else {
+ abs, _ = filepath.Abs(p)
+ }
+ }
+ var fileData []byte
+
+ switch extension {
+ case YAML, JSON:
+ var file fs.File
+ var fileError error
+ if config != nil && config.DirFS != nil {
+ l.logger.Debug("[rolodex file loader]: collecting JSON/YAML file from dirFS", "file", abs)
+ file, _ = config.DirFS.Open(p)
+ } else {
+ l.logger.Debug("[rolodex file loader]: reading local file from OS", "file", abs)
+ file, fileError = os.Open(abs)
+ }
+
+ // if reading without a directory FS, error out on any error, do not continue.
+ if fileError != nil {
+ readingErrors = append(readingErrors, fileError)
+ return nil, fileError
+ }
+
+ modTime := time.Now()
+ stat, _ := file.Stat()
+ if stat != nil {
+ modTime = stat.ModTime()
+ }
+ fileData, _ = io.ReadAll(file)
+
+ lf := &LocalFile{
+ filename: p,
+ name: filepath.Base(p),
+ extension: ExtractFileType(p),
+ data: fileData,
+ fullPath: abs,
+ lastModified: modTime,
+ readingErrors: readingErrors,
+ }
+ l.Files.Store(abs, lf)
+ return lf, nil
+ case UNSUPPORTED:
+ if config != nil && config.DirFS != nil {
+ l.logger.Debug("[rolodex file loader]: skipping non JSON/YAML file", "file", abs)
+ }
+ }
+ return nil, nil
+}
diff --git a/index/rolodex_file_loader_test.go b/index/rolodex_file_loader_test.go
new file mode 100644
index 0000000..3240a54
--- /dev/null
+++ b/index/rolodex_file_loader_test.go
@@ -0,0 +1,324 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "github.com/stretchr/testify/assert"
+ "gopkg.in/yaml.v3"
+ "io"
+ "io/fs"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "testing"
+ "testing/fstest"
+ "time"
+)
+
+func TestRolodexLoadsFilesCorrectly_NoErrors(t *testing.T) {
+ t.Parallel()
+ testFS := fstest.MapFS{
+ "spec.yaml": {Data: []byte("hip"), ModTime: time.Now()},
+ "spock.yaml": {Data: []byte("hip: : hello: :\n:hw"), ModTime: time.Now()},
+ "subfolder/spec1.json": {Data: []byte("hop"), ModTime: time.Now()},
+ "subfolder2/spec2.yaml": {Data: []byte("chop"), ModTime: time.Now()},
+ "subfolder2/hello.jpg": {Data: []byte("shop"), ModTime: time.Now()},
+ }
+
+ fileFS, err := NewLocalFSWithConfig(&LocalFSConfig{
+ BaseDirectory: ".",
+ Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelDebug,
+ })),
+ DirFS: testFS,
+ })
+
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ files := fileFS.GetFiles()
+ assert.Len(t, files, 4)
+ assert.Len(t, fileFS.GetErrors(), 0)
+
+ key, _ := filepath.Abs(filepath.Join(fileFS.baseDirectory, "spec.yaml"))
+
+ localFile := files[key]
+ assert.NotNil(t, localFile)
+ assert.Nil(t, localFile.GetIndex())
+
+ lf := localFile.(*LocalFile)
+ idx, ierr := lf.Index(CreateOpenAPIIndexConfig())
+ assert.NoError(t, ierr)
+ assert.NotNil(t, idx)
+ assert.NotNil(t, localFile.GetContent())
+
+ d, e := localFile.GetContentAsYAMLNode()
+ assert.NoError(t, e)
+ assert.NotNil(t, d)
+ assert.NotNil(t, localFile.GetIndex())
+ assert.Equal(t, YAML, localFile.GetFileExtension())
+ assert.Equal(t, key, localFile.GetFullPath())
+ assert.Equal(t, "spec.yaml", lf.Name())
+ assert.Equal(t, int64(3), lf.Size())
+ assert.Equal(t, fs.FileMode(0), lf.Mode())
+ assert.False(t, lf.IsDir())
+ assert.Equal(t, time.Now().Unix(), lf.ModTime().Unix())
+ assert.Nil(t, lf.Sys())
+ assert.Nil(t, lf.Close())
+ q, w := lf.Stat()
+ assert.NotNil(t, q)
+ assert.NoError(t, w)
+
+ b, x := io.ReadAll(lf)
+ assert.Len(t, b, 3)
+ assert.NoError(t, x)
+
+ assert.Equal(t, key, lf.FullPath())
+ assert.Len(t, localFile.GetErrors(), 0)
+
+ // try and reindex
+ idx, ierr = lf.Index(CreateOpenAPIIndexConfig())
+ assert.NoError(t, ierr)
+ assert.NotNil(t, idx)
+
+ key, _ = filepath.Abs(filepath.Join(fileFS.baseDirectory, "spock.yaml"))
+
+ localFile = files[key]
+ assert.NotNil(t, localFile)
+ assert.Nil(t, localFile.GetIndex())
+
+ lf = localFile.(*LocalFile)
+ idx, ierr = lf.Index(CreateOpenAPIIndexConfig())
+ assert.Error(t, ierr)
+ assert.Nil(t, idx)
+ assert.NotNil(t, localFile.GetContent())
+ assert.Nil(t, localFile.GetIndex())
+
+}
+
+func TestRolodexLocalFS_NoConfig(t *testing.T) {
+
+ lfs := &LocalFS{}
+ f, e := lfs.Open("test.yaml")
+ assert.Nil(t, f)
+ assert.Error(t, e)
+}
+
+func TestRolodexLocalFS_NoLookup(t *testing.T) {
+
+ cf := CreateClosedAPIIndexConfig()
+ lfs := &LocalFS{indexConfig: cf}
+ f, e := lfs.Open("test.yaml")
+ assert.Nil(t, f)
+ assert.Error(t, e)
+}
+
+func TestRolodexLocalFS_BadAbsFile(t *testing.T) {
+
+ cf := CreateOpenAPIIndexConfig()
+ lfs := &LocalFS{indexConfig: cf}
+ f, e := lfs.Open("/test.yaml")
+ assert.Nil(t, f)
+ assert.Error(t, e)
+}
+
+func TestRolodexLocalFile_BadParse(t *testing.T) {
+
+ lf := &LocalFile{}
+ n, e := lf.GetContentAsYAMLNode()
+ assert.Nil(t, n)
+ assert.Error(t, e)
+ assert.Equal(t, "no data to parse for file: ", e.Error())
+}
+
+func TestRolodexLocalFile_NoIndexRoot(t *testing.T) {
+
+ lf := &LocalFile{data: []byte("burders"), index: &SpecIndex{}}
+ n, e := lf.GetContentAsYAMLNode()
+ assert.NotNil(t, n)
+ assert.NoError(t, e)
+
+}
+
+func TestRolodexLocalFS_NoBaseRelative(t *testing.T) {
+
+ lfs := &LocalFS{}
+ f, e := lfs.extractFile("test.jpg")
+ assert.Nil(t, f)
+ assert.NoError(t, e)
+}
+
+func TestRolodexLocalFile_IndexSingleFile(t *testing.T) {
+
+ testFS := fstest.MapFS{
+ "spec.yaml": {Data: []byte("hip"), ModTime: time.Now()},
+ "spock.yaml": {Data: []byte("hop"), ModTime: time.Now()},
+ "i-am-a-dir": {Mode: fs.FileMode(fs.ModeDir), ModTime: time.Now()},
+ }
+
+ fileFS, _ := NewLocalFSWithConfig(&LocalFSConfig{
+ BaseDirectory: "spec.yaml",
+ Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelDebug,
+ })),
+ DirFS: testFS,
+ })
+
+ files := fileFS.GetFiles()
+ assert.Len(t, files, 1)
+
+}
+
+func TestRolodexLocalFile_TestFilters(t *testing.T) {
+
+ testFS := fstest.MapFS{
+ "spec.yaml": {Data: []byte("hip"), ModTime: time.Now()},
+ "spock.yaml": {Data: []byte("pip"), ModTime: time.Now()},
+ "jam.jpg": {Data: []byte("sip"), ModTime: time.Now()},
+ }
+
+ fileFS, _ := NewLocalFSWithConfig(&LocalFSConfig{
+ BaseDirectory: ".",
+ FileFilters: []string{"spec.yaml", "spock.yaml", "jam.jpg"},
+ DirFS: testFS,
+ })
+ files := fileFS.GetFiles()
+ assert.Len(t, files, 2)
+
+}
+
+func TestRolodexLocalFile_TestBadFS(t *testing.T) {
+
+ testFS := test_badfs{}
+
+ fileFS, err := NewLocalFSWithConfig(&LocalFSConfig{
+ BaseDirectory: ".",
+ DirFS: &testFS,
+ })
+ assert.Error(t, err)
+ assert.Nil(t, fileFS)
+
+}
+
+func TestNewRolodexLocalFile_BadOffset(t *testing.T) {
+
+ lf := &LocalFile{offset: -1}
+ z, y := io.ReadAll(lf)
+ assert.Len(t, z, 0)
+ assert.Error(t, y)
+}
+
+func TestRecursiveLocalFile_IndexFail(t *testing.T) {
+
+ pup := []byte("I:\n miss you fox, you're: my good boy:")
+
+ var myPuppy yaml.Node
+ _ = yaml.Unmarshal(pup, &myPuppy)
+
+ _ = os.WriteFile("fox.yaml", pup, 0o664)
+ defer os.Remove("fox.yaml")
+
+ // create a new config that allows local and remote to be mixed up.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidBuildIndex = true
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&myPuppy)
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ IndexConfig: cf,
+ }
+
+ // create a new local filesystem.
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+ assert.NoError(t, err)
+
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+ rErr := rolo.IndexTheRolodex()
+
+ assert.NoError(t, rErr)
+
+ fox, fErr := rolo.Open("fox.yaml")
+ assert.NoError(t, fErr)
+ assert.NotNil(t, fox)
+ assert.Len(t, fox.GetErrors(), 1)
+ assert.Equal(t, "unable to parse specification: yaml: line 2: mapping values are not allowed in this context", fox.GetErrors()[0].Error())
+
+}
+
+func TestRecursiveLocalFile_MultipleRequests(t *testing.T) {
+
+ pup := []byte(`components:
+ schemas:
+ fox:
+ type: string
+ description: fox, such a good boy
+ cotton:
+ type: string
+ description: my good girl
+ properties:
+ fox:
+ $ref: 'fox.yaml#/components/schemas/fox'
+ foxy:
+ $ref: 'fox.yaml#/components/schemas/fox'
+ sgtfox:
+ $ref: 'fox.yaml#/components/schemas/fox'`)
+
+ var myPuppy yaml.Node
+ _ = yaml.Unmarshal(pup, &myPuppy)
+
+ _ = os.WriteFile("fox.yaml", pup, 0o664)
+ defer os.Remove("fox.yaml")
+
+ // create a new config that allows local and remote to be mixed up.
+ cf := CreateOpenAPIIndexConfig()
+ cf.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&myPuppy)
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ IndexConfig: cf,
+ }
+
+ // create a new local filesystem.
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+ assert.NoError(t, err)
+
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+ rolo.SetRootNode(&myPuppy)
+
+ c := make(chan RolodexFile)
+ run := func(i int) {
+ fox, fErr := rolo.Open("fox.yaml")
+ assert.NoError(t, fErr)
+ assert.NotNil(t, fox)
+ c <- fox
+ }
+
+ for i := 0; i < 10; i++ {
+ go run(i)
+ }
+
+ completed := 0
+ for completed < 10 {
+ <-c
+ completed++
+ }
+}
diff --git a/index/rolodex_ref_extractor.go b/index/rolodex_ref_extractor.go
new file mode 100644
index 0000000..a795fb8
--- /dev/null
+++ b/index/rolodex_ref_extractor.go
@@ -0,0 +1,58 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "fmt"
+ "strings"
+)
+
+const (
+ Local RefType = iota
+ File
+ HTTP
+)
+
+type RefType int
+
+type ExtractedRef struct {
+ Location string
+ Type RefType
+}
+
+// GetFile returns the file path of the reference.
+func (r *ExtractedRef) GetFile() string {
+ switch r.Type {
+ case File, HTTP:
+ location := strings.Split(r.Location, "#/")
+ return location[0]
+ default:
+ return r.Location
+ }
+}
+
+// GetReference returns the reference path of the reference.
+func (r *ExtractedRef) GetReference() string {
+ switch r.Type {
+ case File, HTTP:
+ location := strings.Split(r.Location, "#/")
+ return fmt.Sprintf("#/%s", location[1])
+ default:
+ return r.Location
+ }
+}
+
+// ExtractFileType returns the file extension of the reference.
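+// For example (illustrative): "petstore.yaml" and "petstore.yml" yield YAML, "petstore.json" yields
+// JSON, and anything else (such as "readme.md") yields UNSUPPORTED.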
+func ExtractFileType(ref string) FileExtension {
+ if strings.HasSuffix(ref, ".yaml") {
+ return YAML
+ }
+ if strings.HasSuffix(ref, ".yml") {
+ return YAML
+ }
+ if strings.HasSuffix(ref, ".json") {
+ return JSON
+ }
+ return UNSUPPORTED
+}
diff --git a/index/rolodex_ref_extractor_test.go b/index/rolodex_ref_extractor_test.go
new file mode 100644
index 0000000..419b2c4
--- /dev/null
+++ b/index/rolodex_ref_extractor_test.go
@@ -0,0 +1,35 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+func TestExtractedRef_GetFile(t *testing.T) {
+
+ a := &ExtractedRef{Location: "#/components/schemas/One", Type: Local}
+ assert.Equal(t, "#/components/schemas/One", a.GetFile())
+
+ a = &ExtractedRef{Location: "pizza.yaml#/components/schemas/One", Type: File}
+ assert.Equal(t, "pizza.yaml", a.GetFile())
+
+ a = &ExtractedRef{Location: "https://api.pb33f.io/openapi.yaml#/components/schemas/One", Type: File}
+ assert.Equal(t, "https://api.pb33f.io/openapi.yaml", a.GetFile())
+
+}
+
+func TestExtractedRef_GetReference(t *testing.T) {
+
+ a := &ExtractedRef{Location: "#/components/schemas/One", Type: Local}
+ assert.Equal(t, "#/components/schemas/One", a.GetReference())
+
+ a = &ExtractedRef{Location: "pizza.yaml#/components/schemas/One", Type: File}
+ assert.Equal(t, "#/components/schemas/One", a.GetReference())
+
+ a = &ExtractedRef{Location: "https://api.pb33f.io/openapi.yaml#/components/schemas/One", Type: File}
+ assert.Equal(t, "#/components/schemas/One", a.GetReference())
+
+}
diff --git a/index/rolodex_remote_loader.go b/index/rolodex_remote_loader.go
new file mode 100644
index 0000000..822c3c5
--- /dev/null
+++ b/index/rolodex_remote_loader.go
@@ -0,0 +1,450 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "errors"
+ "fmt"
+ "github.com/pb33f/libopenapi/datamodel"
+ "github.com/pb33f/libopenapi/utils"
+ "golang.org/x/sync/syncmap"
+ "gopkg.in/yaml.v3"
+ "io"
+ "io/fs"
+ "log/slog"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "time"
+)
+
+const (
+ YAML FileExtension = iota
+ JSON
+ UNSUPPORTED
+)
+
+// FileExtension is the type of file extension.
+type FileExtension int
+
+// RemoteFS is a file system that indexes remote files. It implements the fs.FS interface. Files are located remotely
+// and served via HTTP.
+type RemoteFS struct {
+ indexConfig *SpecIndexConfig
+ rootURL string
+ rootURLParsed *url.URL
+ RemoteHandlerFunc utils.RemoteURLHandler
+ Files syncmap.Map
+ ProcessingFiles syncmap.Map
+ FetchTime int64
+ FetchChannel chan *RemoteFile
+ remoteErrors []error
+ logger *slog.Logger
+ extractedFiles map[string]RolodexFile
+ rolodex *Rolodex
+}
+
+// RemoteFile is a file that has been indexed by the RemoteFS. It implements the RolodexFile interface.
+type RemoteFile struct {
+ filename string
+ name string
+ extension FileExtension
+ data []byte
+ fullPath string
+ URL *url.URL
+ lastModified time.Time
+ seekingErrors []error
+ index *SpecIndex
+ parsed *yaml.Node
+ offset int64
+}
+
+// GetFileName returns the name of the file.
+func (f *RemoteFile) GetFileName() string {
+ return f.filename
+}
+
+// GetContent returns the content of the file as a string.
+func (f *RemoteFile) GetContent() string {
+ return string(f.data)
+}
+
+// GetContentAsYAMLNode returns the content of the file as a yaml.Node.
+func (f *RemoteFile) GetContentAsYAMLNode() (*yaml.Node, error) {
+ if f.parsed != nil {
+ return f.parsed, nil
+ }
+ if f.index != nil && f.index.root != nil {
+ return f.index.root, nil
+ }
+ if f.data == nil {
+ return nil, fmt.Errorf("no data to parse for file: %s", f.fullPath)
+ }
+ var root yaml.Node
+ err := yaml.Unmarshal(f.data, &root)
+ if err != nil {
+ return nil, err
+ }
+ if f.index != nil && f.index.root == nil {
+ f.index.root = &root
+ }
+ f.parsed = &root
+ return &root, nil
+}
+
+// GetFileExtension returns the file extension of the file.
+func (f *RemoteFile) GetFileExtension() FileExtension {
+ return f.extension
+}
+
+// GetLastModified returns the last modified time of the file.
+func (f *RemoteFile) GetLastModified() time.Time {
+ return f.lastModified
+}
+
+// GetErrors returns any errors that occurred while reading the file.
+func (f *RemoteFile) GetErrors() []error {
+ return f.seekingErrors
+}
+
+// GetFullPath returns the full path of the file.
+func (f *RemoteFile) GetFullPath() string {
+ return f.fullPath
+}
+
+// fs.FileInfo interfaces
+
+// Name returns the name of the file.
+func (f *RemoteFile) Name() string {
+ return f.name
+}
+
+// Size returns the size of the file.
+func (f *RemoteFile) Size() int64 {
+ return int64(len(f.data))
+}
+
+// Mode returns the file mode bits for the file.
+func (f *RemoteFile) Mode() fs.FileMode {
+ return fs.FileMode(0)
+}
+
+// ModTime returns the modification time of the file.
+func (f *RemoteFile) ModTime() time.Time {
+ return f.lastModified
+}
+
+// IsDir returns true if the file is a directory; for a RemoteFile it always returns false.
+func (f *RemoteFile) IsDir() bool {
+ return false
+}
+
+// fs.File interfaces
+
+// Sys returns the underlying data source (always returns nil)
+func (f *RemoteFile) Sys() interface{} {
+ return nil
+}
+
+// Close closes the file (doesn't do anything, returns no error)
+func (f *RemoteFile) Close() error {
+ return nil
+}
+
+// Stat returns the FileInfo for the file.
+func (f *RemoteFile) Stat() (fs.FileInfo, error) {
+ return f, nil
+}
+
+// Read reads the file. Makes it compatible with io.Reader.
+func (f *RemoteFile) Read(b []byte) (int, error) {
+ if f.offset >= int64(len(f.data)) {
+ return 0, io.EOF
+ }
+ if f.offset < 0 {
+ return 0, &fs.PathError{Op: "read", Path: f.name, Err: fs.ErrInvalid}
+ }
+ n := copy(b, f.data[f.offset:])
+ f.offset += int64(n)
+ return n, nil
+}
+
+// Index indexes the file and returns a *SpecIndex, any errors are returned as well.
+func (f *RemoteFile) Index(config *SpecIndexConfig) (*SpecIndex, error) {
+ if f.index != nil {
+ return f.index, nil
+ }
+ content := f.data
+
+ // first, we must parse the content of the file
+ info, err := datamodel.ExtractSpecInfoWithDocumentCheck(content, true)
+ if err != nil {
+ return nil, err
+ }
+
+ index := NewSpecIndexWithConfig(info.RootNode, config)
+ index.specAbsolutePath = config.SpecAbsolutePath
+ f.index = index
+ return index, nil
+}
+
+// GetIndex returns the index for the file.
+func (f *RemoteFile) GetIndex() *SpecIndex {
+ return f.index
+}
+
+// NewRemoteFSWithConfig creates a new RemoteFS using the supplied SpecIndexConfig.
+func NewRemoteFSWithConfig(specIndexConfig *SpecIndexConfig) (*RemoteFS, error) {
+ if specIndexConfig == nil {
+ return nil, errors.New("no spec index config provided")
+ }
+ remoteRootURL := specIndexConfig.BaseURL
+ log := specIndexConfig.Logger
+ if log == nil {
+ log = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ }
+
+ rfs := &RemoteFS{
+ indexConfig: specIndexConfig,
+ logger: log,
+ rootURLParsed: remoteRootURL,
+ FetchChannel: make(chan *RemoteFile),
+ }
+ if remoteRootURL != nil {
+ rfs.rootURL = remoteRootURL.String()
+ }
+ if specIndexConfig.RemoteURLHandler != nil {
+ rfs.RemoteHandlerFunc = specIndexConfig.RemoteURLHandler
+ } else {
+ // default http client
+ client := &http.Client{
+ Timeout: time.Second * 120,
+ }
+ rfs.RemoteHandlerFunc = func(url string) (*http.Response, error) {
+ return client.Get(url)
+ }
+ }
+ return rfs, nil
+}
+
+// NewRemoteFSWithRootURL creates a new RemoteFS using the supplied root URL.
+func NewRemoteFSWithRootURL(rootURL string) (*RemoteFS, error) {
+ remoteRootURL, err := url.Parse(rootURL)
+ if err != nil {
+ return nil, err
+ }
+ config := CreateOpenAPIIndexConfig()
+ config.BaseURL = remoteRootURL
+ return NewRemoteFSWithConfig(config)
+}
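+
+// A minimal usage sketch; the root URL and file path below are hypothetical:
+//
+//    rfs, err := NewRemoteFSWithRootURL("https://example.com/specs")
+//    if err == nil {
+//        f, _ := rfs.Open("components/schemas.yaml") // resolved against the root URL before fetching
+//        _ = f
+//    }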
+
+// SetRemoteHandlerFunc sets the remote handler function.
+func (i *RemoteFS) SetRemoteHandlerFunc(handlerFunc utils.RemoteURLHandler) {
+ i.RemoteHandlerFunc = handlerFunc
+}
+
+// SetIndexConfig sets the index configuration.
+func (i *RemoteFS) SetIndexConfig(config *SpecIndexConfig) {
+ i.indexConfig = config
+}
+
+// GetFiles returns the files that have been indexed.
+func (i *RemoteFS) GetFiles() map[string]RolodexFile {
+ files := make(map[string]RolodexFile)
+ i.Files.Range(func(key, value interface{}) bool {
+ files[key.(string)] = value.(*RemoteFile)
+ return true
+ })
+ i.extractedFiles = files
+ return files
+}
+
+// GetErrors returns any errors that occurred during the indexing process.
+func (i *RemoteFS) GetErrors() []error {
+ return i.remoteErrors
+}
+
+type waiterRemote struct {
+ f string // the path of the remote file being fetched
+ done bool // flipped to true once the fetch has completed
+ file *RemoteFile // the fetched file, populated once done is true
+ listeners int // number of goroutines waiting for this fetch to complete
+}
+
+// Open opens a file, returning it or an error. If the file is not found, the error is of type *PathError.
+func (i *RemoteFS) Open(remoteURL string) (fs.File, error) {
+
+ if i.indexConfig != nil && !i.indexConfig.AllowRemoteLookup {
+ return nil, fmt.Errorf("remote lookup for '%s' is not allowed, please set "+
+ "AllowRemoteLookup to true as part of the index configuration", remoteURL)
+ }
+
+ remoteParsedURL, err := url.Parse(remoteURL)
+ if err != nil {
+ return nil, err
+ }
+ remoteParsedURLOriginal, _ := url.Parse(remoteURL)
+
+ // try path first
+ if r, ok := i.Files.Load(remoteParsedURL.Path); ok {
+ return r.(*RemoteFile), nil
+ }
+
+ // if the file is already being fetched by another goroutine, block and wait for it to complete
+ if r, ok := i.ProcessingFiles.Load(remoteParsedURL.Path); ok {
+
+ wait := r.(*waiterRemote)
+ wait.listeners++
+
+ i.logger.Debug("[rolodex remote loader] waiting for existing fetch to complete", "file", remoteURL,
+ "remoteURL", remoteParsedURL.String())
+
+ for !wait.done {
+ time.Sleep(500 * time.Nanosecond) // breathe for a few nanoseconds.
+ }
+
+ wait.listeners--
+ i.logger.Debug("[rolodex remote loader]: waiting done, remote completed, returning file", "file",
+ remoteParsedURL.String(), "listeners", wait.listeners)
+ return wait.file, nil
+ }
+
+ fileExt := ExtractFileType(remoteParsedURL.Path)
+
+ if fileExt == UNSUPPORTED {
+ i.remoteErrors = append(i.remoteErrors, fs.ErrInvalid)
+ if i.logger != nil {
+ i.logger.Warn("[rolodex remote loader] unsupported file in reference will be ignored", "file", remoteURL, "remoteURL", remoteParsedURL.String())
+ }
+ return nil, &fs.PathError{Op: "open", Path: remoteURL, Err: fs.ErrInvalid}
+ }
+
+ processingWaiter := &waiterRemote{f: remoteParsedURL.Path}
+
+ // add to processing
+ i.ProcessingFiles.Store(remoteParsedURL.Path, processingWaiter)
+
+ // if the remote URL is absolute (http:// or https://), and we have a rootURL defined, we need to override
+ // the host being defined by this URL, and use the rootURL instead, but keep the path.
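+ // For example (illustrative values): with a root URL of https://example.com/api, an absolute
+ // reference to http://other.host/schemas/pet.yaml is fetched from https://example.com/schemas/pet.yaml,
+ // while a relative reference like schemas/pet.yaml resolves to https://example.com/api/schemas/pet.yaml.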
+ if i.rootURLParsed != nil {
+ remoteParsedURL.Host = i.rootURLParsed.Host
+ remoteParsedURL.Scheme = i.rootURLParsed.Scheme
+ if !filepath.IsAbs(remoteParsedURL.Path) {
+ remoteParsedURL.Path = filepath.Join(i.rootURLParsed.Path, remoteParsedURL.Path)
+ }
+ }
+
+ if remoteParsedURL.Scheme == "" {
+ i.ProcessingFiles.Delete(remoteParsedURL.Path)
+ return nil, nil // not a remote file, nothing wrong with that - just we can't keep looking here partner.
+ }
+
+ i.logger.Debug("loading remote file", "file", remoteURL, "remoteURL", remoteParsedURL.String())
+
+ response, clientErr := i.RemoteHandlerFunc(remoteParsedURL.String())
+ if clientErr != nil {
+
+ i.remoteErrors = append(i.remoteErrors, clientErr)
+ // remove from processing
+ i.ProcessingFiles.Delete(remoteParsedURL.Path)
+ if response != nil {
+ i.logger.Error("client error", "error", clientErr, "status", response.StatusCode)
+ } else {
+ i.logger.Error("client error", "error", clientErr.Error())
+ }
+ return nil, clientErr
+ }
+ if response == nil {
+ // remove from processing
+ i.ProcessingFiles.Delete(remoteParsedURL.Path)
+
+ return nil, fmt.Errorf("empty response from remote URL: %s", remoteParsedURL.String())
+ }
+ responseBytes, readError := io.ReadAll(response.Body)
+ if readError != nil {
+
+ // remove from processing
+ i.ProcessingFiles.Delete(remoteParsedURL.Path)
+
+ return nil, fmt.Errorf("error reading bytes from remote file '%s': [%s]",
+ remoteParsedURL.String(), readError.Error())
+ }
+
+ if response.StatusCode >= 400 {
+
+ // remove from processing
+ i.ProcessingFiles.Delete(remoteParsedURL.Path)
+
+ i.logger.Error("unable to fetch remote document",
+ "file", remoteParsedURL.Path, "status", response.StatusCode, "resp", string(responseBytes))
+ return nil, fmt.Errorf("unable to fetch remote document: %s", string(responseBytes))
+ }
+
+ absolutePath, _ := filepath.Abs(remoteParsedURL.Path)
+
+ // extract last modified from response
+ lastModified := response.Header.Get("Last-Modified")
+
+ // parse the last modified date into a time object
+ lastModifiedTime, parseErr := time.Parse(time.RFC1123, lastModified)
+
+ if parseErr != nil {
+ // can't extract last modified, so use now
+ lastModifiedTime = time.Now()
+ }
+
+ filename := filepath.Base(remoteParsedURL.Path)
+
+ remoteFile := &RemoteFile{
+ filename: filename,
+ name: remoteParsedURL.Path,
+ extension: fileExt,
+ data: responseBytes,
+ fullPath: absolutePath,
+ URL: remoteParsedURL,
+ lastModified: lastModifiedTime,
+ }
+
+ copiedCfg := *i.indexConfig
+
+ newBase := fmt.Sprintf("%s://%s%s", remoteParsedURLOriginal.Scheme, remoteParsedURLOriginal.Host,
+ filepath.Dir(remoteParsedURL.Path))
+ newBaseURL, _ := url.Parse(newBase)
+
+ if newBaseURL != nil {
+ copiedCfg.BaseURL = newBaseURL
+ }
+ copiedCfg.SpecAbsolutePath = remoteParsedURL.String()
+
+ if len(remoteFile.data) > 0 {
+ i.logger.Debug("successfully loaded file", "file", absolutePath)
+ }
+
+ processingWaiter.file = remoteFile
+ processingWaiter.done = true
+
+ // remove from processing
+ i.ProcessingFiles.Delete(remoteParsedURL.Path)
+ i.Files.Store(absolutePath, remoteFile)
+
+ idx, idxError := remoteFile.Index(&copiedCfg)
+
+ if idxError != nil && idx == nil {
+ i.remoteErrors = append(i.remoteErrors, idxError)
+ } else {
+
+ // for each index, we need a resolver
+ resolver := NewResolver(idx)
+ idx.resolver = resolver
+ idx.BuildIndex()
+ if i.rolodex != nil {
+ i.rolodex.AddExternalIndex(idx, remoteParsedURL.String())
+ }
+ }
+ return remoteFile, errors.Join(i.remoteErrors...)
+}
diff --git a/index/rolodex_remote_loader_test.go b/index/rolodex_remote_loader_test.go
new file mode 100644
index 0000000..2e50e64
--- /dev/null
+++ b/index/rolodex_remote_loader_test.go
@@ -0,0 +1,410 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "errors"
+ "fmt"
+ "github.com/stretchr/testify/assert"
+ "io"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "testing"
+ "time"
+)
+
+var test_httpClient = &http.Client{Timeout: time.Duration(60) * time.Second}
+
+func test_buildServer() *httptest.Server {
+ return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
+ if req.URL.String() == "/file1.yaml" {
+ rw.Header().Set("Last-Modified", "Wed, 21 Oct 2015 07:28:00 GMT")
+ _, _ = rw.Write([]byte(`"$ref": "./deeper/file2.yaml#/components/schemas/Pet"`))
+ return
+ }
+ if req.URL.String() == "/deeper/file2.yaml" {
+ rw.Header().Set("Last-Modified", "Wed, 21 Oct 2015 08:28:00 GMT")
+ _, _ = rw.Write([]byte(`"$ref": "/deeper/even_deeper/file3.yaml#/components/schemas/Pet"`))
+ return
+ }
+
+ if req.URL.String() == "/deeper/even_deeper/file3.yaml" {
+ rw.Header().Set("Last-Modified", "Wed, 21 Oct 2015 10:28:00 GMT")
+ _, _ = rw.Write([]byte(`"$ref": "../file2.yaml#/components/schemas/Pet"`))
+ return
+ }
+
+ rw.Header().Set("Last-Modified", "Wed, 21 Oct 2015 12:28:00 GMT")
+
+ if req.URL.String() == "/deeper/list.yaml" {
+ _, _ = rw.Write([]byte(`"$ref": "../file2.yaml"`))
+ return
+ }
+
+ if req.URL.String() == "/bag/list.yaml" {
+ _, _ = rw.Write([]byte(`"$ref": "pocket/list.yaml"\n\n"$ref": "zip/things.yaml"`))
+ return
+ }
+
+ if req.URL.String() == "/bag/pocket/list.yaml" {
+ _, _ = rw.Write([]byte(`"$ref": "../list.yaml"\n\n"$ref": "../../file2.yaml"`))
+ return
+ }
+
+ if req.URL.String() == "/bag/pocket/things.yaml" {
+ _, _ = rw.Write([]byte(`"$ref": "list.yaml"`))
+ return
+ }
+
+ if req.URL.String() == "/bag/zip/things.yaml" {
+ _, _ = rw.Write([]byte(`"$ref": "list.yaml"`))
+ return
+ }
+
+ if req.URL.String() == "/bag/zip/list.yaml" {
+ _, _ = rw.Write([]byte(`"$ref": "../list.yaml"\n\n"$ref": "../../file1.yaml"\n\n"$ref": "more.yaml""`))
+ return
+ }
+
+ if req.URL.String() == "/bag/zip/more.yaml" {
+ _, _ = rw.Write([]byte(`"$ref": "../../deeper/list.yaml"\n\n"$ref": "../../bad.yaml"`))
+ return
+ }
+
+ if req.URL.String() == "/bad.yaml" {
+ rw.WriteHeader(http.StatusInternalServerError)
+ _, _ = rw.Write([]byte(`"error, cannot do the thing"`))
+ return
+ }
+
+ _, _ = rw.Write([]byte(`OK`))
+ }))
+}
+
+func TestNewRemoteFS_BasicCheck(t *testing.T) {
+
+ server := test_buildServer()
+ defer server.Close()
+
+ //remoteFS := NewRemoteFS("https://raw.githubusercontent.com/digitalocean/openapi/main/specification/")
+ remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
+ remoteFS.RemoteHandlerFunc = test_httpClient.Get
+
+ file, err := remoteFS.Open("/file1.yaml")
+
+ assert.NoError(t, err)
+
+ bytes, rErr := io.ReadAll(file)
+ assert.NoError(t, rErr)
+
+ stat, _ := file.Stat()
+
+ assert.Equal(t, "/file1.yaml", stat.Name())
+ assert.Equal(t, int64(53), stat.Size())
+ assert.Len(t, bytes, 53)
+
+ lastMod := stat.ModTime()
+ assert.Equal(t, "2015-10-21 07:28:00 +0000 GMT", lastMod.String())
+}
+
+func TestNewRemoteFS_BasicCheck_NoScheme(t *testing.T) {
+
+ server := test_buildServer()
+ defer server.Close()
+
+ remoteFS, _ := NewRemoteFSWithRootURL("")
+ remoteFS.RemoteHandlerFunc = test_httpClient.Get
+
+ file, err := remoteFS.Open("/file1.yaml")
+
+ assert.NoError(t, err)
+ assert.Nil(t, file)
+}
+
+func TestNewRemoteFS_BasicCheck_Relative(t *testing.T) {
+
+ server := test_buildServer()
+ defer server.Close()
+
+ remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
+ remoteFS.RemoteHandlerFunc = test_httpClient.Get
+
+ file, err := remoteFS.Open("/deeper/file2.yaml")
+
+ assert.NoError(t, err)
+
+ bytes, rErr := io.ReadAll(file)
+ assert.NoError(t, rErr)
+
+ assert.Len(t, bytes, 64)
+
+ stat, _ := file.Stat()
+
+ assert.Equal(t, "/deeper/file2.yaml", stat.Name())
+ assert.Equal(t, int64(64), stat.Size())
+
+ lastMod := stat.ModTime()
+ assert.Equal(t, "2015-10-21 08:28:00 +0000 GMT", lastMod.String())
+}
+
+func TestNewRemoteFS_BasicCheck_Relative_Deeper(t *testing.T) {
+
+ server := test_buildServer()
+ defer server.Close()
+
+ cf := CreateOpenAPIIndexConfig()
+ u, _ := url.Parse(server.URL)
+ cf.BaseURL = u
+
+ remoteFS, _ := NewRemoteFSWithConfig(cf)
+ remoteFS.RemoteHandlerFunc = test_httpClient.Get
+
+ file, err := remoteFS.Open("/deeper/even_deeper/file3.yaml")
+
+ assert.NoError(t, err)
+
+ bytes, rErr := io.ReadAll(file)
+ assert.NoError(t, rErr)
+
+ assert.Len(t, bytes, 47)
+
+ stat, _ := file.Stat()
+
+ assert.Equal(t, "/deeper/even_deeper/file3.yaml", stat.Name())
+ assert.Equal(t, int64(47), stat.Size())
+ assert.Equal(t, "/deeper/even_deeper/file3.yaml", file.(*RemoteFile).Name())
+ assert.Equal(t, "file3.yaml", file.(*RemoteFile).GetFileName())
+ assert.Len(t, file.(*RemoteFile).GetContent(), 47)
+ assert.Equal(t, YAML, file.(*RemoteFile).GetFileExtension())
+ assert.NotNil(t, file.(*RemoteFile).GetLastModified())
+ assert.Len(t, file.(*RemoteFile).GetErrors(), 0)
+ assert.Equal(t, "/deeper/even_deeper/file3.yaml", file.(*RemoteFile).GetFullPath())
+ assert.False(t, file.(*RemoteFile).IsDir())
+ assert.Nil(t, file.(*RemoteFile).Sys())
+ assert.Nil(t, file.(*RemoteFile).Close())
+
+ lastMod := stat.ModTime()
+ assert.Equal(t, "2015-10-21 10:28:00 +0000 GMT", lastMod.String())
+}
+
+func TestRemoteFile_NoContent(t *testing.T) {
+
+ rf := &RemoteFile{}
+ x, y := rf.GetContentAsYAMLNode()
+ assert.Nil(t, x)
+ assert.Error(t, y)
+}
+
+func TestRemoteFile_BadContent(t *testing.T) {
+
+ rf := &RemoteFile{data: []byte("bad: data: on: a single: line: makes: for: unhappy: yaml"), index: &SpecIndex{}}
+ x, y := rf.GetContentAsYAMLNode()
+ assert.Nil(t, x)
+ assert.Error(t, y)
+}
+
+func TestRemoteFile_GoodContent(t *testing.T) {
+
+ rf := &RemoteFile{data: []byte("good: data"), index: &SpecIndex{}}
+ x, y := rf.GetContentAsYAMLNode()
+ assert.NotNil(t, x)
+ assert.NoError(t, y)
+ assert.NotNil(t, rf.index.root)
+
+ // bad read
+ rf.offset = -1
+ d, err := io.ReadAll(rf)
+ assert.Empty(t, d)
+ assert.Error(t, err)
+
+}
+
+func TestRemoteFile_Index_AlreadySet(t *testing.T) {
+
+ rf := &RemoteFile{data: []byte("good: data"), index: &SpecIndex{}}
+ x, y := rf.Index(&SpecIndexConfig{})
+ assert.NotNil(t, x)
+ assert.NoError(t, y)
+
+}
+
+func TestRemoteFile_Index_BadContent(t *testing.T) {
+
+ rf := &RemoteFile{data: []byte("no: sleep: until: the bugs: weep")}
+ x, y := rf.Index(&SpecIndexConfig{})
+ assert.Nil(t, x)
+ assert.Error(t, y)
+
+}
+
+func TestRemoteFS_NoConfig(t *testing.T) {
+
+ x, y := NewRemoteFSWithConfig(nil)
+ assert.Nil(t, x)
+ assert.Error(t, y)
+
+}
+
+func TestRemoteFS_SetRemoteHandler(t *testing.T) {
+
+ h := func(url string) (*http.Response, error) {
+ return nil, errors.New("nope")
+ }
+ cf := CreateClosedAPIIndexConfig()
+ cf.RemoteURLHandler = h
+
+ x, y := NewRemoteFSWithConfig(cf)
+ assert.NotNil(t, x)
+ assert.NoError(t, y)
+ assert.NotNil(t, x.RemoteHandlerFunc)
+
+ assert.NotNil(t, x.RemoteHandlerFunc)
+
+ x.SetRemoteHandlerFunc(h)
+ assert.NotNil(t, x.RemoteHandlerFunc)
+
+ // run the handler
+ i, n := x.RemoteHandlerFunc("http://www.google.com")
+ assert.Nil(t, i)
+ assert.Error(t, n)
+ assert.Equal(t, "nope", n.Error())
+
+}
+
+func TestRemoteFS_NoConfigBadURL(t *testing.T) {
+ x, y := NewRemoteFSWithRootURL("I am not a URL. I am a potato.: no.... // no.")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+}
+
+func TestNewRemoteFS_Open_NoConfig(t *testing.T) {
+
+ rfs := &RemoteFS{}
+ x, y := rfs.Open("https://pb33f.io")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+
+}
+
+func TestNewRemoteFS_Open_ConfigNotAllowed(t *testing.T) {
+
+ rfs := &RemoteFS{indexConfig: CreateClosedAPIIndexConfig()}
+ x, y := rfs.Open("https://pb33f.io")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+
+}
+
+func TestNewRemoteFS_Open_BadURL(t *testing.T) {
+
+ rfs := &RemoteFS{indexConfig: CreateOpenAPIIndexConfig()}
+ x, y := rfs.Open("I am not a URL. I am a box of candy.. yum yum yum:: in my tum tum tum")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+
+}
+
+func TestNewRemoteFS_RemoteBaseURL_RelativeRequest(t *testing.T) {
+
+ cf := CreateOpenAPIIndexConfig()
+ h := func(url string) (*http.Response, error) {
+ return nil, fmt.Errorf("nope, not having it %s", url)
+ }
+ cf.RemoteURLHandler = h
+
+ cf.BaseURL, _ = url.Parse("https://pb33f.io/the/love/machine")
+ rfs, _ := NewRemoteFSWithConfig(cf)
+
+ x, y := rfs.Open("gib/gab/jib/jab.yaml")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+ assert.Equal(t, "nope, not having it https://pb33f.io/the/love/machine/gib/gab/jib/jab.yaml", y.Error())
+
+}
+
+func TestNewRemoteFS_RemoteBaseURL_BadRequestButContainsBody(t *testing.T) {
+
+ cf := CreateOpenAPIIndexConfig()
+ h := func(url string) (*http.Response, error) {
+ return &http.Response{}, fmt.Errorf("it's bad, but who cares %s", url)
+ }
+ cf.RemoteURLHandler = h
+
+ cf.BaseURL, _ = url.Parse("https://pb33f.io/the/love/machine")
+ rfs, _ := NewRemoteFSWithConfig(cf)
+
+ x, y := rfs.Open("/woof.yaml")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+ assert.Equal(t, "it's bad, but who cares https://pb33f.io/woof.yaml", y.Error())
+
+}
+
+func TestNewRemoteFS_RemoteBaseURL_NoErrorNoResponse(t *testing.T) {
+
+ cf := CreateOpenAPIIndexConfig()
+ h := func(url string) (*http.Response, error) {
+ return nil, nil // useless!
+ }
+ cf.RemoteURLHandler = h
+
+ cf.BaseURL, _ = url.Parse("https://pb33f.io/the/love/machine")
+ rfs, _ := NewRemoteFSWithConfig(cf)
+
+ x, y := rfs.Open("/woof.yaml")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+ assert.Equal(t, "empty response from remote URL: https://pb33f.io/woof.yaml", y.Error())
+}
+
+func TestNewRemoteFS_RemoteBaseURL_ReadBodyFail(t *testing.T) {
+
+ cf := CreateOpenAPIIndexConfig()
+ h := func(url string) (*http.Response, error) {
+ r := &http.Response{}
+ r.Body = &LocalFile{offset: -1} // read will fail.
+ return r, nil
+ }
+ cf.RemoteURLHandler = h
+
+ cf.BaseURL, _ = url.Parse("https://pb33f.io/the/love/machine")
+ rfs, _ := NewRemoteFSWithConfig(cf)
+
+ x, y := rfs.Open("/woof.yaml")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+ assert.Equal(t, "error reading bytes from remote file 'https://pb33f.io/woof.yaml': "+
+ "[read : invalid argument]", y.Error())
+}
+
+func TestNewRemoteFS_RemoteBaseURL_EmptySpecFailIndex(t *testing.T) {
+
+ cf := CreateOpenAPIIndexConfig()
+ h := func(url string) (*http.Response, error) {
+ r := &http.Response{}
+ r.Body = &LocalFile{data: []byte{}} // no bytes to read.
+ return r, nil
+ }
+ cf.RemoteURLHandler = h
+
+ cf.BaseURL, _ = url.Parse("https://pb33f.io/the/love/machine")
+ rfs, _ := NewRemoteFSWithConfig(cf)
+
+ x, y := rfs.Open("/woof.yaml")
+ assert.NotNil(t, x)
+ assert.Error(t, y)
+ assert.Equal(t, "there is nothing in the spec, it's empty - so there is nothing to be done", y.Error())
+}
+
+func TestNewRemoteFS_Unsupported(t *testing.T) {
+
+ cf := CreateOpenAPIIndexConfig()
+ rfs, _ := NewRemoteFSWithConfig(cf)
+
+ x, y := rfs.Open("/woof.png")
+ assert.Nil(t, x)
+ assert.Error(t, y)
+ assert.Equal(t, "open /woof.png: invalid argument", y.Error())
+}
diff --git a/index/rolodex_test.go b/index/rolodex_test.go
new file mode 100644
index 0000000..f45c181
--- /dev/null
+++ b/index/rolodex_test.go
@@ -0,0 +1,1641 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "github.com/stretchr/testify/assert"
+ "gopkg.in/yaml.v3"
+ "io"
+ "io/fs"
+ "log/slog"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "os"
+ "strings"
+ "testing"
+ "testing/fstest"
+ "time"
+)
+
+func TestRolodex_NewRolodex(t *testing.T) {
+ c := CreateOpenAPIIndexConfig()
+ rolo := NewRolodex(c)
+ assert.NotNil(t, rolo)
+ assert.NotNil(t, rolo.indexConfig)
+ assert.Nil(t, rolo.GetIgnoredCircularReferences())
+ assert.Equal(t, rolo.GetIndexingDuration(), time.Duration(0))
+ assert.Nil(t, rolo.GetRootIndex())
+ assert.Len(t, rolo.GetIndexes(), 0)
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+}
+
+func TestRolodex_NoFS(t *testing.T) {
+
+ rolo := NewRolodex(CreateOpenAPIIndexConfig())
+ rf, err := rolo.Open("spec.yaml")
+ assert.Error(t, err)
+ assert.Equal(t, "rolodex has no file systems configured, cannot open 'spec.yaml'. "+
+ "Add a BaseURL or BasePath to your configuration so the rolodex knows how to resolve references", err.Error())
+ assert.Nil(t, rf)
+
+}
+
+func TestRolodex_LocalNativeFS(t *testing.T) {
+
+ t.Parallel()
+ testFS := fstest.MapFS{
+ "spec.yaml": {Data: []byte("hip"), ModTime: time.Now()},
+ "subfolder/spec1.json": {Data: []byte("hop"), ModTime: time.Now()},
+ "subfolder2/spec2.yaml": {Data: []byte("chop"), ModTime: time.Now()},
+ "subfolder2/hello.jpg": {Data: []byte("shop"), ModTime: time.Now()},
+ }
+
+ baseDir := "/tmp"
+
+ fileFS, err := NewLocalFSWithConfig(&LocalFSConfig{
+ BaseDirectory: baseDir,
+ Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelDebug,
+ })),
+ DirFS: testFS,
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rolo := NewRolodex(CreateOpenAPIIndexConfig())
+ rolo.AddLocalFS(baseDir, fileFS)
+
+ f, rerr := rolo.Open("spec.yaml")
+ assert.NoError(t, rerr)
+ assert.Equal(t, "hip", f.GetContent())
+
+}
+
+func TestRolodex_LocalNonNativeFS(t *testing.T) {
+
+ t.Parallel()
+ testFS := fstest.MapFS{
+ "spec.yaml": {Data: []byte("hip"), ModTime: time.Now()},
+ "subfolder/spec1.json": {Data: []byte("hop"), ModTime: time.Now()},
+ "subfolder2/spec2.yaml": {Data: []byte("chop"), ModTime: time.Now()},
+ "subfolder2/hello.jpg": {Data: []byte("shop"), ModTime: time.Now()},
+ }
+
+ baseDir := ""
+
+ rolo := NewRolodex(CreateOpenAPIIndexConfig())
+ rolo.AddLocalFS(baseDir, testFS)
+
+ f, rerr := rolo.Open("spec.yaml")
+ assert.NoError(t, rerr)
+
+ assert.Equal(t, "hip", f.GetContent())
+}
+
+type test_badfs struct {
+ ok bool
+ goodstat bool
+ offset int64
+}
+
+func (t *test_badfs) Open(v string) (fs.File, error) {
+ ok := false
+ if v != "/" && v != "." && v != "http://localhost/test.yaml" {
+ ok = true
+ }
+ if v == "http://localhost/goodstat.yaml" || strings.HasSuffix(v, "goodstat.yaml") {
+ ok = true
+ t.goodstat = true
+ }
+ if v == "http://localhost/badstat.yaml" || v == "badstat.yaml" {
+ ok = true
+ t.goodstat = false
+ }
+ return &test_badfs{ok: ok, goodstat: t.goodstat}, nil
+}
+func (t *test_badfs) Stat() (fs.FileInfo, error) {
+ if t.goodstat {
+ return &LocalFile{
+ lastModified: time.Now(),
+ }, nil
+ }
+ return nil, os.ErrInvalid
+}
+func (t *test_badfs) Read(b []byte) (int, error) {
+ if t.ok {
+ if t.offset >= int64(len("pizza")) {
+ return 0, io.EOF
+ }
+ if t.offset < 0 {
+ return 0, &fs.PathError{Op: "read", Path: "lemons", Err: fs.ErrInvalid}
+ }
+ n := copy(b, "pizza"[t.offset:])
+ t.offset += int64(n)
+ return n, nil
+ }
+ return 0, os.ErrNotExist
+}
+func (t *test_badfs) Close() error {
+ return os.ErrNotExist
+}
+
+func TestRolodex_LocalNonNativeFS_BadRead(t *testing.T) {
+
+ t.Parallel()
+ testFS := &test_badfs{}
+
+ baseDir := ""
+
+ rolo := NewRolodex(CreateOpenAPIIndexConfig())
+ rolo.AddLocalFS(baseDir, testFS)
+
+ f, rerr := rolo.Open("/")
+ assert.Nil(t, f)
+ assert.Error(t, rerr)
+ assert.Equal(t, "file does not exist", rerr.Error())
+
+}
+
+func TestRolodex_LocalNonNativeFS_BadStat(t *testing.T) {
+
+ t.Parallel()
+ testFS := &test_badfs{}
+
+ baseDir := ""
+
+ rolo := NewRolodex(CreateOpenAPIIndexConfig())
+ rolo.AddLocalFS(baseDir, testFS)
+
+ f, rerr := rolo.Open("badstat.yaml")
+ assert.Nil(t, f)
+ assert.Error(t, rerr)
+ assert.Equal(t, "invalid argument", rerr.Error())
+
+}
+
+func TestRolodex_LocalNonNativeRemoteFS_BadRead(t *testing.T) {
+
+ t.Parallel()
+ testFS := &test_badfs{}
+
+ baseDir := ""
+
+ rolo := NewRolodex(CreateOpenAPIIndexConfig())
+ rolo.AddRemoteFS(baseDir, testFS)
+
+ f, rerr := rolo.Open("http://localhost/test.yaml")
+ assert.Nil(t, f)
+ assert.Error(t, rerr)
+ assert.Equal(t, "file does not exist", rerr.Error())
+}
+
+func TestRolodex_LocalNonNativeRemoteFS_ReadFile(t *testing.T) {
+
+ t.Parallel()
+ testFS := &test_badfs{}
+
+ baseDir := ""
+
+ rolo := NewRolodex(CreateOpenAPIIndexConfig())
+ rolo.AddRemoteFS(baseDir, testFS)
+
+ r, rerr := rolo.Open("http://localhost/goodstat.yaml")
+ assert.NotNil(t, r)
+ assert.NoError(t, rerr)
+
+ assert.Equal(t, "goodstat.yaml", r.Name())
+ assert.Nil(t, r.GetIndex())
+ assert.Equal(t, "pizza", r.GetContent())
+ assert.Equal(t, "http://localhost/goodstat.yaml", r.GetFullPath())
+ assert.Equal(t, time.Now().UnixMilli(), r.ModTime().UnixMilli())
+ assert.Equal(t, int64(5), r.Size())
+ assert.False(t, r.IsDir())
+ assert.Nil(t, r.Sys())
+ assert.Equal(t, r.Mode(), os.FileMode(0))
+ n, e := r.GetContentAsYAMLNode()
+ assert.Len(t, r.GetErrors(), 0)
+ assert.NoError(t, e)
+ assert.NotNil(t, n)
+ assert.Equal(t, YAML, r.GetFileExtension())
+}
+
+func TestRolodex_LocalNonNativeRemoteFS_BadStat(t *testing.T) {
+
+ t.Parallel()
+ testFS := &test_badfs{}
+
+ baseDir := ""
+
+ rolo := NewRolodex(CreateOpenAPIIndexConfig())
+ rolo.AddRemoteFS(baseDir, testFS)
+
+ f, rerr := rolo.Open("http://localhost/badstat.yaml")
+ assert.Nil(t, f)
+ assert.Error(t, rerr)
+ assert.Equal(t, "invalid argument", rerr.Error())
+
+}
+
+func TestRolodex_rolodexFileTests(t *testing.T) {
+ r := &rolodexFile{}
+ assert.Equal(t, "", r.Name())
+ assert.Nil(t, r.GetIndex())
+ assert.Equal(t, "", r.GetContent())
+ assert.Equal(t, "", r.GetFullPath())
+ assert.Equal(t, time.Now().UnixMilli(), r.ModTime().UnixMilli())
+ assert.Equal(t, int64(0), r.Size())
+ assert.False(t, r.IsDir())
+ assert.Nil(t, r.Sys())
+ assert.Equal(t, r.Mode(), os.FileMode(0))
+ n, e := r.GetContentAsYAMLNode()
+ assert.Len(t, r.GetErrors(), 0)
+ assert.NoError(t, e)
+ assert.Nil(t, n)
+ assert.Equal(t, UNSUPPORTED, r.GetFileExtension())
+}
+
+func TestRolodex_NotRolodexFS(t *testing.T) {
+
+ nonRoloFS := os.DirFS(".")
+ cf := CreateOpenAPIIndexConfig()
+ rolo := NewRolodex(cf)
+ rolo.AddLocalFS(".", nonRoloFS)
+
+ err := rolo.IndexTheRolodex()
+
+ assert.Error(t, err)
+ assert.Equal(t, "rolodex file system is not a RolodexFS", err.Error())
+
+}
+
+func TestRolodex_IndexCircularLookup(t *testing.T) {
+
+ offToOz := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ $ref: "../test_specs/circular-tests.yaml#/components/schemas/One"`
+
+ _ = os.WriteFile("off_to_oz.yaml", []byte(offToOz), 0644)
+ defer os.Remove("off_to_oz.yaml")
+
+ baseDir := "../"
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: []string{
+ "off_to_oz.yaml",
+ "test_specs/circular-tests.yaml",
+ },
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = baseDir
+ rolodex := NewRolodex(cf)
+ rolodex.AddLocalFS(baseDir, fileFS)
+ err = rolodex.IndexTheRolodex()
+ assert.Error(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 3)
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 0)
+}
+
+func TestRolodex_IndexCircularLookup_AroundWeGo(t *testing.T) {
+
+ there := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: object
+ required:
+ - where
+ properties:
+ where:
+ $ref: "back-again.yaml#/components/schemas/CircleTest/properties/muffins"`
+
+ backagain := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ $ref: "there.yaml#/components/schemas/CircleTest"`
+
+ _ = os.WriteFile("there.yaml", []byte(there), 0644)
+ _ = os.WriteFile("back-again.yaml", []byte(backagain), 0644)
+ defer os.Remove("there.yaml")
+ defer os.Remove("back-again.yaml")
+
+ baseDir := "."
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: []string{
+ "there.yaml",
+ "back-again.yaml",
+ },
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = baseDir
+ rolodex := NewRolodex(cf)
+ rolodex.AddLocalFS(baseDir, fileFS)
+ err = rolodex.IndexTheRolodex()
+ assert.Error(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 1)
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 0)
+}
+
+func TestRolodex_IndexCircularLookup_AroundWeGo_IgnorePoly(t *testing.T) {
+
+ fifth := "type: string"
+
+ fourth := `type: "object"
+properties:
+ name:
+ type: "string"
+ children:
+ type: "object"`
+
+ third := `type: "object"
+properties:
+ name:
+ $ref: "http://the-space-race-is-all-about-space-and-time-dot.com/fourth.yaml"
+ tame:
+ $ref: "http://the-space-race-is-all-about-space-and-time-dot.com/fifth.yaml#/"
+ blame:
+ $ref: "fifth.yaml"
+
+ fame:
+ $ref: "$PWD/fourth.yaml#/properties/name"
+ game:
+ $ref: "$PWD/fifth.yaml"
+
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "second.yaml#/components/schemas/CircleTest"
+required:
+ - children`
+
+ second := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "third.yaml"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ $ref: "second.yaml#/components/schemas/CircleTest"`
+
+ cwd, _ := os.Getwd()
+
+ _ = os.WriteFile("third.yaml", []byte(strings.ReplaceAll(third, "$PWD", cwd)), 0644)
+ _ = os.WriteFile("second.yaml", []byte(second), 0644)
+ _ = os.WriteFile("first.yaml", []byte(first), 0644)
+ _ = os.WriteFile("fourth.yaml", []byte(fourth), 0644)
+ _ = os.WriteFile("fifth.yaml", []byte(fifth), 0644)
+ defer os.Remove("first.yaml")
+ defer os.Remove("second.yaml")
+ defer os.Remove("third.yaml")
+ defer os.Remove("fourth.yaml")
+ defer os.Remove("fifth.yaml")
+
+ baseDir := "."
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: []string{
+ "first.yaml",
+ "second.yaml",
+ "third.yaml",
+ "fourth.yaml",
+ "fifth.yaml",
+ },
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = baseDir
+ cf.IgnorePolymorphicCircularReferences = true
+ cf.SkipDocumentCheck = true
+ rolodex := NewRolodex(cf)
+ rolodex.AddLocalFS(baseDir, fileFS)
+
+ srv := test_rolodexDeepRefServer([]byte(first), []byte(second),
+ []byte(strings.ReplaceAll(third, "$PWD", cwd)), []byte(fourth), []byte(fifth))
+ defer srv.Close()
+
+ u, _ := url.Parse(srv.URL)
+ cf.BaseURL = u
+ remoteFS, rErr := NewRemoteFSWithConfig(cf)
+ assert.NoError(t, rErr)
+
+ rolodex.AddRemoteFS(srv.URL, remoteFS)
+
+ err = rolodex.IndexTheRolodex()
+ assert.NoError(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+
+ // there are two circles: one found when reading the journey from first.yaml, and a second internal loop in second.yaml.
+ // the index won't find a third, because by the time 'third.yaml' has been read, it has already been indexed and the
+ // journey discovered.
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 2)
+
+ // extract a local file
+ f, _ := rolodex.Open("first.yaml")
+ // index
+ x, y := f.(*rolodexFile).Index(cf)
+ assert.NotNil(t, x)
+ assert.NoError(t, y)
+
+ // re-index
+ x, y = f.(*rolodexFile).Index(cf)
+ assert.NotNil(t, x)
+ assert.NoError(t, y)
+
+ // extract a remote file
+ f, _ = rolodex.Open("http://the-space-race-is-all-about-space-and-time-dot.com/fourth.yaml")
+
+ // index
+ x, y = f.(*rolodexFile).Index(cf)
+ assert.NotNil(t, x)
+ assert.NoError(t, y)
+
+ // re-index
+ x, y = f.(*rolodexFile).Index(cf)
+ assert.NotNil(t, x)
+ assert.NoError(t, y)
+
+ // extract another remote file
+ f, _ = rolodex.Open("http://the-space-race-is-all-about-space-and-time-dot.com/fifth.yaml")
+
+ // change cf to perform the document check (which should fail)
+ cf.SkipDocumentCheck = false
+
+ // index and fail
+ x, y = f.(*rolodexFile).Index(cf)
+ assert.Nil(t, x)
+ assert.Error(t, y)
+}
+
+func test_rolodexDeepRefServer(a, b, c, d, e []byte) *httptest.Server {
+ return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
+ rw.Header().Set("Last-Modified", "Wed, 21 Oct 2015 12:28:00 GMT")
+ if strings.HasSuffix(req.URL.String(), "/first.yaml") {
+ _, _ = rw.Write(a)
+ return
+ }
+ if strings.HasSuffix(req.URL.String(), "/second.yaml") {
+ _, _ = rw.Write(b)
+ return
+ }
+ if strings.HasSuffix(req.URL.String(), "/third.yaml") {
+ _, _ = rw.Write(c)
+ return
+ }
+ if strings.HasSuffix(req.URL.String(), "/fourth.yaml") {
+ _, _ = rw.Write(d)
+ return
+ }
+ if strings.HasSuffix(req.URL.String(), "/fifth.yaml") {
+ _, _ = rw.Write(e)
+ return
+ }
+ rw.WriteHeader(http.StatusInternalServerError)
+ rw.Write([]byte("500 - COMPUTAR SAYS NO!"))
+ }))
+}
+
+func TestRolodex_IndexCircularLookup_PolyItems_LocalLoop_WithFiles_RecursiveLookup(t *testing.T) {
+
+ fourth := `type: "object"
+properties:
+ name:
+ type: "string"
+ children:
+ type: "object"`
+
+ third := `type: "object"
+properties:
+ name:
+ $ref: "http://the-space-race-is-all-about-space-and-time-dot.com/fourth.yaml"`
+
+ second := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ bing:
+ $ref: "not_found.yaml"
+ name:
+ type: "string"
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "third.yaml"
+ required:
+ - "name"
+ - "children"`
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ $ref: "second_n.yaml#/components/schemas/CircleTest"`
+
+ cwd, _ := os.Getwd()
+
+ _ = os.WriteFile("third_n.yaml", []byte(strings.ReplaceAll(third, "$PWD", cwd)), 0644)
+ _ = os.WriteFile("second_n.yaml", []byte(second), 0644)
+ _ = os.WriteFile("first_n.yaml", []byte(first), 0644)
+ _ = os.WriteFile("fourth_n.yaml", []byte(fourth), 0644)
+ defer os.Remove("first_n.yaml")
+ defer os.Remove("second_n.yaml")
+ defer os.Remove("third_n.yaml")
+ defer os.Remove("fourth_n.yaml")
+
+ baseDir := "."
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = baseDir
+ cf.IgnorePolymorphicCircularReferences = true
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ IndexConfig: cf,
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rolodex := NewRolodex(cf)
+ rolodex.AddLocalFS(baseDir, fileFS)
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(first), &rootNode)
+ rolodex.SetRootNode(&rootNode)
+
+ srv := test_rolodexDeepRefServer([]byte(first), []byte(second),
+ []byte(strings.ReplaceAll(third, "$PWD", cwd)), []byte(fourth), nil)
+ defer srv.Close()
+
+ u, _ := url.Parse(srv.URL)
+ cf.BaseURL = u
+ remoteFS, rErr := NewRemoteFSWithConfig(cf)
+ assert.NoError(t, rErr)
+
+ rolodex.AddRemoteFS(srv.URL, remoteFS)
+
+ err = rolodex.IndexTheRolodex()
+ assert.Error(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 2)
+}
+
+func TestRolodex_IndexCircularLookup_PolyItems_LocalLoop_WithFiles(t *testing.T) {
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ oneOf:
+ items:
+ $ref: "second_a.yaml#/components/schemas/CircleTest"
+ required:
+ - "name"
+ - "children"
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ type: object
+ anyOf:
+ - $ref: "#/components/schemas/CircleTest"`
+
+ second := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ oneOf:
+ items:
+ $ref: "#/components/schemas/CircleTest"
+ required:
+ - "name"
+ - "children"
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ type: object
+ anyOf:
+ - $ref: "#/components/schemas/CircleTest"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(first), &rootNode)
+
+ _ = os.WriteFile("second_a.yaml", []byte(second), 0644)
+ _ = os.WriteFile("first_a.yaml", []byte(first), 0644)
+ defer os.Remove("first_a.yaml")
+ defer os.Remove("second_a.yaml")
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.IgnorePolymorphicCircularReferences = true
+ rolodex := NewRolodex(cf)
+
+ baseDir := "."
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: []string{
+ "first_a.yaml",
+ "second_a.yaml",
+ },
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rolodex.AddLocalFS(baseDir, fileFS)
+ rolodex.SetRootNode(&rootNode)
+ assert.NotNil(t, rolodex.GetRootNode())
+
+ err = rolodex.IndexTheRolodex()
+ assert.NoError(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+
+ // multiple loops across two files
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 1)
+}
+
+func TestRolodex_IndexCircularLookup_PolyItems_LocalLoop_BuildIndexesPost(t *testing.T) {
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ oneOf:
+ items:
+ $ref: "second_d.yaml#/components/schemas/CircleTest"
+ required:
+ - "name"
+ - "children"
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ type: object
+ anyOf:
+ - $ref: "#/components/schemas/CircleTest"`
+
+ second := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ oneOf:
+ items:
+ $ref: "#/components/schemas/CircleTest"
+ required:
+ - "name"
+ - "children"
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ type: object
+ anyOf:
+ - $ref: "#/components/schemas/CircleTest"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(first), &rootNode)
+
+ _ = os.WriteFile("second_d.yaml", []byte(second), 0644)
+ _ = os.WriteFile("first_d.yaml", []byte(first), 0644)
+ defer os.Remove("first_d.yaml")
+ defer os.Remove("second_d.yaml")
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.IgnorePolymorphicCircularReferences = true
+ cf.AvoidBuildIndex = true
+ rolodex := NewRolodex(cf)
+
+ baseDir := "."
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: []string{
+ "first_d.yaml",
+ "second_d.yaml",
+ },
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rolodex.AddLocalFS(baseDir, fileFS)
+ rolodex.SetRootNode(&rootNode)
+
+ err = rolodex.IndexTheRolodex()
+ rolodex.BuildIndexes()
+
+ assert.NoError(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+
+ // multiple loops across two files
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 1)
+
+ // trigger a rebuild, should do nothing.
+ rolodex.BuildIndexes()
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+
+}
+
+func TestRolodex_IndexCircularLookup_ArrayItems_LocalLoop_WithFiles(t *testing.T) {
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "array"
+ items:
+ $ref: "second_b.yaml#/components/schemas/CircleTest"
+ required:
+ - "name"
+ - "children"
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ type: array
+ items:
+ $ref: "#/components/schemas/CircleTest"`
+
+ second := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: array
+ items:
+ $ref: "#/components/schemas/CircleTest"
+ required:
+ - "name"
+ - "children"
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ type: array
+ items:
+ $ref: "#/components/schemas/CircleTest"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(first), &rootNode)
+
+ _ = os.WriteFile("second_b.yaml", []byte(second), 0644)
+ _ = os.WriteFile("first_b.yaml", []byte(first), 0644)
+ defer os.Remove("first_b.yaml")
+ defer os.Remove("second_b.yaml")
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.IgnoreArrayCircularReferences = true
+ rolodex := NewRolodex(cf)
+
+ baseDir := "."
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: []string{
+ "first_b.yaml",
+ "second_b.yaml",
+ },
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rolodex.AddLocalFS(baseDir, fileFS)
+ rolodex.SetRootNode(&rootNode)
+
+ err = rolodex.IndexTheRolodex()
+ assert.NoError(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+
+ // multiple loops across two files
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 1)
+}
+
+func TestRolodex_IndexCircularLookup_PolyItemsHttpOnly(t *testing.T) {
+
+ third := `type: string`
+ fourth := `components:
+ schemas:
+ Chicken:
+ type: string`
+
+ second := `openapi: 3.1.0
+components:
+ schemas:
+ Loopy:
+ type: "object"
+ properties:
+ cake:
+ type: "string"
+ anyOf:
+ items:
+ $ref: "https://I-love-a-good-cake-and-pizza.com/third.yaml"
+ pizza:
+ type: "string"
+ anyOf:
+ items:
+ $ref: "third.yaml"
+ same:
+ type: "string"
+ oneOf:
+ items:
+ $ref: "https://kjahsdkjahdkjashdas.com/fourth.yaml#/components/schemas/Chicken"
+ name:
+ type: "string"
+ oneOf:
+ items:
+ $ref: "https://kjahsdkjahdkjashdas.com/third.yaml#/"
+ children:
+ type: "object"
+ allOf:
+ items:
+ $ref: "first.yaml#/components/schemas/StartTest"
+ required:
+ - "name"
+ - "children"
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ oneOf:
+ items:
+ $ref: "#/components/schemas/Loopy"
+ required:
+ - "name"
+ - "children"`
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ chuffins:
+ type: object
+ allOf:
+ - $ref: "https://kjahsdkjahdkjashdas.com/third.yaml"
+ buffins:
+ type: object
+ allOf:
+ - $ref: "https://kjahsdkjahdkjashdas.com/second.yaml#/"
+ muffins:
+ type: object
+ anyOf:
+ - $ref: "https://kjahsdkjahdkjashdas.com/second.yaml#/components/schemas/CircleTest"
+`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(first), &rootNode)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.IgnorePolymorphicCircularReferences = true
+ rolodex := NewRolodex(cf)
+
+ srv := test_rolodexDeepRefServer([]byte(first), []byte(second), []byte(third), []byte(fourth), nil)
+ defer srv.Close()
+
+ u, _ := url.Parse(srv.URL)
+ cf.BaseURL = u
+ remoteFS, rErr := NewRemoteFSWithConfig(cf)
+ assert.NoError(t, rErr)
+
+ rolodex.AddRemoteFS(srv.URL, remoteFS)
+ rolodex.SetRootNode(&rootNode)
+
+ err := rolodex.IndexTheRolodex()
+ assert.NoError(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+
+ assert.GreaterOrEqual(t, len(rolodex.GetIgnoredCircularReferences()), 1)
+ assert.Equal(t, rolodex.GetRootIndex().GetResolver().GetIndexesVisited(), 6)
+}
+
+func TestRolodex_IndexCircularLookup_PolyItemsFileOnly_LocalIncluded(t *testing.T) {
+
+ third := `type: string`
+
+ second := `openapi: 3.1.0
+components:
+ schemas:
+ LoopyMcLoopFace:
+ type: "object"
+ properties:
+ hoop:
+ type: object
+ allOf:
+ items:
+ $ref: "third_c.yaml"
+ boop:
+ type: object
+ allOf:
+ items:
+ $ref: "$PWD/third_c.yaml"
+ loop:
+ type: object
+ oneOf:
+ items:
+ $ref: "#/components/schemas/LoopyMcLoopFace"
+ CircleTest:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "#/components/schemas/LoopyMcLoopFace"
+ required:
+ - "name"
+ - "children"`
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ type: object
+ anyOf:
+ - $ref: "second_c.yaml#/components/schemas/CircleTest"
+ - $ref: "$PWD/third_c.yaml"`
+
+ var rootNode yaml.Node
+ cws, _ := os.Getwd()
+
+ _ = yaml.Unmarshal([]byte(strings.ReplaceAll(first, "$PWD", cws)), &rootNode)
+ _ = os.WriteFile("second_c.yaml", []byte(strings.ReplaceAll(second, "$PWD", cws)), 0644)
+ _ = os.WriteFile("first_c.yaml", []byte(strings.ReplaceAll(first, "$PWD", cws)), 0644)
+ _ = os.WriteFile("third_c.yaml", []byte(third), 0644)
+
+ defer os.Remove("first_c.yaml")
+ defer os.Remove("second_c.yaml")
+ defer os.Remove("third_c.yaml")
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.IgnorePolymorphicCircularReferences = true
+ rolodex := NewRolodex(cf)
+
+ baseDir := "."
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: []string{
+ "first_c.yaml",
+ "second_c.yaml",
+ "third_c.yaml",
+ },
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rolodex.AddLocalFS(baseDir, fileFS)
+ rolodex.SetRootNode(&rootNode)
+
+ err = rolodex.IndexTheRolodex()
+ assert.NoError(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+
+ // should only be a single loop.
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 1)
+}
+
+func TestRolodex_TestDropDownToRemoteFS_CatchErrors(t *testing.T) {
+
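+ // the local filesystem only contains the *_e.yaml files; second_e.yaml points at 'third.yaml'
+ // and 'not_found.yaml', forcing the rolodex to drop down to the remote filesystem and record
+ // errors for anything it still cannot resolve.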
+ fourth := `type: "object"
+properties:
+ name:
+ type: "string"
+ children:
+ type: "object"`
+
+ third := `type: "object"
+properties:
+ name:
+ $ref: "http://the-space-race-is-all-about-space-and-time-dot.com/fourth.yaml"`
+
+ second := `openapi: 3.1.0
+components:
+ schemas:
+ CircleTest:
+ type: "object"
+ properties:
+ bing:
+ $ref: "not_found.yaml"
+ name:
+ type: "string"
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "third.yaml"
+ required:
+ - "name"
+ - "children"`
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ $ref: "second_e.yaml#/components/schemas/CircleTest"`
+
+ cwd, _ := os.Getwd()
+
+ _ = os.WriteFile("third_e.yaml", []byte(strings.ReplaceAll(third, "$PWD", cwd)), 0644)
+ _ = os.WriteFile("second_e.yaml", []byte(second), 0644)
+ _ = os.WriteFile("first_e.yaml", []byte(first), 0644)
+ _ = os.WriteFile("fourth_e.yaml", []byte(fourth), 0644)
+ defer os.Remove("first_e.yaml")
+ defer os.Remove("second_e.yaml")
+ defer os.Remove("third_e.yaml")
+ defer os.Remove("fourth_e.yaml")
+
+ baseDir := "."
+
+ fsCfg := &LocalFSConfig{
+ BaseDirectory: baseDir,
+ DirFS: os.DirFS(baseDir),
+ FileFilters: []string{
+ "first_e.yaml",
+ "second_e.yaml",
+ "third_e.yaml",
+ "fourth_e.yaml",
+ },
+ }
+
+ fileFS, err := NewLocalFSWithConfig(fsCfg)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = baseDir
+ cf.IgnorePolymorphicCircularReferences = true
+ rolodex := NewRolodex(cf)
+ rolodex.AddLocalFS(baseDir, fileFS)
+
+ srv := test_rolodexDeepRefServer([]byte(first), []byte(second),
+ []byte(strings.ReplaceAll(third, "$PWD", cwd)), []byte(fourth), nil)
+ defer srv.Close()
+
+ u, _ := url.Parse(srv.URL)
+ cf.BaseURL = u
+ remoteFS, rErr := NewRemoteFSWithConfig(cf)
+ assert.NoError(t, rErr)
+
+ rolodex.AddRemoteFS(srv.URL, remoteFS)
+
+ err = rolodex.IndexTheRolodex()
+ assert.Error(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 2)
+}
+
+func TestRolodex_IndexCircularLookup_LookupHttpNoBaseURL(t *testing.T) {
+
+ first := `openapi: 3.1.0
+components:
+ schemas:
+ StartTest:
+ type: object
+ required:
+ - muffins
+ properties:
+ muffins:
+ type: object
+ anyOf:
+ - $ref: "https://raw.githubusercontent.com/pb33f/libopenapi/main/test_specs/circular-tests.yaml#/components/schemas/One"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(first), &rootNode)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.IgnorePolymorphicCircularReferences = true
+ rolodex := NewRolodex(cf)
+
+ remoteFS, rErr := NewRemoteFSWithConfig(cf)
+ assert.NoError(t, rErr)
+
+ rolodex.AddRemoteFS("", remoteFS)
+ rolodex.SetRootNode(&rootNode)
+
+ err := rolodex.IndexTheRolodex()
+ assert.Error(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 1)
+}
+
+func TestRolodex_IndexCircularLookup_ignorePoly(t *testing.T) {
+
+ spinny := `openapi: 3.1.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ anyOf:
+ - $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(spinny), &rootNode)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.IgnorePolymorphicCircularReferences = true
+ rolodex := NewRolodex(cf)
+ rolodex.SetRootNode(&rootNode)
+ err := rolodex.IndexTheRolodex()
+ assert.NoError(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 1)
+}
+
+func TestRolodex_IndexCircularLookup_ignoreArray(t *testing.T) {
+
+ spinny := `openapi: 3.1.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "array"
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(spinny), &rootNode)
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.IgnoreArrayCircularReferences = true
+ rolodex := NewRolodex(cf)
+ rolodex.SetRootNode(&rootNode)
+ err := rolodex.IndexTheRolodex()
+ assert.NoError(t, err)
+ assert.Len(t, rolodex.GetCaughtErrors(), 0)
+ assert.Len(t, rolodex.GetIgnoredCircularReferences(), 1)
+}
+
+func TestRolodex_SimpleTest_OneDoc(t *testing.T) {
+
+ baseDir := "rolodex_test_data"
+
+ fileFS, err := NewLocalFSWithConfig(&LocalFSConfig{
+ BaseDirectory: baseDir,
+ Logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelDebug,
+ })),
+ DirFS: os.DirFS(baseDir),
+ })
+
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = baseDir
+ cf.IgnoreArrayCircularReferences = true
+ cf.IgnorePolymorphicCircularReferences = true
+
+ rolo := NewRolodex(cf)
+ rolo.AddLocalFS(baseDir, fileFS)
+
+ err = rolo.IndexTheRolodex()
+
+ assert.NotZero(t, rolo.GetIndexingDuration())
+ assert.Nil(t, rolo.GetRootIndex())
+ assert.Len(t, rolo.GetIndexes(), 9)
+
+ assert.NoError(t, err)
+ assert.Len(t, rolo.indexes, 9)
+
+ // open components.yaml
+ f, rerr := rolo.Open("components.yaml")
+ assert.NoError(t, rerr)
+ assert.Equal(t, "components.yaml", f.Name())
+
+ idx, ierr := f.(*rolodexFile).Index(cf)
+ assert.NoError(t, ierr)
+ assert.NotNil(t, idx)
+ assert.Equal(t, YAML, f.GetFileExtension())
+ assert.True(t, strings.HasSuffix(f.GetFullPath(), "rolodex_test_data/components.yaml"))
+ assert.NotNil(t, f.ModTime())
+ assert.Equal(t, int64(283), f.Size())
+ assert.False(t, f.IsDir())
+ assert.Nil(t, f.Sys())
+ assert.Equal(t, fs.FileMode(0), f.Mode())
+ assert.Len(t, f.GetErrors(), 0)
+
+ // check the index has a rolodex reference
+ assert.NotNil(t, idx.GetRolodex())
+
+ // re-running the index should be a no-op
+ assert.NoError(t, rolo.IndexTheRolodex())
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 0)
+
+}
+
+func TestRolodex_CircularReferencesPolyIgnored(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ bingo:
+ type: object
+ properties:
+ bango:
+ $ref: "#/components/schemas/ProductCategory"
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ items:
+ anyOf:
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ c := CreateClosedAPIIndexConfig()
+ c.IgnorePolymorphicCircularReferences = true
+ rolo := NewRolodex(c)
+ rolo.SetRootNode(&rootNode)
+ _ = rolo.IndexTheRolodex()
+ assert.NotNil(t, rolo.GetRootIndex())
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 1)
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+
+}
+
+func TestRolodex_CircularReferencesPolyIgnored_PostCheck(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ bingo:
+ type: object
+ properties:
+ bango:
+ $ref: "#/components/schemas/ProductCategory"
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ items:
+ anyOf:
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ c := CreateClosedAPIIndexConfig()
+ c.IgnorePolymorphicCircularReferences = true
+ c.AvoidCircularReferenceCheck = true
+ rolo := NewRolodex(c)
+ rolo.SetRootNode(&rootNode)
+ _ = rolo.IndexTheRolodex()
+ assert.NotNil(t, rolo.GetRootIndex())
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 1)
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+
+}
+
+func TestRolodex_CircularReferencesPolyIgnored_Resolve(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ bingo:
+ type: object
+ properties:
+ bango:
+ $ref: "#/components/schemas/ProductCategory"
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "object"
+ items:
+ anyOf:
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ c := CreateClosedAPIIndexConfig()
+ c.IgnorePolymorphicCircularReferences = true
+ c.AvoidCircularReferenceCheck = true
+ rolo := NewRolodex(c)
+ rolo.SetRootNode(&rootNode)
+ _ = rolo.IndexTheRolodex()
+ assert.NotNil(t, rolo.GetRootIndex())
+ rolo.Resolve()
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 1)
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+
+}
+
+func TestRolodex_CircularReferencesPostCheck(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ bingo:
+ type: object
+ properties:
+ bango:
+ $ref: "#/components/schemas/bingo"
+ required:
+ - bango`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ c := CreateClosedAPIIndexConfig()
+ c.AvoidCircularReferenceCheck = true
+ rolo := NewRolodex(c)
+ rolo.SetRootNode(&rootNode)
+ _ = rolo.IndexTheRolodex()
+ assert.NotNil(t, rolo.GetRootIndex())
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 0)
+ assert.Len(t, rolo.GetCaughtErrors(), 1)
+ assert.Len(t, rolo.GetRootIndex().GetResolver().GetInfiniteCircularReferences(), 1)
+ assert.Len(t, rolo.GetRootIndex().GetResolver().GetSafeCircularReferences(), 0)
+
+}
+
+func TestRolodex_CircularReferencesArrayIgnored(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "array"
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ c := CreateClosedAPIIndexConfig()
+ c.IgnoreArrayCircularReferences = true
+ rolo := NewRolodex(c)
+ rolo.SetRootNode(&rootNode)
+ _ = rolo.IndexTheRolodex()
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 1)
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+
+}
+
+func TestRolodex_CircularReferencesArrayIgnored_Resolve(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "array"
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ c := CreateClosedAPIIndexConfig()
+ c.IgnoreArrayCircularReferences = true
+ rolo := NewRolodex(c)
+ rolo.SetRootNode(&rootNode)
+ _ = rolo.IndexTheRolodex()
+ rolo.Resolve()
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 1)
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+
+}
+
+func TestRolodex_CircularReferencesArrayIgnored_PostCheck(t *testing.T) {
+
+ var d = `openapi: 3.1.0
+components:
+ schemas:
+ ProductCategory:
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ children:
+ type: "array"
+ items:
+ $ref: "#/components/schemas/ProductCategory"
+ description: "Array of sub-categories in the same format."
+ required:
+ - "name"
+ - "children"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+
+ c := CreateClosedAPIIndexConfig()
+ c.IgnoreArrayCircularReferences = true
+ c.AvoidCircularReferenceCheck = true
+ rolo := NewRolodex(c)
+ rolo.SetRootNode(&rootNode)
+ _ = rolo.IndexTheRolodex()
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 1)
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+
+}
+
+func TestHumanFileSize(t *testing.T) {
+
+ // test bytes for different units
+ assert.Equal(t, "1 B", HumanFileSize(1))
+ assert.Equal(t, "1 KB", HumanFileSize(1024))
+ assert.Equal(t, "1 MB", HumanFileSize(1024*1024))
+
+}
diff --git a/index/rolodex_test_data/components.yaml b/index/rolodex_test_data/components.yaml
new file mode 100644
index 0000000..8d521ee
--- /dev/null
+++ b/index/rolodex_test_data/components.yaml
@@ -0,0 +1,13 @@
+openapi: 3.1.0
+info:
+ title: Rolodex Test Data
+ version: 1.0.0
+components:
+ schemas:
+ Ding:
+ type: object
+ description: A thing that does nothing. Ding a ling!
+ properties:
+ message:
+ type: string
+ description: I am pointless. Ding Ding!
\ No newline at end of file
diff --git a/index/rolodex_test_data/dir1/components.yaml b/index/rolodex_test_data/dir1/components.yaml
new file mode 100644
index 0000000..dd14d4c
--- /dev/null
+++ b/index/rolodex_test_data/dir1/components.yaml
@@ -0,0 +1,15 @@
+openapi: 3.1.0
+info:
+ title: Dir1 Test Components
+ version: 1.0.0
+components:
+ schemas:
+ GlobalComponent:
+ type: object
+ description: Dir1 Global Component
+ properties:
+ message:
+ type: string
+ description: I am pointless, but I am global dir1.
+ SomeUtil:
+ $ref: "utils/utils.yaml"
\ No newline at end of file
diff --git a/index/rolodex_test_data/dir1/subdir1/shared.yaml b/index/rolodex_test_data/dir1/subdir1/shared.yaml
new file mode 100644
index 0000000..d70a69a
--- /dev/null
+++ b/index/rolodex_test_data/dir1/subdir1/shared.yaml
@@ -0,0 +1,15 @@
+openapi: 3.1.0
+info:
+ title: Dir1 Shared Components
+ version: 1.0.0
+components:
+ schemas:
+ SharedComponent:
+ type: object
+ description: Dir1 Shared Component
+ properties:
+ message:
+ type: string
+ description: I am pointless, but I am shared dir1.
+ SomeUtil:
+ $ref: "../utils/utils.yaml"
\ No newline at end of file
diff --git a/index/rolodex_test_data/dir1/utils/utils.yaml b/index/rolodex_test_data/dir1/utils/utils.yaml
new file mode 100644
index 0000000..2fa63ac
--- /dev/null
+++ b/index/rolodex_test_data/dir1/utils/utils.yaml
@@ -0,0 +1,9 @@
+type: object
+description: I am a utility for dir1
+properties:
+ message:
+ type: object
+ description: I am pointless dir1.
+ properties:
+ shared:
+ $ref: '../subdir1/shared.yaml#/components/schemas/SharedComponent'
\ No newline at end of file
diff --git a/index/rolodex_test_data/dir2/components.yaml b/index/rolodex_test_data/dir2/components.yaml
new file mode 100644
index 0000000..1d25203
--- /dev/null
+++ b/index/rolodex_test_data/dir2/components.yaml
@@ -0,0 +1,21 @@
+openapi: 3.1.0
+info:
+ title: Dir2 Test Components
+ version: 1.0.0
+components:
+ schemas:
+ GlobalComponent:
+ type: object
+ description: Dir2 Global Component
+ properties:
+ message:
+ type: string
+ description: I am pointless, but I am global dir2.
+ AnotherComponent:
+ type: object
+ description: Dir2 Another Component
+ properties:
+ message:
+ $ref: "subdir2/shared.yaml#/components/schemas/SharedComponent"
+ SomeUtil:
+ $ref: "utils/utils.yaml"
\ No newline at end of file
diff --git a/index/rolodex_test_data/dir2/subdir2/shared.yaml b/index/rolodex_test_data/dir2/subdir2/shared.yaml
new file mode 100644
index 0000000..ef913dc
--- /dev/null
+++ b/index/rolodex_test_data/dir2/subdir2/shared.yaml
@@ -0,0 +1,15 @@
+openapi: 3.1.0
+info:
+ title: Dir2 Shared Components
+ version: 1.0.0
+components:
+ schemas:
+ SharedComponent:
+ type: object
+ description: Dir2 Shared Component
+ properties:
+ utilMessage:
+ $ref: "../utils/utils.yaml"
+ message:
+ type: string
+ description: I am pointless, but I am shared dir2.
\ No newline at end of file
diff --git a/index/rolodex_test_data/dir2/utils/utils.yaml b/index/rolodex_test_data/dir2/utils/utils.yaml
new file mode 100644
index 0000000..494bf4e
--- /dev/null
+++ b/index/rolodex_test_data/dir2/utils/utils.yaml
@@ -0,0 +1,6 @@
+type: object
+description: I am a utility for dir2
+properties:
+ message:
+ type: object
+ description: I am pointless dir2 utility, I am multiple levels deep.
\ No newline at end of file
diff --git a/index/rolodex_test_data/doc1.yaml b/index/rolodex_test_data/doc1.yaml
new file mode 100644
index 0000000..cd31fda
--- /dev/null
+++ b/index/rolodex_test_data/doc1.yaml
@@ -0,0 +1,32 @@
+openapi: 3.1.0
+info:
+ title: Rolodex Test Data
+ version: 1.0.0
+paths:
+ /one/local:
+ get:
+ responses:
+ '200':
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Thing'
+ /one/file:
+ get:
+ responses:
+ '200':
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: 'components.yaml#/components/schemas/Ding'
+components:
+ schemas:
+ Thing:
+ type: object
+ description: A thing that does nothing.
+ properties:
+ message:
+ type: string
+ description: I am pointless.
\ No newline at end of file
diff --git a/index/rolodex_test_data/doc2.yaml b/index/rolodex_test_data/doc2.yaml
new file mode 100644
index 0000000..c0fef50
--- /dev/null
+++ b/index/rolodex_test_data/doc2.yaml
@@ -0,0 +1,59 @@
+openapi: 3.1.0
+info:
+ title: Rolodex Test Data
+ version: 1.0.0
+paths:
+# /one/local:
+# get:
+# responses:
+# '200':
+# description: OK
+# content:
+# application/json:
+# schema:
+# $ref: '#/components/schemas/Thing'
+# /one/file:
+# get:
+# responses:
+# '200':
+# description: OK
+# content:
+# application/json:
+# schema:
+# $ref: 'components.yaml#/components/schemas/Ding'
+# /nested/files1:
+# get:
+# responses:
+# '200':
+# description: OK
+# content:
+# application/json:
+# schema:
+# $ref: 'dir1/components.yaml#/components/schemas/GlobalComponent'
+# /nested/files2:
+# get:
+# responses:
+# '200':
+# description: OK
+# content:
+# application/json:
+# schema:
+# $ref: 'dir2/components.yaml#/components/schemas/GlobalComponent'
+ /nested/files3:
+ get:
+ responses:
+ '200':
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: 'dir2/components.yaml#/components/schemas/AnotherComponent'
+components:
+ schemas:
+ Thing:
+ type: object
+ description: A thing that does nothing.
+ properties:
+ message:
+ type: string
+ description: I am pointless.
\ No newline at end of file
diff --git a/index/search_index.go b/index/search_index.go
index 0c8f698..58a51a7 100644
--- a/index/search_index.go
+++ b/index/search_index.go
@@ -3,32 +3,177 @@
package index
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "path/filepath"
+ "strings"
+)
+
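+// ContextKey is the key type for values carried on the context while searching for references.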
+type ContextKey string
+
+const CurrentPathKey ContextKey = "currentPath"
+const FoundIndexKey ContextKey = "foundIndex"
+
+func (index *SpecIndex) SearchIndexForReferenceByReference(fullRef *Reference) (*Reference, *SpecIndex) {
+ r, idx, _ := index.SearchIndexForReferenceByReferenceWithContext(context.Background(), fullRef)
+ return r, idx
+}
+
// SearchIndexForReference searches the index for a reference, first looking through the mapped references
// and then externalSpecIndex for a match. If no match is found, it will recursively search the child indexes
// extracted when parsing the OpenAPI Spec.
-func (index *SpecIndex) SearchIndexForReference(ref string) []*Reference {
- if r, ok := index.allMappedRefs[ref]; ok {
- return []*Reference{r}
+func (index *SpecIndex) SearchIndexForReference(ref string) (*Reference, *SpecIndex) {
+ return index.SearchIndexForReferenceByReference(&Reference{FullDefinition: ref})
+}
+
+func (index *SpecIndex) SearchIndexForReferenceWithContext(ctx context.Context, ref string) (*Reference, *SpecIndex, context.Context) {
+ return index.SearchIndexForReferenceByReferenceWithContext(ctx, &Reference{FullDefinition: ref})
+}
+
+func (index *SpecIndex) SearchIndexForReferenceByReferenceWithContext(ctx context.Context, searchRef *Reference) (*Reference, *SpecIndex, context.Context) {
+
+ if v, ok := index.cache.Load(searchRef.FullDefinition); ok {
+ return v.(*Reference), v.(*Reference).Index, context.WithValue(ctx, CurrentPathKey, v.(*Reference).RemoteLocation)
}
- for c := range index.children {
- found := goFindMeSomething(index.children[c], ref)
- if found != nil {
- return found
+
+ ref := searchRef.FullDefinition
+ refAlt := ref
+ absPath := index.specAbsolutePath
+ if absPath == "" {
+ absPath = index.config.BasePath
+ }
+ var roloLookup string
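+ // work out where the rolodex should look for the file portion of the reference:
+ // http references and absolute paths are used as-is, while relative paths are
+ // resolved against the absolute path of the spec that owns this index.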
+ uri := strings.Split(ref, "#/")
+ if len(uri) == 2 {
+ if uri[0] != "" {
+ if strings.HasPrefix(uri[0], "http") {
+ roloLookup = searchRef.FullDefinition
+ } else {
+ if filepath.IsAbs(uri[0]) {
+ roloLookup = uri[0]
+ } else {
+ if filepath.Ext(absPath) != "" {
+ absPath = filepath.Dir(absPath)
+ }
+ roloLookup, _ = filepath.Abs(filepath.Join(absPath, uri[0]))
+ }
+ }
+ } else {
+
+ if filepath.Ext(uri[1]) != "" {
+ roloLookup = absPath
+ } else {
+ roloLookup = ""
+ }
+
+ ref = fmt.Sprintf("#/%s", uri[1])
+ refAlt = fmt.Sprintf("%s#/%s", absPath, uri[1])
+
+ }
+
+ } else {
+ if filepath.IsAbs(uri[0]) {
+ roloLookup = uri[0]
+ } else {
+
+ if strings.HasPrefix(uri[0], "http") {
+ roloLookup = ref
+ } else {
+ if filepath.Ext(absPath) != "" {
+ absPath = filepath.Dir(absPath)
+ }
+ roloLookup, _ = filepath.Abs(filepath.Join(absPath, uri[0]))
+ }
+ }
+ ref = uri[0]
+ }
+ if strings.Contains(ref, "%") {
+ // decode the url.
+ ref, _ = url.QueryUnescape(ref)
+ refAlt, _ = url.QueryUnescape(refAlt)
+ }
+
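+ // check this index's own mapped references first, using both the raw reference and its absolute form.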
+ if r, ok := index.allMappedRefs[ref]; ok {
+ index.cache.Store(ref, r)
+ return r, r.Index, context.WithValue(ctx, CurrentPathKey, r.RemoteLocation)
+ }
+
+ if r, ok := index.allMappedRefs[refAlt]; ok {
+ index.cache.Store(refAlt, r)
+ return r, r.Index, context.WithValue(ctx, CurrentPathKey, r.RemoteLocation)
+ }
+
+ // check the rolodex for the reference.
+ if roloLookup != "" {
+
+ if strings.Contains(roloLookup, "#") {
+ roloLookup = strings.Split(roloLookup, "#")[0]
+ }
+
+ rFile, err := index.rolodex.Open(roloLookup)
+ if err != nil {
+ return nil, index, ctx
+ }
+
+ // extract the index from the rolodex file.
+ if rFile != nil {
+ idx := rFile.GetIndex()
+ if index.resolver != nil {
+ index.resolver.indexesVisited++
+ }
+ if idx != nil {
+
+ // check mapped refs.
+ if r, ok := idx.allMappedRefs[ref]; ok {
+ index.cache.Store(ref, r)
+ idx.cache.Store(ref, r)
+ return r, r.Index, context.WithValue(ctx, CurrentPathKey, r.RemoteLocation)
+ }
+
+ // build a collection of all the inline schemas and search them
+ // for the reference.
+ var d []*Reference
+ d = append(d, idx.allInlineSchemaDefinitions...)
+ d = append(d, idx.allRefSchemaDefinitions...)
+ d = append(d, idx.allInlineSchemaObjectDefinitions...)
+ for _, s := range d {
+ if s.FullDefinition == ref {
+ idx.cache.Store(ref, s)
+ index.cache.Store(ref, s)
+ return s, s.Index, context.WithValue(ctx, CurrentPathKey, s.RemoteLocation)
+ }
+ }
+
+ // does component exist in the root?
+ node, _ := rFile.GetContentAsYAMLNode()
+ if node != nil {
+ var found *Reference
+ exp := strings.Split(ref, "#/")
+ compId := ref
+
+ if len(exp) == 2 {
+ compId = fmt.Sprintf("#/%s", exp[1])
+ found = FindComponent(node, compId, exp[0], idx)
+ }
+ if found == nil {
+ found = idx.FindComponent(ref)
+ }
+
+ if found != nil {
+ idx.cache.Store(ref, found)
+ index.cache.Store(ref, found)
+ return found, found.Index, context.WithValue(ctx, CurrentPathKey, found.RemoteLocation)
+ }
+ }
+ }
}
}
- return nil
-}
-func (index *SpecIndex) SearchAncestryForSeenURI(uri string) *SpecIndex {
- if index.parentIndex == nil {
- return nil
+ if index.logger != nil {
+ index.logger.Error("unable to locate reference anywhere in the rolodex", "reference", ref)
}
- if index.uri[0] != uri {
- return index.parentIndex.SearchAncestryForSeenURI(uri)
- }
- return index
-}
+ return nil, index, ctx
-func goFindMeSomething(i *SpecIndex, ref string) []*Reference {
- return i.SearchIndexForReference(ref)
}
diff --git a/index/search_index_test.go b/index/search_index_test.go
index 62dd2a8..442d9d6 100644
--- a/index/search_index_test.go
+++ b/index/search_index_test.go
@@ -4,6 +4,7 @@
package index
import (
+ "context"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
"os"
@@ -18,6 +19,18 @@ func TestSpecIndex_SearchIndexForReference(t *testing.T) {
c := CreateOpenAPIIndexConfig()
idx := NewSpecIndexWithConfig(&rootNode, c)
- ref := idx.SearchIndexForReference("#/components/schemas/Pet")
+ ref, _ := idx.SearchIndexForReference("#/components/schemas/Pet")
+ assert.NotNil(t, ref)
+}
+
+func TestSpecIndex_SearchIndexForReferenceWithContext(t *testing.T) {
+ petstore, _ := os.ReadFile("../test_specs/petstorev3.json")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(petstore, &rootNode)
+
+ c := CreateOpenAPIIndexConfig()
+ idx := NewSpecIndexWithConfig(&rootNode, c)
+
+ ref, _, _ := idx.SearchIndexForReferenceWithContext(context.Background(), "#/components/schemas/Pet")
assert.NotNil(t, ref)
}
diff --git a/index/search_rolodex.go b/index/search_rolodex.go
new file mode 100644
index 0000000..9178f27
--- /dev/null
+++ b/index/search_rolodex.go
@@ -0,0 +1,72 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "gopkg.in/yaml.v3"
+)
+
+// FindNodeOrigin searches all indexes for the origin of a node. If the node is found, a NodeOrigin
+// is returned, otherwise nil is returned.
+func (r *Rolodex) FindNodeOrigin(node *yaml.Node) *NodeOrigin {
+ f := make(chan *NodeOrigin)
+ d := make(chan bool)
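+ // search every index concurrently; return as soon as one finds the node, otherwise
+ // fall back to the root index once all searches have reported back.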
+ findNode := func(i int, node *yaml.Node) {
+ n := r.indexes[i].FindNodeOrigin(node)
+ if n != nil {
+ f <- n
+ return
+ }
+ d <- true
+ }
+ for i := range r.indexes {
+ go findNode(i, node)
+ }
+ searched := 0
+ for searched < len(r.indexes) {
+ select {
+ case n := <-f:
+ return n
+ case <-d:
+ searched++
+ }
+ }
+ return r.GetRootIndex().FindNodeOrigin(node)
+}
+
+// FindNodeOrigin searches this index for a matching node. If the node is found, a NodeOrigin
+// is returned, otherwise nil is returned.
+func (index *SpecIndex) FindNodeOrigin(node *yaml.Node) *NodeOrigin {
+ if node != nil {
+ if index.nodeMap[node.Line] != nil {
+ if index.nodeMap[node.Line][node.Column] != nil {
+ foundNode := index.nodeMap[node.Line][node.Column]
+ if foundNode.Kind == yaml.DocumentNode {
+ foundNode = foundNode.Content[0]
+ }
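+ // a node at the same line and column only counts as the origin if its value, kind,
+ // tag and (shallow) content also match the node being searched for.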
+ match := true
+ if foundNode.Value != node.Value || foundNode.Kind != node.Kind || foundNode.Tag != node.Tag {
+ match = false
+ }
+ if len(foundNode.Content) == len(node.Content) {
+ for i := range foundNode.Content {
+ if foundNode.Content[i].Value != node.Content[i].Value {
+ match = false
+ }
+ }
+ }
+ if match {
+ return &NodeOrigin{
+ Node: foundNode,
+ Line: node.Line,
+ Column: node.Column,
+ AbsoluteLocation: index.specAbsolutePath,
+ Index: index,
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
diff --git a/index/search_rolodex_test.go b/index/search_rolodex_test.go
new file mode 100644
index 0000000..459ef90
--- /dev/null
+++ b/index/search_rolodex_test.go
@@ -0,0 +1,124 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package index
+
+import (
+ "github.com/stretchr/testify/assert"
+ "github.com/vmware-labs/yaml-jsonpath/pkg/yamlpath"
+ "gopkg.in/yaml.v3"
+ "strings"
+ "testing"
+)
+
+func TestRolodex_FindNodeOrigin(t *testing.T) {
+
+ baseDir := "rolodex_test_data"
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = baseDir
+ cf.AvoidCircularReferenceCheck = true
+
+ fileFS, err := NewLocalFSWithConfig(&LocalFSConfig{
+ BaseDirectory: baseDir,
+ IndexConfig: cf,
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rolo := NewRolodex(cf)
+ rolo.AddLocalFS(baseDir, fileFS)
+
+ // open doc2
+ f, rerr := rolo.Open("doc2.yaml")
+ assert.Nil(t, rerr)
+ assert.NotNil(t, f)
+
+ node, _ := f.GetContentAsYAMLNode()
+
+ rolo.SetRootNode(node)
+
+ err = rolo.IndexTheRolodex()
+ rolo.Resolve()
+
+ assert.Len(t, rolo.indexes, 4)
+
+ // extract something that can only exist after resolution
+ path := "$.paths./nested/files3.get.responses.200.content.application/json.schema.properties.message.properties.utilMessage.properties.message.description"
+ yp, _ := yamlpath.NewPath(path)
+ results, _ := yp.Find(node)
+
+ assert.NotNil(t, results)
+ assert.Len(t, results, 1)
+ assert.Equal(t, "I am pointless dir2 utility, I am multiple levels deep.", results[0].Value)
+
+ // now for the truth, where did this come from?
+ origin := rolo.FindNodeOrigin(results[0])
+
+ assert.NotNil(t, origin)
+ assert.True(t, strings.HasSuffix(origin.AbsoluteLocation, "index/rolodex_test_data/dir2/utils/utils.yaml"))
+
+ // should be identical to the original node
+ assert.Equal(t, results[0], origin.Node)
+
+ // look for something that cannot exist
+ origin = rolo.FindNodeOrigin(nil)
+ assert.Nil(t, origin)
+
+ // modify the node and try again
+ m := *results[0]
+ m.Value = "I am a new message"
+ origin = rolo.FindNodeOrigin(&m)
+ assert.Nil(t, origin)
+
+ // extract the doc root
+ origin = rolo.FindNodeOrigin(node)
+ assert.Nil(t, origin)
+}
+
+func TestRolodex_FindNodeOrigin_ModifyLookup(t *testing.T) {
+
+ baseDir := "rolodex_test_data"
+
+ cf := CreateOpenAPIIndexConfig()
+ cf.BasePath = baseDir
+ cf.AvoidCircularReferenceCheck = true
+
+ fileFS, err := NewLocalFSWithConfig(&LocalFSConfig{
+ BaseDirectory: baseDir,
+ IndexConfig: cf,
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rolo := NewRolodex(cf)
+ rolo.AddLocalFS(baseDir, fileFS)
+
+ // open doc2
+ f, rerr := rolo.Open("doc2.yaml")
+ assert.Nil(t, rerr)
+ assert.NotNil(t, f)
+
+ node, _ := f.GetContentAsYAMLNode()
+
+ rolo.SetRootNode(node)
+
+ err = rolo.IndexTheRolodex()
+ rolo.Resolve()
+
+ assert.Len(t, rolo.indexes, 4)
+
+ path := "$.paths./nested/files3.get.responses.200.content.application/json.schema"
+ yp, _ := yamlpath.NewPath(path)
+ results, _ := yp.Find(node)
+
+ // copy, modify, and try again
+ o := *results[0]
+ o.Content = []*yaml.Node{
+ {Value: "beer"}, {Value: "wine"}, {Value: "cake"}, {Value: "burgers"}, {Value: "herbs"}, {Value: "spices"},
+ }
+ origin := rolo.FindNodeOrigin(&o)
+ assert.Nil(t, origin)
+}
diff --git a/index/spec_index.go b/index/spec_index.go
index 6d78f37..cc35039 100644
--- a/index/spec_index.go
+++ b/index/spec_index.go
@@ -14,14 +14,15 @@ package index
import (
"fmt"
- "sort"
- "strings"
- "sync"
-
"github.com/pb33f/libopenapi/utils"
"github.com/vmware-labs/yaml-jsonpath/pkg/yamlpath"
"golang.org/x/sync/syncmap"
"gopkg.in/yaml.v3"
+ "log/slog"
+ "os"
+ "sort"
+ "strings"
+ "sync"
)
// NewSpecIndexWithConfig will create a new index of an OpenAPI or Swagger spec. It uses the same logic as NewSpecIndex
@@ -29,16 +30,20 @@ import (
// how the index is set up.
func NewSpecIndexWithConfig(rootNode *yaml.Node, config *SpecIndexConfig) *SpecIndex {
index := new(SpecIndex)
- if config != nil && config.seenRemoteSources == nil {
- config.seenRemoteSources = &syncmap.Map{}
- }
- config.remoteLock = &sync.Mutex{}
index.config = config
- index.parentIndex = config.ParentIndex
+ index.rolodex = config.Rolodex
index.uri = config.uri
+ index.specAbsolutePath = config.SpecAbsolutePath
if rootNode == nil || len(rootNode.Content) <= 0 {
return index
}
+ if config.Logger != nil {
+ index.logger = config.Logger
+ } else {
+ index.logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ }
boostrapIndexCollections(rootNode, index)
return createNewIndex(rootNode, index, config.AvoidBuildIndex)
}
@@ -47,11 +52,8 @@ func NewSpecIndexWithConfig(rootNode *yaml.Node, config *SpecIndexConfig) *SpecI
// other than a raw index of every node for every content type in the specification. This process runs as fast as
// possible so dependencies looking through the tree, don't need to walk the entire thing over, and over.
//
-// Deprecated: Use NewSpecIndexWithConfig instead, this function will be removed in the future because it
-// defaults to allowing remote references and file references. This is a potential security risk and should be controlled by
-// providing a SpecIndexConfig that explicitly sets the AllowRemoteLookup and AllowFileLookup to true.
-// This function also does not support specifications with relative references that may not exist locally.
-// - https://github.com/pb33f/libopenapi/issues/73
+// This creates a new index using a default 'open' configuration. This means if a BaseURL or BasePath is supplied,
+// the rolodex will automatically read those files or open those remote URLs when resolving references.
func NewSpecIndex(rootNode *yaml.Node) *SpecIndex {
index := new(SpecIndex)
index.config = CreateOpenAPIIndexConfig()
@@ -64,6 +66,11 @@ func createNewIndex(rootNode *yaml.Node, index *SpecIndex, avoidBuildOut bool) *
if rootNode == nil {
return index
}
+ index.nodeMapCompleted = make(chan bool)
+ index.nodeMap = make(map[int]map[int]*yaml.Node)
+ go index.MapNodes(rootNode) // this can run async.
+
+ index.cache = new(syncmap.Map)
// boot index.
results := index.ExtractRefs(index.root.Content[0], index.root, []string{}, 0, false, "")
@@ -87,19 +94,17 @@ func createNewIndex(rootNode *yaml.Node, index *SpecIndex, avoidBuildOut bool) *
if !avoidBuildOut {
index.BuildIndex()
}
-
- // do a copy!
- index.config.seenRemoteSources.Range(func(k, v any) bool {
- index.seenRemoteSources[k.(string)] = v.(*yaml.Node)
- return true
- })
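+ // wait for the asynchronous node mapping (started above) to finish before handing back the index.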
+ <-index.nodeMapCompleted
return index
}
-// BuildIndex will run all of the count operations required to build up maps of everything. It's what makes the index
+// BuildIndex will run all the count operations required to build up maps of everything. It's what makes the index
// useful for looking up things, the count operations are all run in parallel and then the final calculations are run
// the index is ready.
func (index *SpecIndex) BuildIndex() {
+ if index.built {
+ return
+ }
countFuncs := []func() int{
index.GetOperationCount,
index.GetComponentSchemaCount,
@@ -129,6 +134,11 @@ func (index *SpecIndex) BuildIndex() {
index.GetInlineDuplicateParamCount()
index.GetAllDescriptionsCount()
index.GetTotalTagsCount()
+ index.built = true
+}
+
+func (index *SpecIndex) GetLogger() *slog.Logger {
+ return index.logger
}
// GetRootNode returns document root node.
@@ -136,6 +146,10 @@ func (index *SpecIndex) GetRootNode() *yaml.Node {
return index.root
}
+func (index *SpecIndex) GetRolodex() *Rolodex {
+ return index.rolodex
+}
+
// GetGlobalTagsNode returns document root tags node.
func (index *SpecIndex) GetGlobalTagsNode() *yaml.Node {
return index.tagsNode
@@ -423,11 +437,6 @@ func (index *SpecIndex) GetAllOperationsServers() map[string]map[string][]*Refer
return index.opServersRefs
}
-// GetAllExternalIndexes will return all indexes for external documents
-func (index *SpecIndex) GetAllExternalIndexes() map[string]*SpecIndex {
- return index.externalSpecIndex
-}
-
// SetAllowCircularReferenceResolving will flip a bit that can be used by any consumers to determine if they want
// to allow or disallow circular references to be resolved or visited
func (index *SpecIndex) SetAllowCircularReferenceResolving(allow bool) {
@@ -618,14 +627,14 @@ func (index *SpecIndex) GetGlobalCallbacksCount() int {
return index.globalCallbacksCount
}
- // index.pathRefsLock.Lock()
+ index.pathRefsLock.RLock()
for path, p := range index.pathRefs {
for _, m := range p {
// look through method for callbacks
callbacks, _ := yamlpath.NewPath("$..callbacks")
- res, _ := callbacks.Find(m.Node)
-
+ var res []*yaml.Node
+ res, _ = callbacks.Find(m.Node)
if len(res) > 0 {
for _, callback := range res[0].Content {
if utils.IsNodeMap(callback) {
@@ -650,7 +659,7 @@ func (index *SpecIndex) GetGlobalCallbacksCount() int {
}
}
}
- // index.pathRefsLock.Unlock()
+ index.pathRefsLock.RUnlock()
return index.globalCallbacksCount
}
@@ -670,7 +679,9 @@ func (index *SpecIndex) GetGlobalLinksCount() int {
// look through method for links
links, _ := yamlpath.NewPath("$..links")
- res, _ := links.Find(m.Node)
+ var res []*yaml.Node
+
+ res, _ = links.Find(m.Node)
if len(res) > 0 {
for _, link := range res[0].Content {
@@ -928,6 +939,8 @@ func (index *SpecIndex) GetOperationCount() int {
opCount := 0
+ locatedPathRefs := make(map[string]map[string]*Reference)
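+ // collect path refs into a local map first, then merge them into index.pathRefs in one pass at the end.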
+
for x, p := range index.pathsNode.Content {
if x%2 == 0 {
@@ -957,12 +970,10 @@ func (index *SpecIndex) GetOperationCount() int {
Path: fmt.Sprintf("$.paths.%s.%s", p.Value, m.Value),
ParentNode: m,
}
- index.pathRefsLock.Lock()
- if index.pathRefs[p.Value] == nil {
- index.pathRefs[p.Value] = make(map[string]*Reference)
+ if locatedPathRefs[p.Value] == nil {
+ locatedPathRefs[p.Value] = make(map[string]*Reference)
}
- index.pathRefs[p.Value][ref.Name] = ref
- index.pathRefsLock.Unlock()
+ locatedPathRefs[p.Value][ref.Name] = ref
// update
opCount++
}
@@ -970,7 +981,9 @@ func (index *SpecIndex) GetOperationCount() int {
}
}
}
-
+ for k, v := range locatedPathRefs {
+ index.pathRefs[k] = v
+ }
index.operationCount = opCount
return opCount
}
@@ -1185,16 +1198,3 @@ func (index *SpecIndex) GetAllDescriptionsCount() int {
func (index *SpecIndex) GetAllSummariesCount() int {
return len(index.allSummaries)
}
-
-// CheckForSeenRemoteSource will check to see if we have already seen this remote source and return it,
-// to avoid making duplicate remote calls for document data.
-func (index *SpecIndex) CheckForSeenRemoteSource(url string) (bool, *yaml.Node) {
- if index.config == nil || index.config.seenRemoteSources == nil {
- return false, nil
- }
- j, _ := index.config.seenRemoteSources.Load(url)
- if j != nil {
- return true, j.(*yaml.Node)
- }
- return false, nil
-}
diff --git a/index/spec_index_test.go b/index/spec_index_test.go
index 9061f9e..3e1980d 100644
--- a/index/spec_index_test.go
+++ b/index/spec_index_test.go
@@ -4,21 +4,62 @@
package index
import (
+ "bytes"
"fmt"
- "io/ioutil"
+ "github.com/pb33f/libopenapi/utils"
+ "golang.org/x/sync/syncmap"
"log"
+ "log/slog"
+ "net/http"
+ "net/http/httptest"
"net/url"
"os"
"os/exec"
"path/filepath"
"testing"
+ "time"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
)
+func TestSpecIndex_GetCache(t *testing.T) {
+
+ petstore, _ := os.ReadFile("../test_specs/petstorev3.json")
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(petstore, &rootNode)
+
+ index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
+
+ extCache := index.GetCache()
+ assert.NotNil(t, extCache)
+ extCache.Store("test", "test")
+ loaded, ok := extCache.Load("test")
+ assert.Equal(t, "test", loaded)
+ assert.True(t, ok)
+
+ // create a new cache
+ newCache := new(syncmap.Map)
+ index.SetCache(newCache)
+
+ // check that the cache has been set.
+ assert.Equal(t, newCache, index.GetCache())
+
+ // add an item to the new cache and check it exists
+ newCache.Store("test2", "test2")
+ loaded, ok = newCache.Load("test2")
+ assert.Equal(t, "test2", loaded)
+ assert.True(t, ok)
+
+ // now check that the new item in the new cache does not exist in the old cache.
+ loaded, ok = extCache.Load("test2")
+ assert.Nil(t, loaded)
+ assert.False(t, ok)
+
+}
+
func TestSpecIndex_ExtractRefsStripe(t *testing.T) {
- stripe, _ := ioutil.ReadFile("../test_specs/stripe.yaml")
+ stripe, _ := os.ReadFile("../test_specs/stripe.yaml")
var rootNode yaml.Node
_ = yaml.Unmarshal(stripe, &rootNode)
@@ -47,6 +88,8 @@ func TestSpecIndex_ExtractRefsStripe(t *testing.T) {
assert.Len(t, index.GetAllReferenceSchemas(), 1972)
assert.NotNil(t, index.GetRootServersNode())
assert.Len(t, index.GetAllRootServers(), 1)
+ assert.Equal(t, "", index.GetSpecAbsolutePath())
+ assert.NotNil(t, index.GetLogger())
// not required, but flip the circular result switch on and off.
assert.False(t, index.AllowCircularReferenceResolving())
@@ -61,11 +104,10 @@ func TestSpecIndex_ExtractRefsStripe(t *testing.T) {
assert.Len(t, index.GetRefsByLine(), 537)
assert.Len(t, index.GetLinesWithReferences(), 1972)
assert.Len(t, index.GetAllExternalDocuments(), 0)
- assert.Len(t, index.GetAllExternalIndexes(), 0)
}
func TestSpecIndex_Asana(t *testing.T) {
- asana, _ := ioutil.ReadFile("../test_specs/asana.yaml")
+ asana, _ := os.ReadFile("../test_specs/asana.yaml")
var rootNode yaml.Node
_ = yaml.Unmarshal(asana, &rootNode)
@@ -91,15 +133,62 @@ func TestSpecIndex_DigitalOcean(t *testing.T) {
var rootNode yaml.Node
_ = yaml.Unmarshal(do, &rootNode)
- baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")
- index := NewSpecIndexWithConfig(&rootNode, &SpecIndexConfig{
- BaseURL: baseURL,
- AllowRemoteLookup: true,
- AllowFileLookup: true,
- })
+ location := "https://raw.githubusercontent.com/digitalocean/openapi/main/specification"
+ baseURL, _ := url.Parse(location)
+
+ // create a new config that allows remote lookups.
+ cf := &SpecIndexConfig{}
+ cf.AvoidBuildIndex = true
+ cf.AllowRemoteLookup = true
+ cf.AvoidCircularReferenceCheck = true
+ cf.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+
+ // setting this baseURL overrides the base location that relative references are resolved against
+ cf.BaseURL = baseURL
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // create a new remote fs and set the config for indexing.
+ remoteFS, _ := NewRemoteFSWithConfig(cf)
+
+ // create a handler that reads a GH_PAT token from the OS environment and injects it into the
+ // request header, so these requests do not fail when running lots of local tests.
+ if os.Getenv("GH_PAT") != "" {
+ fmt.Println("GH_PAT found, setting remote handler func")
+ client := &http.Client{
+ Timeout: time.Second * 120,
+ }
+ remoteFS.SetRemoteHandlerFunc(func(url string) (*http.Response, error) {
+ request, _ := http.NewRequest(http.MethodGet, url, nil)
+ request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", os.Getenv("GH_PAT")))
+ return client.Do(request)
+ })
+ }
+
+ // add remote filesystem
+ rolo.AddRemoteFS(location, remoteFS)
+
+ // index the rolodex.
+ indexedErr := rolo.IndexTheRolodex()
+ assert.NoError(t, indexedErr)
+
+ // get all the files!
+ files := remoteFS.GetFiles()
+ fileLen := len(files)
+ assert.Equal(t, 1646, fileLen)
+ assert.Len(t, remoteFS.GetErrors(), 0)
+
+ // check circular references
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 0)
- assert.Len(t, index.GetAllExternalIndexes(), 291)
- assert.NotNil(t, index)
}
func TestSpecIndex_DigitalOcean_FullCheckoutLocalResolve(t *testing.T) {
@@ -107,73 +196,248 @@ func TestSpecIndex_DigitalOcean_FullCheckoutLocalResolve(t *testing.T) {
tmp, _ := os.MkdirTemp("", "openapi")
cmd := exec.Command("git", "clone", "https://github.com/digitalocean/openapi", tmp)
defer os.RemoveAll(filepath.Join(tmp, "openapi"))
+
err := cmd.Run()
if err != nil {
log.Fatalf("cmd.Run() failed with %s\n", err)
}
+
spec, _ := filepath.Abs(filepath.Join(tmp, "specification", "DigitalOcean-public.v2.yaml"))
- doLocal, _ := ioutil.ReadFile(spec)
+ doLocal, _ := os.ReadFile(spec)
+
var rootNode yaml.Node
_ = yaml.Unmarshal(doLocal, &rootNode)
- config := CreateOpenAPIIndexConfig()
- config.BasePath = filepath.Join(tmp, "specification")
+ basePath := filepath.Join(tmp, "specification")
- index := NewSpecIndexWithConfig(&rootNode, config)
+ // create a new config that allows local and remote to be mixed up.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AllowRemoteLookup = true
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = basePath
+ cf.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ DirFS: os.DirFS(cf.BasePath),
+ Logger: cf.Logger,
+ }
+
+ // create a new local filesystem.
+ fileFS, fsErr := NewLocalFSWithConfig(&fsCfg)
+ assert.NoError(t, fsErr)
+
+ files := fileFS.GetFiles()
+ fileLen := len(files)
+
+ assert.Equal(t, 1691, fileLen)
+
+ rolo.AddLocalFS(basePath, fileFS)
+
+ rErr := rolo.IndexTheRolodex()
+
+ assert.NoError(t, rErr)
+
+ index := rolo.GetRootIndex()
- assert.Len(t, index.GetAllExternalIndexes(), 291)
assert.NotNil(t, index)
- ref := index.SearchIndexForReference("resources/apps/apps_list_instanceSizes.yml")
- assert.NotNil(t, ref)
- assert.Equal(t, "operationId", ref[0].Node.Content[0].Value)
+ assert.Len(t, index.GetMappedReferencesSequenced(), 299)
+ assert.Len(t, index.GetMappedReferences(), 299)
+ assert.Len(t, fileFS.GetErrors(), 0)
- ref = index.SearchIndexForReference("examples/ruby/domains_create.yml")
- assert.NotNil(t, ref)
- assert.Equal(t, "lang", ref[0].Node.Content[0].Value)
+ // check circular references
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 0)
- ref = index.SearchIndexForReference("../../shared/responses/server_error.yml")
- assert.NotNil(t, ref)
- assert.Equal(t, "description", ref[0].Node.Content[0].Value)
+ assert.Equal(t, int64(1328224), rolo.RolodexFileSize())
+ assert.Equal(t, "1.27 MB", rolo.RolodexFileSizeAsString())
+ assert.Equal(t, 1691, rolo.RolodexTotalFiles())
+
+}
+
+func TestSpecIndex_DigitalOcean_FullCheckoutLocalResolve_RecursiveLookup(t *testing.T) {
+ // this is a full checkout of the digitalocean API repo.
+ tmp, _ := os.MkdirTemp("", "openapi")
+ cmd := exec.Command("git", "clone", "https://github.com/digitalocean/openapi", tmp)
+ defer os.RemoveAll(filepath.Join(tmp, "openapi"))
+
+ err := cmd.Run()
+ if err != nil {
+ log.Fatalf("cmd.Run() failed with %s\n", err)
+ }
+
+ spec, _ := filepath.Abs(filepath.Join(tmp, "specification", "DigitalOcean-public.v2.yaml"))
+ doLocal, _ := os.ReadFile(spec)
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal(doLocal, &rootNode)
+
+ basePath := filepath.Join(tmp, "specification")
+
+ // create a new config that allows local and remote to be mixed up.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AllowRemoteLookup = true
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = basePath
+ cf.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ IndexConfig: cf,
+ Logger: cf.Logger,
+ }
+
+ // create a new local filesystem.
+ fileFS, fsErr := NewLocalFSWithConfig(&fsCfg)
+ assert.NoError(t, fsErr)
+
+ rolo.AddLocalFS(basePath, fileFS)
+
+ rErr := rolo.IndexTheRolodex()
+ files := fileFS.GetFiles()
+ fileLen := len(files)
+
+ assert.Equal(t, 1677, fileLen)
+
+ assert.NoError(t, rErr)
+
+ index := rolo.GetRootIndex()
+
+ assert.NotNil(t, index)
+
+ assert.Len(t, index.GetMappedReferencesSequenced(), 299)
+ assert.Len(t, index.GetMappedReferences(), 299)
+ assert.Len(t, fileFS.GetErrors(), 0)
+
+ // check circular references
+ rolo.CheckForCircularReferences()
+ assert.Len(t, rolo.GetCaughtErrors(), 0)
+ assert.Len(t, rolo.GetIgnoredCircularReferences(), 0)
+
+ assert.Equal(t, int64(1266728), rolo.RolodexFileSize())
+ assert.Equal(t, "1.21 MB", rolo.RolodexFileSizeAsString())
+ assert.Equal(t, 1677, rolo.RolodexTotalFiles())
- ref = index.SearchIndexForReference("../models/options.yml")
- assert.NotNil(t, ref)
}
func TestSpecIndex_DigitalOcean_LookupsNotAllowed(t *testing.T) {
- asana, _ := ioutil.ReadFile("../test_specs/digitalocean.yaml")
+ do, _ := os.ReadFile("../test_specs/digitalocean.yaml")
var rootNode yaml.Node
- _ = yaml.Unmarshal(asana, &rootNode)
+ _ = yaml.Unmarshal(do, &rootNode)
- baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")
- index := NewSpecIndexWithConfig(&rootNode, &SpecIndexConfig{
- BaseURL: baseURL,
- })
+ location := "https://raw.githubusercontent.com/digitalocean/openapi/main/specification"
+ baseURL, _ := url.Parse(location)
+
+ // create a new config that does not allow remote lookups.
+ cf := &SpecIndexConfig{}
+ cf.AvoidBuildIndex = true
+ cf.AvoidCircularReferenceCheck = true
+ var op []byte
+ buf := bytes.NewBuffer(op)
+ cf.Logger = slog.New(slog.NewJSONHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+
+ // setting this baseURL overrides the base used to resolve remote references.
+ cf.BaseURL = baseURL
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // create a new remote fs and set the config for indexing.
+ remoteFS, _ := NewRemoteFSWithConfig(cf)
+
+ // add remote filesystem
+ rolo.AddRemoteFS(location, remoteFS)
+
+ // index the rolodex.
+ indexedErr := rolo.IndexTheRolodex()
+ assert.Error(t, indexedErr)
+ assert.Len(t, utils.UnwrapErrors(indexedErr), 291)
+
+ index := rolo.GetRootIndex()
+
+ files := remoteFS.GetFiles()
+ fileLen := len(files)
+ assert.Equal(t, 0, fileLen)
+ assert.Len(t, remoteFS.GetErrors(), 0)
// no lookups allowed, bits have not been set, so there should just be a bunch of errors.
- assert.Len(t, index.GetAllExternalIndexes(), 0)
assert.True(t, len(index.GetReferenceIndexErrors()) > 0)
}
func TestSpecIndex_BaseURLError(t *testing.T) {
- asana, _ := ioutil.ReadFile("../test_specs/digitalocean.yaml")
+
+ do, _ := os.ReadFile("../test_specs/digitalocean.yaml")
var rootNode yaml.Node
- _ = yaml.Unmarshal(asana, &rootNode)
+ _ = yaml.Unmarshal(do, &rootNode)
- // this should fail because the base url is not a valid url and digital ocean won't be able to resolve
- // anything.
- baseURL, _ := url.Parse("https://githerbs.com/fresh/herbs/for/you")
- index := NewSpecIndexWithConfig(&rootNode, &SpecIndexConfig{
- BaseURL: baseURL,
- AllowRemoteLookup: true,
- AllowFileLookup: true,
- })
+ location := "https://githerbsandcoffeeandcode.com/fresh/herbs/for/you" // not gonna work bro.
+ baseURL, _ := url.Parse(location)
+
+ // create a new config that allows remote lookups.
+ cf := &SpecIndexConfig{}
+ cf.AvoidBuildIndex = true
+ cf.AllowRemoteLookup = true
+ cf.AvoidCircularReferenceCheck = true
+ var op []byte
+ buf := bytes.NewBuffer(op)
+ cf.Logger = slog.New(slog.NewJSONHandler(buf, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+
+ // setting this baseURL overrides the base used to resolve remote references.
+ cf.BaseURL = baseURL
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // create a new remote fs and set the config for indexing.
+ remoteFS, _ := NewRemoteFSWithConfig(cf)
+
+ // add remote filesystem
+ rolo.AddRemoteFS(location, remoteFS)
+
+ // index the rolodex.
+ indexedErr := rolo.IndexTheRolodex()
+ assert.Error(t, indexedErr)
+ assert.Len(t, utils.UnwrapErrors(indexedErr), 291)
+
+ files := remoteFS.GetFiles()
+ fileLen := len(files)
+ assert.Equal(t, 0, fileLen)
+ assert.GreaterOrEqual(t, len(remoteFS.GetErrors()), 200)
- assert.Len(t, index.GetAllExternalIndexes(), 0)
}
func TestSpecIndex_k8s(t *testing.T) {
- asana, _ := ioutil.ReadFile("../test_specs/k8s.json")
+ asana, _ := os.ReadFile("../test_specs/k8s.json")
var rootNode yaml.Node
_ = yaml.Unmarshal(asana, &rootNode)
@@ -198,7 +462,7 @@ func TestSpecIndex_k8s(t *testing.T) {
}
func TestSpecIndex_PetstoreV2(t *testing.T) {
- asana, _ := ioutil.ReadFile("../test_specs/petstorev2.json")
+ asana, _ := os.ReadFile("../test_specs/petstorev2.json")
var rootNode yaml.Node
_ = yaml.Unmarshal(asana, &rootNode)
@@ -222,7 +486,7 @@ func TestSpecIndex_PetstoreV2(t *testing.T) {
}
func TestSpecIndex_XSOAR(t *testing.T) {
- xsoar, _ := ioutil.ReadFile("../test_specs/xsoar.json")
+ xsoar, _ := os.ReadFile("../test_specs/xsoar.json")
var rootNode yaml.Node
_ = yaml.Unmarshal(xsoar, &rootNode)
@@ -240,7 +504,7 @@ func TestSpecIndex_XSOAR(t *testing.T) {
}
func TestSpecIndex_PetstoreV3(t *testing.T) {
- petstore, _ := ioutil.ReadFile("../test_specs/petstorev3.json")
+ petstore, _ := os.ReadFile("../test_specs/petstorev3.json")
var rootNode yaml.Node
_ = yaml.Unmarshal(petstore, &rootNode)
@@ -263,12 +527,16 @@ func TestSpecIndex_PetstoreV3(t *testing.T) {
assert.Equal(t, 19, index.GetAllSummariesCount())
assert.Len(t, index.GetAllDescriptions(), 90)
assert.Len(t, index.GetAllSummaries(), 19)
+
+ index.SetAbsolutePath("/rooty/rootster")
+ assert.Equal(t, "/rooty/rootster", index.GetSpecAbsolutePath())
+
}
var mappedRefs = 15
func TestSpecIndex_BurgerShop(t *testing.T) {
- burgershop, _ := ioutil.ReadFile("../test_specs/burgershop.openapi.yaml")
+ burgershop, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
var rootNode yaml.Node
_ = yaml.Unmarshal(burgershop, &rootNode)
@@ -366,7 +634,7 @@ paths:
}
func TestSpecIndex_BurgerShop_AllTheComponents(t *testing.T) {
- burgershop, _ := ioutil.ReadFile("../test_specs/all-the-components.yaml")
+ burgershop, _ := os.ReadFile("../test_specs/all-the-components.yaml")
var rootNode yaml.Node
_ = yaml.Unmarshal(burgershop, &rootNode)
@@ -422,7 +690,7 @@ func TestSpecIndex_NoRoot(t *testing.T) {
docs := index.ExtractExternalDocuments(nil)
assert.Nil(t, docs)
assert.Nil(t, refs)
- assert.Nil(t, index.FindComponent("nothing", nil))
+ assert.Nil(t, index.FindComponent("nothing"))
assert.Equal(t, -1, index.GetOperationCount())
assert.Equal(t, -1, index.GetPathCount())
assert.Equal(t, -1, index.GetGlobalTagsCount())
@@ -434,18 +702,79 @@ func TestSpecIndex_NoRoot(t *testing.T) {
assert.Equal(t, -1, index.GetGlobalLinksCount())
}
+func test_buildMixedRefServer() *httptest.Server {
+
+ bs, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
+ return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
+ rw.Header().Set("Last-Modified", "Wed, 21 Oct 2015 07:28:00 GMT")
+ _, _ = rw.Write(bs)
+
+ }))
+}
+
func TestSpecIndex_BurgerShopMixedRef(t *testing.T) {
- spec, _ := ioutil.ReadFile("../test_specs/mixedref-burgershop.openapi.yaml")
+
+ // create a test server.
+ server := test_buildMixedRefServer()
+ defer server.Close()
+
+ // create a new config that allows local and remote to be mixed up.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidBuildIndex = true
+ cf.AllowRemoteLookup = true
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "../test_specs"
+ cf.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+
+ // setting this baseURL overrides the base used to resolve remote references.
+ cf.BaseURL, _ = url.Parse(server.URL)
+
+ cFile := "../test_specs/mixedref-burgershop.openapi.yaml"
+ yml, _ := os.ReadFile(cFile)
var rootNode yaml.Node
- _ = yaml.Unmarshal(spec, &rootNode)
+ _ = yaml.Unmarshal(yml, &rootNode)
- cwd, _ := os.Getwd()
+ // create a new rolodex
+ rolo := NewRolodex(cf)
- index := NewSpecIndexWithConfig(&rootNode, &SpecIndexConfig{
- AllowRemoteLookup: true,
- AllowFileLookup: true,
- BasePath: cwd,
- })
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // create a new remote fs and set the config for indexing.
+ remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
+ remoteFS.SetIndexConfig(cf)
+
+ // set our remote handler func
+
+ c := http.Client{}
+
+ remoteFS.RemoteHandlerFunc = c.Get
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"burgershop.openapi.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+
+ // create a new local filesystem.
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+ assert.NoError(t, err)
+
+ // add file systems to the rolodex
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+ rolo.AddRemoteFS(server.URL, remoteFS)
+
+ // index the rolodex.
+ indexedErr := rolo.IndexTheRolodex()
+ rolo.BuildIndexes()
+
+ assert.NoError(t, indexedErr)
+
+ index := rolo.GetRootIndex()
+ rolo.CheckForCircularReferences()
assert.Len(t, index.allRefs, 5)
assert.Len(t, index.allMappedRefs, 5)
@@ -460,12 +789,29 @@ func TestSpecIndex_BurgerShopMixedRef(t *testing.T) {
assert.Equal(t, 2, index.GetOperationsParameterCount())
assert.Equal(t, 1, index.GetInlineDuplicateParamCount())
assert.Equal(t, 1, index.GetInlineUniqueParamCount())
+ assert.Len(t, index.refErrors, 0)
+ assert.Len(t, index.GetCircularReferences(), 0)
+
+ // get the size of the rolodex.
+ assert.Equal(t, int64(60232), rolo.RolodexFileSize()+int64(len(yml)))
+ assert.Equal(t, "50.48 KB", rolo.RolodexFileSizeAsString())
+ assert.Equal(t, 3, rolo.RolodexTotalFiles())
+
+}
+
+func TestCalcSizeAsString(t *testing.T) {
+ assert.Equal(t, "345 B", HumanFileSize(345))
+ assert.Equal(t, "1 KB", HumanFileSize(1024))
+ assert.Equal(t, "1 KB", HumanFileSize(1025))
+ assert.Equal(t, "1.98 KB", HumanFileSize(2025))
+ assert.Equal(t, "1 MB", HumanFileSize(1025*1024))
+ assert.Equal(t, "1 GB", HumanFileSize(1025*1025*1025))
}
func TestSpecIndex_TestEmptyBrokenReferences(t *testing.T) {
- asana, _ := ioutil.ReadFile("../test_specs/badref-burgershop.openapi.yaml")
+ badref, _ := os.ReadFile("../test_specs/badref-burgershop.openapi.yaml")
var rootNode yaml.Node
- _ = yaml.Unmarshal(asana, &rootNode)
+ _ = yaml.Unmarshal(badref, &rootNode)
index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
assert.Equal(t, 5, index.GetPathCount())
@@ -479,7 +825,7 @@ func TestSpecIndex_TestEmptyBrokenReferences(t *testing.T) {
assert.Equal(t, 2, index.GetOperationsParameterCount())
assert.Equal(t, 1, index.GetInlineDuplicateParamCount())
assert.Equal(t, 1, index.GetInlineUniqueParamCount())
- assert.Len(t, index.refErrors, 7)
+ assert.Len(t, index.refErrors, 6)
}
func TestTagsNoDescription(t *testing.T) {
@@ -594,13 +940,13 @@ func TestSpecIndex_FindComponent_WithACrazyAssPath(t *testing.T) {
index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
assert.Equal(t, "#/paths/~1crazy~1ass~1references/get/parameters/0",
- index.FindComponent("#/paths/~1crazy~1ass~1references/get/responses/404/content/application~1xml;%20charset=utf-8/schema", nil).Node.Content[1].Value)
+ index.FindComponent("#/paths/~1crazy~1ass~1references/get/responses/404/content/application~1xml;%20charset=utf-8/schema").Node.Content[1].Value)
assert.Equal(t, "a param",
- index.FindComponent("#/paths/~1crazy~1ass~1references/get/parameters/0", nil).Node.Content[1].Value)
+ index.FindComponent("#/paths/~1crazy~1ass~1references/get/parameters/0").Node.Content[1].Value)
}
-func TestSpecIndex_FindComponenth(t *testing.T) {
+func TestSpecIndex_FindComponent(t *testing.T) {
yml := `components:
schemas:
pizza:
@@ -614,7 +960,7 @@ func TestSpecIndex_FindComponenth(t *testing.T) {
_ = yaml.Unmarshal([]byte(yml), &rootNode)
index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
- assert.Nil(t, index.FindComponent("I-do-not-exist", nil))
+ assert.Nil(t, index.FindComponent("I-do-not-exist"))
}
func TestSpecIndex_TestPathsNodeAsArray(t *testing.T) {
@@ -631,68 +977,7 @@ func TestSpecIndex_TestPathsNodeAsArray(t *testing.T) {
_ = yaml.Unmarshal([]byte(yml), &rootNode)
index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
- assert.Nil(t, index.performExternalLookup(nil, "unknown", nil, nil))
-}
-
-func TestSpecIndex_lookupRemoteReference_SeenSourceSimulation_Error(t *testing.T) {
- index := new(SpecIndex)
- index.seenRemoteSources = make(map[string]*yaml.Node)
- index.seenRemoteSources["https://no-hope-for-a-dope.com"] = &yaml.Node{}
- _, _, err := index.lookupRemoteReference("https://no-hope-for-a-dope.com#/$.....#[;]something")
- assert.Error(t, err)
-}
-
-func TestSpecIndex_lookupRemoteReference_SeenSourceSimulation_BadFind(t *testing.T) {
- index := new(SpecIndex)
- index.seenRemoteSources = make(map[string]*yaml.Node)
- index.seenRemoteSources["https://no-hope-for-a-dope.com"] = &yaml.Node{}
- a, b, err := index.lookupRemoteReference("https://no-hope-for-a-dope.com#/hey")
- assert.Error(t, err)
- assert.Nil(t, a)
- assert.Nil(t, b)
-}
-
-// Discovered in issue https://github.com/pb33f/libopenapi/issues/37
-func TestSpecIndex_lookupRemoteReference_NoComponent(t *testing.T) {
- index := new(SpecIndex)
- index.seenRemoteSources = make(map[string]*yaml.Node)
- index.seenRemoteSources["https://api.rest.sh/schemas/ErrorModel.json"] = &yaml.Node{}
- a, b, err := index.lookupRemoteReference("https://api.rest.sh/schemas/ErrorModel.json")
- assert.NoError(t, err)
- assert.NotNil(t, a)
- assert.NotNil(t, b)
-}
-
-// Discovered in issue https://github.com/daveshanley/vacuum/issues/225
-func TestSpecIndex_lookupFileReference_NoComponent(t *testing.T) {
- cwd, _ := os.Getwd()
- index := new(SpecIndex)
- index.config = &SpecIndexConfig{BasePath: cwd}
-
- _ = ioutil.WriteFile("coffee-time.yaml", []byte("time: for coffee"), 0o664)
- defer os.Remove("coffee-time.yaml")
-
- index.seenRemoteSources = make(map[string]*yaml.Node)
- a, b, err := index.lookupFileReference("coffee-time.yaml")
- assert.NoError(t, err)
- assert.NotNil(t, a)
- assert.NotNil(t, b)
-}
-
-func TestSpecIndex_CheckBadURLRef(t *testing.T) {
- yml := `openapi: 3.1.0
-paths:
- /cakes:
- post:
- parameters:
- - $ref: 'httpsss://badurl'`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(yml), &rootNode)
-
- index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
-
- assert.Len(t, index.refErrors, 2)
+ assert.Nil(t, index.lookupRolodex(nil))
}
func TestSpecIndex_CheckBadURLRefNoRemoteAllowed(t *testing.T) {
@@ -709,15 +994,34 @@ paths:
c := CreateClosedAPIIndexConfig()
idx := NewSpecIndexWithConfig(&rootNode, c)
- assert.Len(t, idx.refErrors, 2)
- assert.Equal(t, "remote lookups are not permitted, "+
- "please set AllowRemoteLookup to true in the configuration", idx.refErrors[0].Error())
+ assert.Len(t, idx.refErrors, 1)
}
func TestSpecIndex_CheckIndexDiscoversNoComponentLocalFileReference(t *testing.T) {
- _ = ioutil.WriteFile("coffee-time.yaml", []byte("name: time for coffee"), 0o664)
+ c := []byte("name: time for coffee")
+
+ _ = os.WriteFile("coffee-time.yaml", c, 0o664)
defer os.Remove("coffee-time.yaml")
+ // create a new config for resolving local file references.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "."
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"coffee-time.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+
+ // create a new local filesystem.
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+ assert.NoError(t, err)
+
yml := `openapi: 3.0.3
paths:
/cakes:
@@ -725,80 +1029,121 @@ paths:
parameters:
- $ref: 'coffee-time.yaml'`
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(yml), &rootNode)
+ var coffee yaml.Node
+ _ = yaml.Unmarshal([]byte(yml), &coffee)
- index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&coffee)
+
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+ rErr := rolo.IndexTheRolodex()
+
+ assert.NoError(t, rErr)
+
+ index := rolo.GetRootIndex()
assert.NotNil(t, index.GetAllParametersFromOperations()["/cakes"]["post"]["coffee-time.yaml"][0].Node)
}
-func TestSpecIndex_lookupRemoteReference_SeenSourceSimulation_BadJSON(t *testing.T) {
- index := NewSpecIndexWithConfig(nil, &SpecIndexConfig{
- AllowRemoteLookup: true,
- })
- index.seenRemoteSources = make(map[string]*yaml.Node)
- a, b, err := index.lookupRemoteReference("https://google.com//logos/doodles/2022/labor-day-2022-6753651837109490.3-l.png#/hey")
- assert.Error(t, err)
- assert.Nil(t, a)
- assert.Nil(t, b)
-}
-
-func TestSpecIndex_lookupFileReference_BadFileName(t *testing.T) {
- index := NewSpecIndexWithConfig(nil, CreateOpenAPIIndexConfig())
- _, _, err := index.lookupFileReference("not-a-reference")
- assert.Error(t, err)
-}
-
-func TestSpecIndex_lookupFileReference_SeenSourceSimulation_Error(t *testing.T) {
- index := NewSpecIndexWithConfig(nil, CreateOpenAPIIndexConfig())
- index.seenRemoteSources = make(map[string]*yaml.Node)
- index.seenRemoteSources["magic-money-file.json"] = &yaml.Node{}
- _, _, err := index.lookupFileReference("magic-money-file.json#something")
- assert.Error(t, err)
-}
-
-func TestSpecIndex_lookupFileReference_BadFile(t *testing.T) {
- index := NewSpecIndexWithConfig(nil, CreateOpenAPIIndexConfig())
- _, _, err := index.lookupFileReference("chickers.json#no-rice")
- assert.Error(t, err)
-}
-
-func TestSpecIndex_lookupFileReference_BadFileDataRead(t *testing.T) {
- _ = ioutil.WriteFile("chickers.yaml", []byte("broke: the: thing: [again]"), 0o664)
- defer os.Remove("chickers.yaml")
- var root yaml.Node
- index := NewSpecIndexWithConfig(&root, CreateOpenAPIIndexConfig())
- _, _, err := index.lookupFileReference("chickers.yaml#no-rice")
- assert.Error(t, err)
-}
-
func TestSpecIndex_lookupFileReference_MultiRes(t *testing.T) {
- _ = ioutil.WriteFile("embie.yaml", []byte("naughty:\n - puppy: dog\n - puppy: naughty\npuppy:\n - naughty: puppy"), 0o664)
+
+ embie := []byte("naughty:\n - puppy: dog\n - puppy: naughty\npuppy:\n - naughty: puppy")
+
+ _ = os.WriteFile("embie.yaml", embie, 0o664)
defer os.Remove("embie.yaml")
- index := NewSpecIndexWithConfig(nil, CreateOpenAPIIndexConfig())
- index.seenRemoteSources = make(map[string]*yaml.Node)
- k, doc, err := index.lookupFileReference("embie.yaml#/.naughty")
+ // create a new config for resolving local file references.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidBuildIndex = true
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "."
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ var myPuppy yaml.Node
+ _ = yaml.Unmarshal(embie, &myPuppy)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&myPuppy)
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"embie.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+
+ // create a new local filesystem.
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
assert.NoError(t, err)
- assert.NotNil(t, doc)
- assert.Nil(t, k)
+
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+ rErr := rolo.IndexTheRolodex()
+
+ assert.NoError(t, rErr)
+
+ embieRoloFile, fErr := rolo.Open("embie.yaml")
+
+ assert.NoError(t, fErr)
+ assert.NotNil(t, embieRoloFile)
+
+ index := rolo.GetRootIndex()
+ absoluteRef, _ := filepath.Abs("embie.yaml#/naughty")
+ fRef, _ := index.SearchIndexForReference(absoluteRef)
+ assert.NotNil(t, fRef)
+
}
func TestSpecIndex_lookupFileReference(t *testing.T) {
- _ = ioutil.WriteFile("fox.yaml", []byte("good:\n - puppy: dog\n - puppy: forever-more"), 0o664)
+
+ pup := []byte("good:\n - puppy: dog\n - puppy: forever-more")
+
+ var myPuppy yaml.Node
+ _ = yaml.Unmarshal(pup, &myPuppy)
+
+ _ = os.WriteFile("fox.yaml", pup, 0o664)
defer os.Remove("fox.yaml")
- index := NewSpecIndexWithConfig(nil, CreateOpenAPIIndexConfig())
- index.seenRemoteSources = make(map[string]*yaml.Node)
- k, doc, err := index.lookupFileReference("fox.yaml#/good")
+ // create a new config for resolving local file references.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidBuildIndex = true
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "."
+
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&myPuppy)
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"fox.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+
+ // create a new local filesystem.
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
assert.NoError(t, err)
- assert.NotNil(t, doc)
- assert.NotNil(t, k)
+
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+ rErr := rolo.IndexTheRolodex()
+
+ assert.NoError(t, rErr)
+
+ fox, fErr := rolo.Open("fox.yaml")
+ assert.NoError(t, fErr)
+ assert.Equal(t, "fox.yaml", fox.Name())
+ assert.Equal(t, "good:\n - puppy: dog\n - puppy: forever-more", string(fox.GetContent()))
+
}
func TestSpecIndex_parameterReferencesHavePaths(t *testing.T) {
- _ = ioutil.WriteFile("paramour.yaml", []byte(`components:
+
+ _ = os.WriteFile("paramour.yaml", []byte(`components:
parameters:
param3:
name: param3
@@ -807,6 +1152,13 @@ func TestSpecIndex_parameterReferencesHavePaths(t *testing.T) {
type: string`), 0o664)
defer os.Remove("paramour.yaml")
+ // create a new config that allows local and remote to be mixed up.
+ cf := CreateOpenAPIIndexConfig()
+ cf.AvoidBuildIndex = true
+ cf.AllowRemoteLookup = true
+ cf.AvoidCircularReferenceCheck = true
+ cf.BasePath = "."
+
yml := `paths:
/:
parameters:
@@ -837,7 +1189,32 @@ components:
var rootNode yaml.Node
_ = yaml.Unmarshal([]byte(yml), &rootNode)
- index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
+ // create a new rolodex
+ rolo := NewRolodex(cf)
+
+ // set the rolodex root node to the root node of the spec.
+ rolo.SetRootNode(&rootNode)
+
+ // configure the local filesystem.
+ fsCfg := LocalFSConfig{
+ BaseDirectory: cf.BasePath,
+ FileFilters: []string{"paramour.yaml"},
+ DirFS: os.DirFS(cf.BasePath),
+ }
+
+ // create a new local filesystem.
+ fileFS, err := NewLocalFSWithConfig(&fsCfg)
+ assert.NoError(t, err)
+
+ // add file system
+ rolo.AddLocalFS(cf.BasePath, fileFS)
+
+ // index the rolodex.
+ indexedErr := rolo.IndexTheRolodex()
+ assert.NoError(t, indexedErr)
+ rolo.BuildIndexes()
+
+ index := rolo.GetRootIndex()
params := index.GetAllParametersFromOperations()
diff --git a/index/utility_methods.go b/index/utility_methods.go
index 8803872..0da56d5 100644
--- a/index/utility_methods.go
+++ b/index/utility_methods.go
@@ -6,6 +6,7 @@ package index
import (
"fmt"
"net/url"
+ "path/filepath"
"strings"
"sync"
@@ -22,20 +23,23 @@ func (index *SpecIndex) extractDefinitionsAndSchemas(schemasNode *yaml.Node, pat
}
def := fmt.Sprintf("%s%s", pathPrefix, name)
+ fullDef := fmt.Sprintf("%s%s", index.specAbsolutePath, def)
+
ref := &Reference{
+ FullDefinition: fullDef,
Definition: def,
Name: name,
Node: schema,
Path: fmt.Sprintf("$.components.schemas.%s", name),
ParentNode: schemasNode,
- RequiredRefProperties: index.extractDefinitionRequiredRefProperties(schemasNode, map[string][]string{}),
+ RequiredRefProperties: extractDefinitionRequiredRefProperties(schemasNode, map[string][]string{}, fullDef, index),
}
index.allComponentSchemaDefinitions[def] = ref
}
}
// extractDefinitionRequiredRefProperties goes through the direct properties of a schema and extracts the map of required definitions from within it
-func (index *SpecIndex) extractDefinitionRequiredRefProperties(schemaNode *yaml.Node, reqRefProps map[string][]string) map[string][]string {
+func extractDefinitionRequiredRefProperties(schemaNode *yaml.Node, reqRefProps map[string][]string, fulldef string, idx *SpecIndex) map[string][]string {
if schemaNode == nil {
return reqRefProps
}
@@ -70,7 +74,7 @@ func (index *SpecIndex) extractDefinitionRequiredRefProperties(schemaNode *yaml.
// Check to see if the current property is directly embedded within the current schema, and handle its properties if so
_, paramPropertiesMapNode := utils.FindKeyNodeTop("properties", param.Content)
if paramPropertiesMapNode != nil {
- reqRefProps = index.extractDefinitionRequiredRefProperties(param, reqRefProps)
+ reqRefProps = extractDefinitionRequiredRefProperties(param, reqRefProps, fulldef, idx)
}
// Check to see if the current property is polymorphic, and dive into that model if so
@@ -78,7 +82,7 @@ func (index *SpecIndex) extractDefinitionRequiredRefProperties(schemaNode *yaml.
_, ofNode := utils.FindKeyNodeTop(key, param.Content)
if ofNode != nil {
for _, ofNodeItem := range ofNode.Content {
- reqRefProps = index.extractRequiredReferenceProperties(ofNodeItem, name, reqRefProps)
+ reqRefProps = extractRequiredReferenceProperties(fulldef, idx, ofNodeItem, name, reqRefProps)
}
}
}
@@ -91,19 +95,19 @@ func (index *SpecIndex) extractDefinitionRequiredRefProperties(schemaNode *yaml.
continue
}
- reqRefProps = index.extractRequiredReferenceProperties(requiredPropDefNode, requiredPropertyNode.Value, reqRefProps)
+ reqRefProps = extractRequiredReferenceProperties(fulldef, idx, requiredPropDefNode, requiredPropertyNode.Value, reqRefProps)
}
return reqRefProps
}
// extractRequiredReferenceProperties returns a map of definition names to the property or properties which reference it within a node
-func (index *SpecIndex) extractRequiredReferenceProperties(requiredPropDefNode *yaml.Node, propName string, reqRefProps map[string][]string) map[string][]string {
- isRef, _, defPath := utils.IsNodeRefValue(requiredPropDefNode)
+func extractRequiredReferenceProperties(fulldef string, idx *SpecIndex, requiredPropDefNode *yaml.Node, propName string, reqRefProps map[string][]string) map[string][]string {
+ isRef, _, refName := utils.IsNodeRefValue(requiredPropDefNode)
if !isRef {
_, defItems := utils.FindKeyNodeTop("items", requiredPropDefNode.Content)
if defItems != nil {
- isRef, _, defPath = utils.IsNodeRefValue(defItems)
+ isRef, _, refName = utils.IsNodeRefValue(defItems)
}
}
@@ -111,6 +115,71 @@ func (index *SpecIndex) extractRequiredReferenceProperties(requiredPropDefNode *
return reqRefProps
}
+ defPath := fulldef
+
+ if strings.HasPrefix(refName, "http") || filepath.IsAbs(refName) {
+ defPath = refName
+ } else {
+ exp := strings.Split(fulldef, "#/")
+ if len(exp) == 2 {
+ if exp[0] != "" {
+ if strings.HasPrefix(exp[0], "http") {
+ u, _ := url.Parse(exp[0])
+ r := strings.Split(refName, "#/")
+ if len(r) == 2 {
+ var abs string
+ if r[0] == "" {
+ abs = u.Path
+ } else {
+ abs, _ = filepath.Abs(filepath.Join(filepath.Dir(u.Path), r[0]))
+ }
+
+ u.Path = abs
+ u.Fragment = ""
+ defPath = fmt.Sprintf("%s#/%s", u.String(), r[1])
+ } else {
+ u.Path = filepath.Join(filepath.Dir(u.Path), r[0])
+ u.Fragment = ""
+ defPath = u.String()
+ }
+ } else {
+ r := strings.Split(refName, "#/")
+ if len(r) == 2 {
+ var abs string
+ if r[0] == "" {
+ abs, _ = filepath.Abs(exp[0])
+ } else {
+ abs, _ = filepath.Abs(filepath.Join(filepath.Dir(exp[0]), r[0]))
+ }
+
+ defPath = fmt.Sprintf("%s#/%s", abs, r[1])
+ } else {
+ defPath, _ = filepath.Abs(filepath.Join(filepath.Dir(exp[0]), r[0]))
+ }
+ }
+ } else {
+ defPath = refName
+ }
+ } else {
+ if strings.HasPrefix(exp[0], "http") {
+ u, _ := url.Parse(exp[0])
+ r := strings.Split(refName, "#/")
+ if len(r) == 2 {
+ abs, _ := filepath.Abs(filepath.Join(filepath.Dir(u.Path), r[0]))
+ u.Path = abs
+ u.Fragment = ""
+ defPath = fmt.Sprintf("%s#/%s", u.String(), r[1])
+ } else {
+ u.Path = filepath.Join(filepath.Dir(u.Path), r[0])
+ u.Fragment = ""
+ defPath = u.String()
+ }
+ } else {
+ defPath, _ = filepath.Abs(filepath.Join(filepath.Dir(exp[0]), refName))
+ }
+ }
+ }
+
if _, ok := reqRefProps[defPath]; !ok {
reqRefProps[defPath] = []string{}
}
@@ -239,19 +308,24 @@ func (index *SpecIndex) extractComponentExamples(examplesNode *yaml.Node, pathPr
}
func (index *SpecIndex) extractComponentSecuritySchemes(securitySchemesNode *yaml.Node, pathPrefix string) {
+
var name string
- for i, secScheme := range securitySchemesNode.Content {
+ for i, schema := range securitySchemesNode.Content {
if i%2 == 0 {
- name = secScheme.Value
+ name = schema.Value
continue
}
def := fmt.Sprintf("%s%s", pathPrefix, name)
+ fullDef := fmt.Sprintf("%s%s", index.specAbsolutePath, def)
+
ref := &Reference{
- Definition: def,
- Name: name,
- Node: secScheme,
- ParentNode: securitySchemesNode,
- Path: fmt.Sprintf("$.components.securitySchemes.%s", name),
+ FullDefinition: fullDef,
+ Definition: def,
+ Name: name,
+ Node: schema,
+ Path: fmt.Sprintf("$.components.securitySchemes.%s", name),
+ ParentNode: securitySchemesNode,
+ RequiredRefProperties: extractDefinitionRequiredRefProperties(securitySchemesNode, map[string][]string{}, fullDef, index),
}
index.allSecuritySchemes[def] = ref
}
@@ -271,6 +345,16 @@ func (index *SpecIndex) countUniqueInlineDuplicates() int {
return unique
}
+func seekRefEnd(index *SpecIndex, refName string) *Reference {
+ ref, _ := index.SearchIndexForReference(refName)
+ if ref != nil {
+ if ok, _, v := utils.IsNodeRefValue(ref.Node); ok {
+ return seekRefEnd(ref.Index, v)
+ }
+ }
+ return ref
+}
+
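// A minimal, self-contained sketch of the idea behind seekRefEnd above, using an assumed
// map-based lookup rather than the library's types: keep hopping while the target of a
// reference is itself a reference, and return the terminal entry. seekRefEnd does the same
// across indexes by re-entering with ref.Index, so the lookup continues in whichever file
// the reference resolved to.
func chaseRefChain(refs map[string]string, start string) string {
	seen := map[string]bool{}
	cur := start
	for refs[cur] != "" && !seen[cur] { // stop at a terminal entry; guard against cycles
		seen[cur] = true
		cur = refs[cur] // hop to the next reference target
	}
	return cur
}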
func (index *SpecIndex) scanOperationParams(params []*yaml.Node, pathItemNode *yaml.Node, method string) {
for i, param := range params {
// param is ref
@@ -278,6 +362,16 @@ func (index *SpecIndex) scanOperationParams(params []*yaml.Node, pathItemNode *y
paramRefName := param.Content[1].Value
paramRef := index.allMappedRefs[paramRefName]
+ if paramRef == nil {
+ // could be in the rolodex
+ ref := seekRefEnd(index, paramRefName)
+ if ref != nil {
+ paramRef = ref
+ if strings.Contains(paramRefName, "%") {
+ paramRefName, _ = url.QueryUnescape(paramRefName)
+ }
+ }
+ }
if index.paramOpRefs[pathItemNode.Value] == nil {
index.paramOpRefs[pathItemNode.Value] = make(map[string]map[string][]*Reference)
@@ -441,9 +535,5 @@ func GenerateCleanSpecConfigBaseURL(baseURL *url.URL, dir string, includeFile bo
}
}
- if strings.HasSuffix(p, "/") {
- p = p[:len(p)-1]
- }
- return p
-
+ return strings.TrimSuffix(p, "/")
}
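The branching added above reduces to a simple rule: a $ref that is already absolute (http(s) or an absolute path) is used as-is; otherwise the relative file part is joined onto the directory of the parent's full definition path (URL path or filesystem path) and any fragment is re-attached. A minimal sketch of that rule follows, with assumed names and none of the library's edge-case handling; the expected outputs mirror the test cases in the file below.

package main

import (
	"fmt"
	"net/url"
	"path/filepath"
	"strings"
)

// resolveRefPath is a simplified sketch, not the library's exact logic.
func resolveRefPath(fulldef, refName string) string {
	if strings.HasPrefix(refName, "http") || filepath.IsAbs(refName) {
		return refName // already absolute, nothing to join
	}
	base := strings.SplitN(fulldef, "#/", 2)[0] // parent file or URL, fragment dropped
	ref := strings.SplitN(refName, "#/", 2)     // relative file plus optional fragment
	if strings.HasPrefix(base, "http") {
		u, _ := url.Parse(base)
		u.Path = filepath.Join(filepath.Dir(u.Path), ref[0])
		if len(ref) == 2 {
			return fmt.Sprintf("%s#/%s", u.String(), ref[1])
		}
		return u.String()
	}
	abs, _ := filepath.Abs(filepath.Join(filepath.Dir(base), ref[0]))
	if len(ref) == 2 {
		return fmt.Sprintf("%s#/%s", abs, ref[1])
	}
	return abs
}

func main() {
	// mirrors Test_extractRequiredReferenceProperties_abs3 and _http_rel below
	fmt.Println(resolveRefPath("/big/fat/camel.yaml#/milk", "oh/pillow.yaml"))
	// -> /big/fat/oh/pillow.yaml
	fmt.Println(resolveRefPath("http://beer-world.com/lost/in/space.yaml#/vase", "my/wet/camel.yaml#/rum/cake"))
	// -> http://beer-world.com/lost/in/my/wet/camel.yaml#/rum/cake
}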
diff --git a/index/utility_methods_test.go b/index/utility_methods_test.go
index aec4506..86271fb 100644
--- a/index/utility_methods_test.go
+++ b/index/utility_methods_test.go
@@ -5,6 +5,7 @@ package index
import (
"github.com/stretchr/testify/assert"
+ "gopkg.in/yaml.v3"
"net/url"
"testing"
)
@@ -48,8 +49,213 @@ func TestGenerateCleanSpecConfigBaseURL_HttpStrip(t *testing.T) {
GenerateCleanSpecConfigBaseURL(u, "crap.yaml#thing", true))
}
-func TestSpecIndex_extractDefinitionRequiredRefProperties(t *testing.T) {
- c := CreateOpenAPIIndexConfig()
- idx := NewSpecIndexWithConfig(nil, c)
- assert.Nil(t, idx.extractDefinitionRequiredRefProperties(nil, nil))
+func Test_extractRequiredReferenceProperties(t *testing.T) {
+
+ d := `$ref: http://internets/shoes`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("the-big.yaml#/cheese/thing", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_singleFile(t *testing.T) {
+
+ d := `$ref: http://cake.yaml/camel.yaml`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("dingo-bingo-bango.yaml", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_http(t *testing.T) {
+
+ d := `$ref: http://cake.yaml/camel.yaml`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("http://dingo-bingo-bango.yaml/camel.yaml", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_abs(t *testing.T) {
+
+ d := `$ref: http://cake.yaml/camel.yaml`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("/camel.yaml", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_abs3(t *testing.T) {
+
+ d := `$ref: oh/pillow.yaml`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("/big/fat/camel.yaml#/milk", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["/big/fat/oh/pillow.yaml"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_rel_full(t *testing.T) {
+
+ d := `$ref: "#/a/nice/picture/of/cake"`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("/chalky/milky/camel.yaml#/milk", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["/chalky/milky/camel.yaml#/a/nice/picture/of/cake"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_rel(t *testing.T) {
+
+ d := `$ref: oh/camel.yaml#/rum/cake`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("/camel.yaml#/milk", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["/oh/camel.yaml#/rum/cake"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_abs2(t *testing.T) {
+
+ d := `$ref: /oh/my/camel.yaml#/rum/cake`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("../flannel.yaml#/milk", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["/oh/my/camel.yaml#/rum/cake"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_http_rel(t *testing.T) {
+
+ d := `$ref: my/wet/camel.yaml#/rum/cake`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("http://beer-world.com/lost/in/space.yaml#/vase", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["http://beer-world.com/lost/in/my/wet/camel.yaml#/rum/cake"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_http_rel_nocomponent(t *testing.T) {
+
+ d := `$ref: my/wet/camel.yaml`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("http://beer-world.com/lost/in/space.yaml#/vase", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["http://beer-world.com/lost/in/my/wet/camel.yaml"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_nocomponent(t *testing.T) {
+
+ d := `$ref: my/wet/camel.yaml`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("#/rotund/cakes", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["my/wet/camel.yaml"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_component_http(t *testing.T) {
+
+ d := `$ref: go-to-bed.com/no/more/cake.yaml#/lovely/jam`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("http://bunny-bun-bun.com/no.yaml", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["http://bunny-bun-bun.com/go-to-bed.com/no/more/cake.yaml#/lovely/jam"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractRequiredReferenceProperties_nocomponent_http(t *testing.T) {
+
+ d := `$ref: go-to-bed.com/no/more/cake.yaml`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("http://bunny-bun-bun.com/no.yaml", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["http://bunny-bun-bun.com/go-to-bed.com/no/more/cake.yaml"][0])
+ assert.NotNil(t, data)
+
+}
+
+func Test_extractRequiredReferenceProperties_nocomponent_http2(t *testing.T) {
+
+ d := `$ref: go-to-bed.com/no/more/cake.yaml`
+
+ var rootNode yaml.Node
+ _ = yaml.Unmarshal([]byte(d), &rootNode)
+ props := make(map[string][]string)
+
+ data := extractRequiredReferenceProperties("/why.yaml", nil,
+ rootNode.Content[0], "cakes", props)
+ assert.Len(t, props, 1)
+ assert.Equal(t, "cakes", props["/go-to-bed.com/no/more/cake.yaml"][0])
+ assert.NotNil(t, data)
+}
+
+func Test_extractDefinitionRequiredRefProperties_nil(t *testing.T) {
+ assert.Nil(t, extractDefinitionRequiredRefProperties(nil, nil, "", nil))
}
diff --git a/renderer/mock_generator_test.go b/renderer/mock_generator_test.go
index 2308d15..2d79757 100644
--- a/renderer/mock_generator_test.go
+++ b/renderer/mock_generator_test.go
@@ -4,6 +4,7 @@
package renderer
import (
+ "context"
"encoding/json"
"strings"
"testing"
@@ -45,7 +46,7 @@ func createFakeMock(mock string, values map[string]any, example any) *fakeMockab
var root yaml.Node
_ = yaml.Unmarshal([]byte(mock), &root)
var lowProxy lowbase.SchemaProxy
- _ = lowProxy.Build(&root, root.Content[0], nil)
+ _ = lowProxy.Build(context.Background(), &root, root.Content[0], nil)
lowRef := low.NodeReference[*lowbase.SchemaProxy]{
Value: &lowProxy,
}
@@ -68,7 +69,7 @@ func createFakeMockWithoutProxy(mock string, values map[string]any, example any)
var root yaml.Node
_ = yaml.Unmarshal([]byte(mock), &root)
var lowProxy lowbase.SchemaProxy
- _ = lowProxy.Build(&root, root.Content[0], nil)
+ _ = lowProxy.Build(context.Background(), &root, root.Content[0], nil)
lowRef := low.NodeReference[*lowbase.SchemaProxy]{
Value: &lowProxy,
}
diff --git a/renderer/schema_renderer.go b/renderer/schema_renderer.go
index 39135bb..1747dfe 100644
--- a/renderer/schema_renderer.go
+++ b/renderer/schema_renderer.go
@@ -58,7 +58,8 @@ func init() {
// SchemaRenderer is a renderer that will generate random words, numbers and values based on a dictionary file.
// The dictionary is just a slice of strings that is used to generate random words.
type SchemaRenderer struct {
- words []string
+ words []string
+ disableRequired bool
}
// CreateRendererUsingDictionary will create a new SchemaRenderer using a custom dictionary file.
@@ -86,6 +87,13 @@ func (wr *SchemaRenderer) RenderSchema(schema *base.Schema) any {
return structure[rootType].(any)
}
+// DisableRequiredCheck will disable the required check when rendering a schema. This means that all properties
+// will be rendered, not just the required ones.
+// https://github.com/pb33f/libopenapi/issues/200
+func (wr *SchemaRenderer) DisableRequiredCheck() {
+ wr.disableRequired = true
+}
+
// DiveIntoSchema will dive into a schema and inject values from examples into a map. If there are no examples in
// the schema, then the renderer will attempt to generate a value based on the schema type, format and pattern.
func (wr *SchemaRenderer) DiveIntoSchema(schema *base.Schema, key string, structure map[string]any, depth int) {
@@ -220,7 +228,7 @@ func (wr *SchemaRenderer) DiveIntoSchema(schema *base.Schema, key string, struct
// check if this schema has required properties, if so, then only render required props, if not
// render everything in the schema.
checkProps := orderedmap.New[string, *base.SchemaProxy]()
- if len(schema.Required) > 0 {
+ if !wr.disableRequired && len(schema.Required) > 0 {
for _, requiredProp := range schema.Required {
checkProps.Set(requiredProp, properties.GetOrZero(requiredProp))
}
@@ -267,29 +275,23 @@ func (wr *SchemaRenderer) DiveIntoSchema(schema *base.Schema, key string, struct
// handle oneOf
oneOf := schema.OneOf
- if oneOf != nil {
+ if len(oneOf) > 0 {
oneOfMap := make(map[string]any)
- for _, oneOfSchema := range oneOf {
- oneOfCompiled := oneOfSchema.Schema()
- wr.DiveIntoSchema(oneOfCompiled, oneOfType, oneOfMap, depth+1)
- for k, v := range oneOfMap[oneOfType].(map[string]any) {
- propertyMap[k] = v
- }
- break // one run once for the first result.
+ oneOfCompiled := oneOf[0].Schema()
+ wr.DiveIntoSchema(oneOfCompiled, oneOfType, oneOfMap, depth+1)
+ for k, v := range oneOfMap[oneOfType].(map[string]any) {
+ propertyMap[k] = v
}
}
// handle anyOf
anyOf := schema.AnyOf
- if anyOf != nil {
+ if len(anyOf) > 0 {
anyOfMap := make(map[string]any)
- for _, anyOfSchema := range anyOf {
- anyOfCompiled := anyOfSchema.Schema()
- wr.DiveIntoSchema(anyOfCompiled, anyOfType, anyOfMap, depth+1)
- for k, v := range anyOfMap[anyOfType].(map[string]any) {
- propertyMap[k] = v
- }
- break // one run once for the first result only, same as oneOf
+ anyOfCompiled := anyOf[0].Schema()
+ wr.DiveIntoSchema(anyOfCompiled, anyOfType, anyOfMap, depth+1)
+ for k, v := range anyOfMap[anyOfType].(map[string]any) {
+ propertyMap[k] = v
}
}
structure[key] = propertyMap
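The required-check gate above is the behavioural change behind DisableRequiredCheck: with the check enabled (the default) and a non-empty required list, only required properties are rendered; with it disabled, every property is. A minimal, self-contained sketch of that gate, using assumed names rather than the renderer's types:

package main

import "fmt"

// propsToRender mirrors the `!wr.disableRequired && len(schema.Required) > 0` gate:
// honour the required list unless the check has been disabled.
func propsToRender(all, required []string, disableRequired bool) []string {
	if !disableRequired && len(required) > 0 {
		return required
	}
	return all
}

func main() {
	all := []string{"burger", "fries", "drink"}
	required := []string{"drink"}
	fmt.Println(propsToRender(all, required, false)) // [drink]
	fmt.Println(propsToRender(all, required, true))  // [burger fries drink]
}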
diff --git a/renderer/schema_renderer_test.go b/renderer/schema_renderer_test.go
index b9acc01..da37cbc 100644
--- a/renderer/schema_renderer_test.go
+++ b/renderer/schema_renderer_test.go
@@ -4,6 +4,7 @@
package renderer
import (
+ "context"
"encoding/base64"
"encoding/json"
"errors"
@@ -57,7 +58,7 @@ func getSchema(schema []byte) *highbase.Schema {
panic(e)
}
sp := new(lowbase.SchemaProxy)
- _ = sp.Build(nil, compNode.Content[0], nil)
+ _ = sp.Build(context.Background(), nil, compNode.Content[0], nil)
lp := low.NodeReference[*lowbase.SchemaProxy]{
Value: sp,
ValueNode: compNode.Content[0],
@@ -960,6 +961,32 @@ properties:
assert.Nil(t, journeyMap["pb33f"].(map[string]interface{})["fries"])
}
+func TestRenderExample_Test_RequiredCheckDisabled(t *testing.T) {
+ testObject := `type: [object]
+required:
+ - drink
+properties:
+ burger:
+ type: string
+ fries:
+ type: string
+ drink:
+ type: string`
+
+ compiled := getSchema([]byte(testObject))
+
+ journeyMap := make(map[string]any)
+ wr := createSchemaRenderer()
+ wr.DisableRequiredCheck()
+ wr.DiveIntoSchema(compiled, "pb33f", journeyMap, 0)
+
+ assert.NotNil(t, journeyMap["pb33f"])
+ drink := journeyMap["pb33f"].(map[string]interface{})["drink"].(string)
+ assert.NotNil(t, drink)
+ assert.NotNil(t, journeyMap["pb33f"].(map[string]interface{})["burger"])
+ assert.NotNil(t, journeyMap["pb33f"].(map[string]interface{})["fries"])
+}
+
func TestRenderSchema_WithExample(t *testing.T) {
testObject := `type: [object]
properties:
@@ -1132,7 +1159,7 @@ properties:
buildSchema := func() *highbase.SchemaProxy {
sp := new(lowbase.SchemaProxy)
- _ = sp.Build(nil, compNode.Content[0], nil)
+ _ = sp.Build(context.Background(), nil, compNode.Content[0], nil)
lp := low.NodeReference[*lowbase.SchemaProxy]{
Value: sp,
ValueNode: compNode.Content[0],
diff --git a/resolver/resolver.go b/resolver/resolver.go
deleted file mode 100644
index ad191e7..0000000
--- a/resolver/resolver.go
+++ /dev/null
@@ -1,514 +0,0 @@
-// Copyright 2022 Dave Shanley / Quobix
-// SPDX-License-Identifier: MIT
-
-package resolver
-
-import (
- "fmt"
-
- "github.com/pb33f/libopenapi/index"
- "github.com/pb33f/libopenapi/utils"
- "gopkg.in/yaml.v3"
-)
-
-// ResolvingError represents an issue the resolver had trying to stitch the tree together.
-type ResolvingError struct {
- // ErrorRef is the error thrown by the resolver
- ErrorRef error
-
- // Node is the *yaml.Node reference that contains the resolving error
- Node *yaml.Node
-
- // Path is the shortened journey taken by the resolver
- Path string
-
- // CircularReference is set if the error is a reference to the circular reference.
- CircularReference *index.CircularReferenceResult
-}
-
-func (r *ResolvingError) Error() string {
- return fmt.Sprintf("%s: %s [%d:%d]", r.ErrorRef.Error(),
- r.Path, r.Node.Line, r.Node.Column)
-}
-
-// Resolver will use a *index.SpecIndex to stitch together a resolved root tree using all the discovered
-// references in the doc.
-type Resolver struct {
- specIndex *index.SpecIndex
- resolvedRoot *yaml.Node
- resolvingErrors []*ResolvingError
- circularReferences []*index.CircularReferenceResult
- referencesVisited int
- indexesVisited int
- journeysTaken int
- relativesSeen int
- ignorePoly bool
- ignoreArray bool
-}
-
-// NewResolver will create a new resolver from a *index.SpecIndex
-func NewResolver(index *index.SpecIndex) *Resolver {
- if index == nil {
- return nil
- }
- return &Resolver{
- specIndex: index,
- resolvedRoot: index.GetRootNode(),
- }
-}
-
-// GetResolvingErrors returns all errors found during resolving
-func (resolver *Resolver) GetResolvingErrors() []*ResolvingError {
- return resolver.resolvingErrors
-}
-
-// GetCircularErrors returns all circular reference errors found.
-func (resolver *Resolver) GetCircularErrors() []*index.CircularReferenceResult {
- return resolver.circularReferences
-}
-
-// GetPolymorphicCircularErrors returns all circular errors that stem from polymorphism
-func (resolver *Resolver) GetPolymorphicCircularErrors() []*index.CircularReferenceResult {
- var res []*index.CircularReferenceResult
- for i := range resolver.circularReferences {
- if !resolver.circularReferences[i].IsInfiniteLoop {
- continue
- }
- if !resolver.circularReferences[i].IsPolymorphicResult {
- continue
- }
- res = append(res, resolver.circularReferences[i])
- }
- return res
-}
-
-// GetNonPolymorphicCircularErrors returns all circular errors that DO NOT stem from polymorphism
-func (resolver *Resolver) GetNonPolymorphicCircularErrors() []*index.CircularReferenceResult {
- var res []*index.CircularReferenceResult
- for i := range resolver.circularReferences {
- if !resolver.circularReferences[i].IsInfiniteLoop {
- continue
- }
-
- if !resolver.circularReferences[i].IsPolymorphicResult {
- res = append(res, resolver.circularReferences[i])
- }
- }
- return res
-}
-
-// IgnorePolymorphicCircularReferences will ignore any circular references that are polymorphic (oneOf, anyOf, allOf)
-// This must be set before any resolving is done.
-func (resolver *Resolver) IgnorePolymorphicCircularReferences() {
- resolver.ignorePoly = true
-}
-
-// IgnoreArrayCircularReferences will ignore any circular references that stem from arrays. This must be set before
-// any resolving is done.
-func (resolver *Resolver) IgnoreArrayCircularReferences() {
- resolver.ignoreArray = true
-}
-
-// GetJourneysTaken returns the number of journeys taken by the resolver
-func (resolver *Resolver) GetJourneysTaken() int {
- return resolver.journeysTaken
-}
-
-// GetReferenceVisited returns the number of references visited by the resolver
-func (resolver *Resolver) GetReferenceVisited() int {
- return resolver.referencesVisited
-}
-
-// GetIndexesVisited returns the number of indexes visited by the resolver
-func (resolver *Resolver) GetIndexesVisited() int {
- return resolver.indexesVisited
-}
-
-// GetRelativesSeen returns the number of siblings (nodes at the same level) seen for each reference found.
-func (resolver *Resolver) GetRelativesSeen() int {
- return resolver.relativesSeen
-}
-
-// Resolve will resolve the specification, everything that is not polymorphic and not circular, will be resolved.
-// this data can get big, it results in a massive duplication of data. This is a destructive method and will permanently
-// re-organize the node tree. Make sure you have copied your original tree before running this (if you want to preserve
-// original data)
-func (resolver *Resolver) Resolve() []*ResolvingError {
-
- visitIndex(resolver, resolver.specIndex)
-
- for _, circRef := range resolver.circularReferences {
- // If the circular reference is not required, we can ignore it, as it's a terminable loop rather than an infinite one
- if !circRef.IsInfiniteLoop {
- continue
- }
-
- resolver.resolvingErrors = append(resolver.resolvingErrors, &ResolvingError{
- ErrorRef: fmt.Errorf("Infinite circular reference detected: %s", circRef.Start.Name),
- Node: circRef.LoopPoint.Node,
- Path: circRef.GenerateJourneyPath(),
- })
- }
-
- return resolver.resolvingErrors
-}
-
-// CheckForCircularReferences Check for circular references, without resolving, a non-destructive run.
-func (resolver *Resolver) CheckForCircularReferences() []*ResolvingError {
- visitIndexWithoutDamagingIt(resolver, resolver.specIndex)
- for _, circRef := range resolver.circularReferences {
- // If the circular reference is not required, we can ignore it, as it's a terminable loop rather than an infinite one
- if !circRef.IsInfiniteLoop {
- continue
- }
-
- resolver.resolvingErrors = append(resolver.resolvingErrors, &ResolvingError{
- ErrorRef: fmt.Errorf("Infinite circular reference detected: %s", circRef.Start.Name),
- Node: circRef.LoopPoint.Node,
- Path: circRef.GenerateJourneyPath(),
- CircularReference: circRef,
- })
- }
- // update our index with any circular refs we found.
- resolver.specIndex.SetCircularReferences(resolver.circularReferences)
- return resolver.resolvingErrors
-}
-
-func visitIndexWithoutDamagingIt(res *Resolver, idx *index.SpecIndex) {
- mapped := idx.GetMappedReferencesSequenced()
- mappedIndex := idx.GetMappedReferences()
- res.indexesVisited++
- for _, ref := range mapped {
- seenReferences := make(map[string]bool)
- var journey []*index.Reference
- res.journeysTaken++
- res.VisitReference(ref.Reference, seenReferences, journey, false)
- }
- schemas := idx.GetAllComponentSchemas()
- for s, schemaRef := range schemas {
- if mappedIndex[s] == nil {
- seenReferences := make(map[string]bool)
- var journey []*index.Reference
- res.journeysTaken++
- res.VisitReference(schemaRef, seenReferences, journey, false)
- }
- }
- for _, c := range idx.GetChildren() {
- visitIndexWithoutDamagingIt(res, c)
- }
-}
-
-func visitIndex(res *Resolver, idx *index.SpecIndex) {
- mapped := idx.GetMappedReferencesSequenced()
- mappedIndex := idx.GetMappedReferences()
- res.indexesVisited++
-
- for _, ref := range mapped {
- seenReferences := make(map[string]bool)
- var journey []*index.Reference
- res.journeysTaken++
- if ref != nil && ref.Reference != nil {
- ref.Reference.Node.Content = res.VisitReference(ref.Reference, seenReferences, journey, true)
- }
- }
-
- schemas := idx.GetAllComponentSchemas()
- for s, schemaRef := range schemas {
- if mappedIndex[s] == nil {
- seenReferences := make(map[string]bool)
- var journey []*index.Reference
- res.journeysTaken++
- schemaRef.Node.Content = res.VisitReference(schemaRef, seenReferences, journey, true)
- }
- }
-
- // map everything
- for _, sequenced := range idx.GetAllSequencedReferences() {
- locatedDef := mappedIndex[sequenced.Definition]
- if locatedDef != nil {
- if !locatedDef.Circular && locatedDef.Seen {
- sequenced.Node.Content = locatedDef.Node.Content
- }
- }
- }
- for _, c := range idx.GetChildren() {
- visitIndex(res, c)
- }
-}
-
-// VisitReference will visit a reference as part of a journey and will return resolved nodes.
-func (resolver *Resolver) VisitReference(ref *index.Reference, seen map[string]bool, journey []*index.Reference, resolve bool) []*yaml.Node {
- resolver.referencesVisited++
- if ref.Resolved || ref.Seen {
- return ref.Node.Content
- }
-
- journey = append(journey, ref)
- relatives := resolver.extractRelatives(ref.Node, nil, seen, journey, resolve)
-
- seen = make(map[string]bool)
-
- seen[ref.Definition] = true
- for _, r := range relatives {
- // check if we have seen this on the journey before, if so! it's circular
- skip := false
- for i, j := range journey {
- if j.Definition == r.Definition {
-
- var foundDup *index.Reference
- foundRefs := resolver.specIndex.SearchIndexForReference(r.Definition)
- if len(foundRefs) > 0 {
- foundDup = foundRefs[0]
- }
-
- var circRef *index.CircularReferenceResult
- if !foundDup.Circular {
- loop := append(journey, foundDup)
-
- visitedDefinitions := map[string]bool{}
- isInfiniteLoop, _ := resolver.isInfiniteCircularDependency(foundDup, visitedDefinitions, nil)
-
- isArray := false
- if r.ParentNodeSchemaType == "array" {
- isArray = true
- }
- circRef = &index.CircularReferenceResult{
- Journey: loop,
- Start: foundDup,
- LoopIndex: i,
- LoopPoint: foundDup,
- IsArrayResult: isArray,
- IsInfiniteLoop: isInfiniteLoop,
- }
- resolver.circularReferences = append(resolver.circularReferences, circRef)
-
- foundDup.Seen = true
- foundDup.Circular = true
- }
- skip = true
- }
- }
-
- if !skip {
- var original *index.Reference
- foundRefs := resolver.specIndex.SearchIndexForReference(r.Definition)
- if len(foundRefs) > 0 {
- original = foundRefs[0]
- }
- resolved := resolver.VisitReference(original, seen, journey, resolve)
- if resolve {
- r.Node.Content = resolved // this is where we perform the actual resolving.
- }
- r.Seen = true
- ref.Seen = true
- }
- }
- ref.Resolved = true
- ref.Seen = true
-
- return ref.Node.Content
-}
-
-func (resolver *Resolver) isInfiniteCircularDependency(ref *index.Reference, visitedDefinitions map[string]bool, initialRef *index.Reference) (bool, map[string]bool) {
- if ref == nil {
- return false, visitedDefinitions
- }
-
- for refDefinition := range ref.RequiredRefProperties {
- r := resolver.specIndex.GetMappedReferences()[refDefinition]
- if initialRef != nil && initialRef.Definition == r.Definition {
- return true, visitedDefinitions
- }
-
- if visitedDefinitions[r.Definition] {
- continue
- }
- visitedDefinitions[r.Definition] = true
-
- ir := initialRef
- if ir == nil {
- ir = ref
- }
-
- var isChildICD bool
- isChildICD, visitedDefinitions = resolver.isInfiniteCircularDependency(r, visitedDefinitions, ir)
- if isChildICD {
- return true, visitedDefinitions
- }
- }
-
- return false, visitedDefinitions
-}
-
-func (resolver *Resolver) extractRelatives(node, parent *yaml.Node,
- foundRelatives map[string]bool,
- journey []*index.Reference, resolve bool) []*index.Reference {
-
- if len(journey) > 100 {
- return nil
- }
-
- var found []*index.Reference
- if len(node.Content) > 0 {
- for i, n := range node.Content {
- if utils.IsNodeMap(n) || utils.IsNodeArray(n) {
-
- var anyvn, allvn, onevn, arrayTypevn *yaml.Node
-
- // extract polymorphic references
- if len(n.Content) > 1 {
- _, anyvn = utils.FindKeyNodeTop("anyOf", n.Content)
- _, allvn = utils.FindKeyNodeTop("allOf", n.Content)
- _, onevn = utils.FindKeyNodeTop("oneOf", n.Content)
- _, arrayTypevn = utils.FindKeyNodeTop("type", n.Content)
- }
- if anyvn != nil || allvn != nil || onevn != nil {
- if resolver.ignorePoly {
- continue
- }
- }
- if arrayTypevn != nil {
- if arrayTypevn.Value == "array" {
- if resolver.ignoreArray {
- continue
- }
- }
- }
-
- found = append(found, resolver.extractRelatives(n, node, foundRelatives, journey, resolve)...)
- }
-
- if i%2 == 0 && n.Value == "$ref" {
-
- if !utils.IsNodeStringValue(node.Content[i+1]) {
- continue
- }
-
- value := node.Content[i+1].Value
-
- ref := resolver.specIndex.SearchIndexForReference(value)
-
- if ref == nil {
- _, path := utils.ConvertComponentIdIntoFriendlyPathSearch(value)
- err := &ResolvingError{
- ErrorRef: fmt.Errorf("cannot resolve reference `%s`, it's missing", value),
- Node: n,
- Path: path,
- }
- resolver.resolvingErrors = append(resolver.resolvingErrors, err)
- continue
- }
-
- schemaType := ""
- if parent != nil {
- _, arrayTypevn := utils.FindKeyNodeTop("type", parent.Content)
- if arrayTypevn != nil {
- if arrayTypevn.Value == "array" {
- schemaType = "array"
- }
- }
- }
-
- r := &index.Reference{
- Definition: value,
- Name: value,
- Node: node,
- ParentNode: parent,
- ParentNodeSchemaType: schemaType,
- }
-
- found = append(found, r)
-
- foundRelatives[value] = true
- }
-
- if i%2 == 0 && n.Value != "$ref" && n.Value != "" {
-
- if n.Value == "allOf" ||
- n.Value == "oneOf" ||
- n.Value == "anyOf" {
-
- // if this is a polymorphic link, we want to follow it and see if it becomes circular
- if utils.IsNodeMap(node.Content[i+1]) { // check for nested items
- // check if items is present, to indicate an array
- if _, v := utils.FindKeyNodeTop("items", node.Content[i+1].Content); v != nil {
- if utils.IsNodeMap(v) {
- if d, _, l := utils.IsNodeRefValue(v); d {
- ref := resolver.specIndex.GetMappedReferences()[l]
- if ref != nil && !ref.Circular {
- circ := false
- for f := range journey {
- if journey[f].Definition == ref.Definition {
- circ = true
- break
- }
- }
- if !circ {
- resolver.VisitReference(ref, foundRelatives, journey, resolve)
- } else {
- loop := append(journey, ref)
- circRef := &index.CircularReferenceResult{
- Journey: loop,
- Start: ref,
- LoopIndex: i,
- LoopPoint: ref,
- PolymorphicType: n.Value,
- IsPolymorphicResult: true,
- }
-
- ref.Seen = true
- ref.Circular = true
- resolver.circularReferences = append(resolver.circularReferences, circRef)
- }
- }
- }
- }
- }
- }
- // for array based polymorphic items
- if utils.IsNodeArray(node.Content[i+1]) { // check for nested items
- // check if items is present, to indicate an array
- for q := range node.Content[i+1].Content {
- v := node.Content[i+1].Content[q]
- if utils.IsNodeMap(v) {
- if d, _, l := utils.IsNodeRefValue(v); d {
- ref := resolver.specIndex.GetMappedReferences()[l]
- if ref != nil && !ref.Circular {
- circ := false
- for f := range journey {
- if journey[f].Definition == ref.Definition {
- circ = true
- break
- }
- }
- if !circ {
- resolver.VisitReference(ref, foundRelatives, journey, resolve)
- } else {
- loop := append(journey, ref)
- circRef := &index.CircularReferenceResult{
- Journey: loop,
- Start: ref,
- LoopIndex: i,
- LoopPoint: ref,
- PolymorphicType: n.Value,
- IsPolymorphicResult: true,
- }
-
- ref.Seen = true
- ref.Circular = true
- resolver.circularReferences = append(resolver.circularReferences, circRef)
- }
- }
- }
- }
- }
- }
- break
- }
-
- }
- }
- }
- resolver.relativesSeen += len(found)
- return found
-}
diff --git a/resolver/resolver_test.go b/resolver/resolver_test.go
deleted file mode 100644
index 9fe2d92..0000000
--- a/resolver/resolver_test.go
+++ /dev/null
@@ -1,589 +0,0 @@
-package resolver
-
-import (
- "errors"
- "fmt"
- "net/url"
- "os"
- "testing"
-
- "github.com/pb33f/libopenapi/index"
- "github.com/stretchr/testify/assert"
- "gopkg.in/yaml.v3"
-)
-
-func TestNewResolver(t *testing.T) {
- assert.Nil(t, NewResolver(nil))
-}
-
-func Benchmark_ResolveDocumentStripe(b *testing.B) {
- stripe, _ := os.ReadFile("../test_specs/stripe.yaml")
- for n := 0; n < b.N; n++ {
- var rootNode yaml.Node
- _ = yaml.Unmarshal(stripe, &rootNode)
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
- resolver := NewResolver(idx)
- resolver.Resolve()
- }
-}
-
-func TestResolver_ResolveComponents_CircularSpec(t *testing.T) {
- circular, _ := os.ReadFile("../test_specs/circular-tests.yaml")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.Resolve()
- assert.Len(t, circ, 3)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences(t *testing.T) {
- circular, _ := os.ReadFile("../test_specs/circular-tests.yaml")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 3)
- assert.Len(t, resolver.GetResolvingErrors(), 3)
- assert.Len(t, resolver.GetCircularErrors(), 3)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences_CatchArray(t *testing.T) {
- circular := []byte(`openapi: 3.0.0
-components:
- schemas:
- ProductCategory:
- type: "object"
- properties:
- name:
- type: "string"
- children:
- type: "array"
- items:
- $ref: "#/components/schemas/ProductCategory"
- description: "Array of sub-categories in the same format."
- required:
- - "name"
- - "children"`)
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 1)
- assert.Len(t, resolver.GetResolvingErrors(), 1) // infinite loop is a resolving error.
- assert.Len(t, resolver.GetCircularErrors(), 1)
- assert.True(t, resolver.GetCircularErrors()[0].IsArrayResult)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences_IgnoreArray(t *testing.T) {
- circular := []byte(`openapi: 3.0.0
-components:
- schemas:
- ProductCategory:
- type: "object"
- properties:
- name:
- type: "string"
- children:
- type: "array"
- items:
- $ref: "#/components/schemas/ProductCategory"
- description: "Array of sub-categories in the same format."
- required:
- - "name"
- - "children"`)
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- resolver.IgnoreArrayCircularReferences()
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
- assert.Len(t, resolver.GetResolvingErrors(), 0)
- assert.Len(t, resolver.GetCircularErrors(), 0)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences_IgnorePoly_Any(t *testing.T) {
- circular := []byte(`openapi: 3.0.0
-components:
- schemas:
- ProductCategory:
- type: "object"
- properties:
- name:
- type: "string"
- children:
- type: "object"
- anyOf:
- - $ref: "#/components/schemas/ProductCategory"
- description: "Array of sub-categories in the same format."
- required:
- - "name"
- - "children"`)
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- resolver.IgnorePolymorphicCircularReferences()
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
- assert.Len(t, resolver.GetResolvingErrors(), 0)
- assert.Len(t, resolver.GetCircularErrors(), 0)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences_IgnorePoly_All(t *testing.T) {
- circular := []byte(`openapi: 3.0.0
-components:
- schemas:
- ProductCategory:
- type: "object"
- properties:
- name:
- type: "string"
- children:
- type: "object"
- allOf:
- - $ref: "#/components/schemas/ProductCategory"
- description: "Array of sub-categories in the same format."
- required:
- - "name"
- - "children"`)
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- resolver.IgnorePolymorphicCircularReferences()
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
- assert.Len(t, resolver.GetResolvingErrors(), 0)
- assert.Len(t, resolver.GetCircularErrors(), 0)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences_IgnorePoly_One(t *testing.T) {
- circular := []byte(`openapi: 3.0.0
-components:
- schemas:
- ProductCategory:
- type: "object"
- properties:
- name:
- type: "string"
- children:
- type: "object"
- oneOf:
- - $ref: "#/components/schemas/ProductCategory"
- description: "Array of sub-categories in the same format."
- required:
- - "name"
- - "children"`)
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- resolver.IgnorePolymorphicCircularReferences()
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
- assert.Len(t, resolver.GetResolvingErrors(), 0)
- assert.Len(t, resolver.GetCircularErrors(), 0)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences_CatchPoly_Any(t *testing.T) {
- circular := []byte(`openapi: 3.0.0
-components:
- schemas:
- ProductCategory:
- type: "object"
- properties:
- name:
- type: "string"
- children:
- type: "object"
- anyOf:
- - $ref: "#/components/schemas/ProductCategory"
- description: "Array of sub-categories in the same format."
- required:
- - "name"
- - "children"`)
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
- assert.Len(t, resolver.GetResolvingErrors(), 0) // not an infinite loop if poly.
- assert.Len(t, resolver.GetCircularErrors(), 1)
- assert.Equal(t, "anyOf", resolver.GetCircularErrors()[0].PolymorphicType)
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences_CatchPoly_All(t *testing.T) {
- circular := []byte(`openapi: 3.0.0
-components:
- schemas:
- ProductCategory:
- type: "object"
- properties:
- name:
- type: "string"
- children:
- type: "object"
- allOf:
- - $ref: "#/components/schemas/ProductCategory"
- description: "Array of sub-categories in the same format."
- required:
- - "name"
- - "children"`)
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
- assert.Len(t, resolver.GetResolvingErrors(), 0) // not an infinite loop if poly.
- assert.Len(t, resolver.GetCircularErrors(), 1)
- assert.Equal(t, "allOf", resolver.GetCircularErrors()[0].PolymorphicType)
- assert.True(t, resolver.GetCircularErrors()[0].IsPolymorphicResult)
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CheckForCircularReferences_DigitalOcean(t *testing.T) {
- circular, _ := os.ReadFile("../test_specs/digitalocean.yaml")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- baseURL, _ := url.Parse("https://raw.githubusercontent.com/digitalocean/openapi/main/specification")
-
- idx := index.NewSpecIndexWithConfig(&rootNode, &index.SpecIndexConfig{
- AllowRemoteLookup: true,
- AllowFileLookup: true,
- BaseURL: baseURL,
- })
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
- assert.Len(t, resolver.GetResolvingErrors(), 0)
- assert.Len(t, resolver.GetCircularErrors(), 0)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CircularReferencesRequiredValid(t *testing.T) {
- circular, _ := os.ReadFile("../test_specs/swagger-valid-recursive-model.yaml")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_CircularReferencesRequiredInvalid(t *testing.T) {
- circular, _ := os.ReadFile("../test_specs/swagger-invalid-recursive-model.yaml")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(circular, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 2)
-
- _, err := yaml.Marshal(resolver.resolvedRoot)
- assert.NoError(t, err)
-}
-
-func TestResolver_DeepJourney(t *testing.T) {
- var journey []*index.Reference
- for f := 0; f < 200; f++ {
- journey = append(journey, nil)
- }
- idx := index.NewSpecIndexWithConfig(nil, index.CreateClosedAPIIndexConfig())
- resolver := NewResolver(idx)
- assert.Nil(t, resolver.extractRelatives(nil, nil, nil, journey, false))
-}
-
-func TestResolver_ResolveComponents_Stripe(t *testing.T) {
- stripe, _ := os.ReadFile("../test_specs/stripe.yaml")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(stripe, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.Resolve()
- assert.Len(t, circ, 3)
-
- assert.Len(t, resolver.GetNonPolymorphicCircularErrors(), 3)
- assert.Len(t, resolver.GetPolymorphicCircularErrors(), 0)
-}
-
-func TestResolver_ResolveComponents_BurgerShop(t *testing.T) {
- mixedref, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(mixedref, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.Resolve()
- assert.Len(t, circ, 0)
-}
-
-func TestResolver_ResolveComponents_PolyNonCircRef(t *testing.T) {
- yml := `paths:
- /hey:
- get:
- responses:
- "200":
- $ref: '#/components/schemas/crackers'
-components:
- schemas:
- cheese:
- description: cheese
- anyOf:
- items:
- $ref: '#/components/schemas/crackers'
- crackers:
- description: crackers
- allOf:
- - $ref: '#/components/schemas/tea'
- tea:
- description: tea`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(yml), &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.CheckForCircularReferences()
- assert.Len(t, circ, 0)
-}
-
-func TestResolver_ResolveComponents_PolyCircRef(t *testing.T) {
- yml := `openapi: 3.1.0
-components:
- schemas:
- cheese:
- description: cheese
- anyOf:
- - $ref: '#/components/schemas/crackers'
- crackers:
- description: crackers
- anyOf:
- - $ref: '#/components/schemas/cheese'
- tea:
- description: tea`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(yml), &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- _ = resolver.CheckForCircularReferences()
- resolver.circularReferences[0].IsInfiniteLoop = true // override
- assert.Len(t, idx.GetCircularReferences(), 1)
- assert.Len(t, resolver.GetPolymorphicCircularErrors(), 1)
- assert.Equal(t, 2, idx.GetCircularReferences()[0].LoopIndex)
-
-}
-
-func TestResolver_ResolveComponents_Missing(t *testing.T) {
- yml := `paths:
- /hey:
- get:
- responses:
- "200":
- $ref: '#/components/schemas/crackers'
-components:
- schemas:
- cheese:
- description: cheese
- properties:
- thang:
- $ref: '#/components/schemas/crackers'
- crackers:
- description: crackers
- properties:
- butter:
- $ref: 'go home, I am drunk'`
-
- var rootNode yaml.Node
- _ = yaml.Unmarshal([]byte(yml), &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- err := resolver.Resolve()
- assert.Len(t, err, 1)
- assert.Equal(t, "cannot resolve reference `go home, I am drunk`, it's missing: $go home, I am drunk [18:11]", err[0].Error())
-}
-
-func TestResolver_ResolveComponents_MixedRef(t *testing.T) {
- mixedref, _ := os.ReadFile("../test_specs/mixedref-burgershop.openapi.yaml")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(mixedref, &rootNode)
-
- b := index.CreateOpenAPIIndexConfig()
- idx := index.NewSpecIndexWithConfig(&rootNode, b)
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.Resolve()
- assert.Len(t, circ, 0)
- assert.Equal(t, 5, resolver.GetIndexesVisited())
-
- // in v0.8.2 a new check was added when indexing, to prevent re-indexing the same file multiple times.
- assert.Equal(t, 191, resolver.GetRelativesSeen())
- assert.Equal(t, 35, resolver.GetJourneysTaken())
- assert.Equal(t, 62, resolver.GetReferenceVisited())
-}
-
-func TestResolver_ResolveComponents_k8s(t *testing.T) {
- k8s, _ := os.ReadFile("../test_specs/k8s.json")
- var rootNode yaml.Node
- _ = yaml.Unmarshal(k8s, &rootNode)
-
- idx := index.NewSpecIndexWithConfig(&rootNode, index.CreateClosedAPIIndexConfig())
-
- resolver := NewResolver(idx)
- assert.NotNil(t, resolver)
-
- circ := resolver.Resolve()
- assert.Len(t, circ, 0)
-}
-
-// Example of how to resolve the Stripe OpenAPI specification, and check for circular reference errors
-func ExampleNewResolver() {
- // create a yaml.Node reference as a root node.
- var rootNode yaml.Node
-
- // load in the Stripe OpenAPI spec (lots of polymorphic complexity in here)
- stripeBytes, _ := os.ReadFile("../test_specs/stripe.yaml")
-
- // unmarshal bytes into our rootNode.
- _ = yaml.Unmarshal(stripeBytes, &rootNode)
-
- // create a new spec index (resolver depends on it)
- indexConfig := index.CreateClosedAPIIndexConfig()
- idx := index.NewSpecIndexWithConfig(&rootNode, indexConfig)
-
- // create a new resolver using the index.
- resolver := NewResolver(idx)
-
-	// resolve the document; if there are circular reference errors, they are returned.
- // WARNING: this is a destructive action and the rootNode will be PERMANENTLY altered and cannot be unresolved
- circularErrors := resolver.Resolve()
-
- // The Stripe API has a bunch of circular reference problems, mainly from polymorphism.
- // So let's print them out.
- //
- fmt.Printf("There are %d circular reference errors, %d of them are polymorphic errors, %d are not",
- len(circularErrors), len(resolver.GetPolymorphicCircularErrors()), len(resolver.GetNonPolymorphicCircularErrors()))
- // Output: There are 3 circular reference errors, 0 of them are polymorphic errors, 3 are not
-}
-
-func ExampleResolvingError() {
- re := ResolvingError{
- ErrorRef: errors.New("je suis une erreur"),
- Node: &yaml.Node{
- Line: 5,
- Column: 21,
- },
- Path: "#/definitions/JeSuisUneErreur",
- CircularReference: &index.CircularReferenceResult{},
- }
-
- fmt.Printf("%s", re.Error())
- // Output: je suis une erreur: #/definitions/JeSuisUneErreur [5:21]
-}
diff --git a/test_specs/mixedref-burgershop.openapi.yaml b/test_specs/mixedref-burgershop.openapi.yaml
index a722ee0..001de5d 100644
--- a/test_specs/mixedref-burgershop.openapi.yaml
+++ b/test_specs/mixedref-burgershop.openapi.yaml
@@ -234,7 +234,7 @@ paths:
content:
application/json:
schema:
- $ref: 'https://raw.githubusercontent.com/daveshanley/vacuum/main/model/test_files/burgershop.openapi.yaml'
+ $ref: 'https://raw.githubusercontent.com/daveshanley/vacuum/main/model/test_files/burgershop.openapi.yaml#/components/schemas/Error'
components:
schemas:
Error:
diff --git a/test_specs/speakeasy-components.yaml b/test_specs/speakeasy-components.yaml
new file mode 100644
index 0000000..75bd266
--- /dev/null
+++ b/test_specs/speakeasy-components.yaml
@@ -0,0 +1,1452 @@
+components:
+ schemas:
+ readOnlyObject:
+ type: object
+ properties:
+ string:
+ type: string
+ readOnly: true
+ bool:
+ type: boolean
+ readOnly: true
+ num:
+ type: number
+ readOnly: true
+ required:
+ - string
+ - bool
+ - num
+ writeOnlyObject:
+ type: object
+ properties:
+ string:
+ type: string
+ writeOnly: true
+ bool:
+ type: boolean
+ writeOnly: true
+ num:
+ type: number
+ writeOnly: true
+ required:
+ - string
+ - bool
+ - num
+ readWriteObject:
+ type: object
+ properties:
+ num1:
+ type: integer
+ writeOnly: true
+ num2:
+ type: integer
+ writeOnly: true
+ num3:
+ type: integer
+ sum:
+ type: integer
+ readOnly: true
+ required:
+ - num1
+ - num2
+ - num3
+ - sum
+ stronglyTypedOneOfObject:
+ oneOf:
+ - $ref: "#/components/schemas/simpleObjectWithType"
+ - $ref: "#/components/schemas/deepObjectWithType"
+ discriminator:
+ propertyName: type
+ weaklyTypedOneOfObject:
+ oneOf:
+ - $ref: "#/components/schemas/simpleObject"
+ - $ref: "#/components/schemas/deepObject"
+ weaklyTypedOneOfReadOnlyObject:
+ oneOf:
+ - $ref: "#/components/schemas/simpleObject"
+ - $ref: "#/components/schemas/readOnlyObject"
+ weaklyTypedOneOfWriteOnlyObject:
+ oneOf:
+ - $ref: "#/components/schemas/simpleObject"
+ - $ref: "#/components/schemas/writeOnlyObject"
+ weaklyTypedOneOfReadWriteObject:
+ oneOf:
+ - $ref: "#/components/schemas/simpleObject"
+ - $ref: "#/components/schemas/readWriteObject"
+ typedObjectOneOf:
+ oneOf:
+ - $ref: "#/components/schemas/typedObject1"
+ - $ref: "#/components/schemas/typedObject2"
+ - $ref: "#/components/schemas/typedObject3"
+ typedObjectNullableOneOf:
+ oneOf:
+ - $ref: "#/components/schemas/typedObject1"
+ - $ref: "#/components/schemas/typedObject2"
+ - type: "null"
+ flattenedTypedObject1:
+ oneOf:
+ - $ref: "#/components/schemas/typedObject1"
+ nullableTypedObject1:
+ oneOf:
+ - $ref: "#/components/schemas/typedObject1"
+ - type: "null"
+ typedObject1:
+ type: object
+ properties:
+ type:
+ type: string
+ enum:
+ - "obj1"
+ value:
+ type: string
+ required:
+ - type
+ - value
+ typedObject2:
+ type: object
+ properties:
+ type:
+ type: string
+ enum:
+ - "obj2"
+ value:
+ type: string
+ required:
+ - type
+ - value
+ typedObject3:
+ type: object
+ properties:
+ type:
+ type: string
+ enum:
+ - "obj3"
+ value:
+ type: string
+ required:
+ - type
+ - value
+ httpBinSimpleJsonObject:
+ type: object
+ properties:
+ slideshow:
+ type: object
+ properties:
+ author:
+ type: string
+ date:
+ type: string
+ title:
+ type: string
+ slides:
+ type: array
+ items:
+ type: object
+ properties:
+ title:
+ type: string
+ type:
+ type: string
+ items:
+ type: array
+ items:
+ type: string
+ required:
+ - title
+ - type
+ required:
+ - author
+ - date
+ - title
+ - slides
+ required:
+ - slideshow
+ enum:
+ type: string
+ description: "A string based enum"
+ enum:
+ - "one"
+ - "two"
+ - "three"
+ - "four_and_more"
+ example: "one"
+ simpleObject:
+ description: "A simple object that uses all our supported primitive types and enums and has optional properties."
+ externalDocs:
+ description: "A link to the external docs."
+ url: "https://docs.speakeasyapi.dev"
+ type: object
+ properties:
+ str:
+ type: string
+ description: "A string property."
+ example: "test"
+ bool:
+ type: boolean
+ description: "A boolean property."
+ example: true
+ int:
+ type: integer
+ description: "An integer property."
+ example: 1
+ int32:
+ type: integer
+ format: int32
+ description: "An int32 property."
+ example: 1
+ num:
+ type: number
+ description: "A number property."
+ example: 1.1
+ float32:
+ type: number
+ format: float
+ description: "A float32 property."
+ example: 1.1
+ enum:
+ $ref: "#/components/schemas/enum"
+ date:
+ type: string
+ format: date
+ description: "A date property."
+ example: "2020-01-01"
+ dateTime:
+ type: string
+ format: date-time
+ description: "A date-time property."
+ example: "2020-01-01T00:00:00.000000001Z"
+ any:
+ description: "An any property."
+ example: "any"
+ strOpt:
+ type: string
+ description: "An optional string property."
+ example: "testOptional"
+ boolOpt:
+ type: boolean
+ description: "An optional boolean property."
+ example: true
+ intOptNull:
+ type: integer
+ description: "An optional integer property will be null for tests."
+ numOptNull:
+ type: number
+ description: "An optional number property will be null for tests."
+ intEnum:
+ type: integer
+ description: "An integer enum property."
+ enum:
+ - 1
+ - 2
+ - 3
+ example: 2
+ x-speakeasy-enums:
+ - First
+ - Second
+ - Third
+ int32Enum:
+ type: integer
+ format: int32
+ description: "An int32 enum property."
+ enum:
+ - 55
+ - 69
+ - 181
+ example: 55
+ bigint:
+ type: integer
+ format: bigint
+ example: 8821239038968084
+ bigintStr:
+ type: string
+ format: bigint
+ example: "9223372036854775808"
+ decimal:
+ type: number
+ format: decimal
+ example: 3.141592653589793
+ decimalStr:
+ type: string
+ format: decimal
+ example: "3.14159265358979344719667586"
+ required:
+ - str
+ - bool
+ - int
+ - int32
+ - num
+ - float32
+ - enum
+ - date
+ - dateTime
+ - any
+ - intEnum
+ - int32Enum
+ simpleObjectCamelCase:
+ description: "A simple object that uses all our supported primitive types and enums and has optional properties."
+ externalDocs:
+ description: "A link to the external docs."
+ url: "https://docs.speakeasyapi.dev"
+ type: object
+ properties:
+ str_val:
+ type: string
+ description: "A string property."
+ example: "example"
+ bool_val:
+ type: boolean
+ description: "A boolean property."
+ example: true
+ int_val:
+ type: integer
+ description: "An integer property."
+ example: 999999
+ int32_val:
+ type: integer
+ format: int32
+ description: "An int32 property."
+ example: 1
+ num_val:
+ type: number
+ description: "A number property."
+ example: 1.1
+ float32_val:
+ type: number
+ format: float
+ description: "A float32 property."
+ example: 2.2222222
+ enum_val:
+ $ref: "#/components/schemas/enum"
+ date_val:
+ type: string
+ format: date
+ description: "A date property."
+ example: "2020-01-01"
+ date_time_val:
+ type: string
+ format: date-time
+ description: "A date-time property."
+ example: "2020-01-01T00:00:00Z"
+ any_val:
+ description: "An any property."
+ example: "any example"
+ str_opt_val:
+ type: string
+ description: "An optional string property."
+ example: "optional example"
+ bool_opt_val:
+ type: boolean
+ description: "An optional boolean property."
+ example: true
+ int_opt_null_val:
+ type: integer
+ description: "An optional integer property will be null for tests."
+ example: 999999
+ num_opt_null_val:
+ type: number
+ description: "An optional number property will be null for tests."
+ example: 1.1
+ int_enum_val:
+ type: integer
+ description: "An integer enum property."
+ enum:
+ - 1
+ - 2
+ - 3
+ example: 3
+ x-speakeasy-enums:
+ - First
+ - Second
+ - Third
+ int32_enum_val:
+ type: integer
+ format: int32
+ description: "An int32 enum property."
+ enum:
+ - 55
+ - 69
+ - 181
+ example: 69
+ bigint_val:
+ type: integer
+ format: bigint
+ bigint_str_val:
+ type: string
+ format: bigint
+ decimal_val:
+ type: number
+ format: decimal
+ required:
+ - str_val
+ - bool_val
+ - int_val
+ - int32_val
+ - num_val
+ - float32_val
+ - enum_val
+ - date_val
+ - date_time_val
+ - any_val
+ - int_enum_val
+ - int32_enum_val
+ simpleObjectWithType:
+ allOf:
+ - $ref: "#/components/schemas/simpleObject"
+ - type: object
+ properties:
+ type:
+ type: string
+ required:
+ - type
+ deepObject:
+ type: object
+ properties:
+ str:
+ type: string
+ example: "test"
+ bool:
+ type: boolean
+ example: true
+ int:
+ type: integer
+ example: 1
+ num:
+ type: number
+ example: 1.1
+ obj:
+ $ref: "#/components/schemas/simpleObject"
+ map:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObject"
+ example: { "key": "...", "key2": "..." }
+ arr:
+ type: array
+ items:
+ $ref: "#/components/schemas/simpleObject"
+ example: ["...", "..."]
+ any:
+ anyOf:
+ - $ref: "#/components/schemas/simpleObject"
+ - type: string
+ example: "anyOf[0]"
+ type:
+ type: string
+ required:
+ - str
+ - bool
+ - int
+ - num
+ - obj
+ - map
+ - arr
+ - any
+ deepObjectCamelCase:
+ type: object
+ properties:
+ str_val:
+ type: string
+ bool_val:
+ type: boolean
+ int_val:
+ type: integer
+ num_val:
+ type: number
+ obj_val:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ map_val:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ arr_val:
+ type: array
+ items:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ any_val:
+ anyOf:
+ - $ref: "#/components/schemas/simpleObjectCamelCase"
+ - type: string
+ type:
+ type: string
+ required:
+ - str_val
+ - bool_val
+ - int_val
+ - num_val
+ - obj_val
+ - map_val
+ - arr_val
+ - any_val
+ deepObjectWithType:
+ allOf:
+ - $ref: "#/components/schemas/deepObject"
+ - type: object
+ properties:
+ type:
+ type: string
+ fakerFormattedStrings:
+ type: object
+ description: A set of strings with format values that lead to relevant examples being generated for them
+ properties:
+ imageFormat:
+ format: image
+ type: string
+          description: A field that will have an image url generated as example
+ addressFormat:
+ format: address
+ type: string
+ description: A field that will have an address generated as example
+ timezoneFormat:
+ format: timezone
+ type: string
+ description: A field that will have a timezone generated as example
+ zipcodeFormat:
+ format: zipcode
+ type: string
+ description: A field that will have a postal code generated as example
+ jsonFormat:
+ format: json
+ type: string
+ description: A field that will have a JSON generated as example
+ uuidFormat:
+ format: uuid
+ type: string
+ description: A field that will have a UUID generated as example
+ domainFormat:
+ format: domain
+ type: string
+ description: A field that will have a domain name generated as example
+ emailFormat:
+ format: email
+ type: string
+ description: A field that will have an email address generated as example
+ ipv4Format:
+ format: ipv4
+ type: string
+ description: A field that will have an IPv4 address generated as example
+ ipv6Format:
+ format: ipv6
+ type: string
+ description: A field that will have an IPv6 address generated as example
+ macFormat:
+ format: mac
+ type: string
+ description: A field that will have a MAC address generated as example
+ passwordFormat:
+ format: password
+ type: string
+ description: A field that will have a fake password generated as example
+ urlFormat:
+ format: url
+ type: string
+ description: A field that will have a URL generated as example
+ phoneFormat:
+ format: phone
+ type: string
+ description: A field that will have a phone number generated as example
+ filenameFormat:
+ format: filename
+ type: string
+ description: A field that will have a filename generated as example
+ directoryFormat:
+ format: directory
+ type: string
+ description: A field that will have a directory path generated as example
+ filepathFormat:
+ format: filepath
+ type: string
+ description: A field that will have a file path generated as example
+ unknownFormat:
+ format: unknown
+ type: string
+ description: A field that will have random words generated as example
+ fakerStrings:
+ type: object
+ description: A set of strings with fieldnames that lead to relevant examples being generated for them
+ properties:
+ City:
+ type: string
+ country:
+ type: string
+ country_code:
+ type: string
+ latitude:
+ type: string
+ longitude:
+ type: string
+ street:
+ type: string
+ address:
+ type: string
+ timezone:
+ type: string
+ postal-code:
+ type: string
+ color:
+ type: string
+ price:
+ type: string
+ product:
+ type: string
+ material:
+ type: string
+ comment:
+ type: string
+ description:
+ type: string
+ company:
+ type: string
+ datatype:
+ type: string
+ json:
+ type: string
+ uuid:
+ type: string
+ account:
+ type: string
+ amount:
+ type: string
+ currency:
+ type: string
+ IBAN:
+ type: string
+ pin:
+ type: string
+ avatar:
+ type: string
+ domainName:
+ type: string
+ emailAddr:
+ type: string
+ IPv4:
+ type: string
+ IPv6:
+ type: string
+ mac:
+ type: string
+ password:
+ type: string
+ url:
+ type: string
+ username:
+ type: string
+ firstName:
+ type: string
+ fullName:
+ type: string
+ gender:
+ type: string
+ job:
+ type: string
+ lastName:
+ type: string
+ middleName:
+ type: string
+ sex:
+ type: string
+ phone:
+ type: string
+ locale:
+ type: string
+ unit:
+ type: string
+ extension:
+ type: string
+ filename:
+ type: string
+ filetype:
+ type: string
+ directory:
+ type: string
+ filepath:
+ type: string
+ manufacturer:
+ type: string
+ model:
+ type: string
+ key:
+ type: string
+ ID:
+ type: string
+ default:
+ type: string
+ authServiceRequestBody:
+ type: object
+ properties:
+ headerAuth:
+ type: array
+ items:
+ type: object
+ properties:
+ headerName:
+ type: string
+ expectedValue:
+ type: string
+ required:
+ - headerName
+ - expectedValue
+ basicAuth:
+ type: object
+ properties:
+ username:
+ type: string
+ password:
+ type: string
+ required:
+ - username
+ - password
+ arrValue:
+ type: array
+ items:
+ $ref: "#/components/schemas/simpleObject"
+ arrValueCamelCase:
+ type: array
+ items:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ arrArrValue:
+ type: array
+ items:
+ type: array
+ items:
+ $ref: "#/components/schemas/simpleObject"
+ arrArrValueCamelCase:
+ type: array
+ items:
+ type: array
+ items:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ arrObjValue:
+ type: object
+ properties:
+ json:
+ items:
+ $ref: "#/components/schemas/simpleObject"
+ type: array
+ required:
+ - json
+ arrObjValueCamelCase:
+ type: object
+ properties:
+ json:
+ items:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ type: array
+ required:
+ - json
+ mapValue:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObject"
+ mapValueCamelCase:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ mapMapValue:
+ type: object
+ additionalProperties:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObject"
+ mapMapValueCamelCase:
+ type: object
+ additionalProperties:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ mapObjValue:
+ type: object
+ properties:
+ json:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObject"
+ required:
+ - json
+ mapObjValueCamelCase:
+ type: object
+ properties:
+ json:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ required:
+ - json
+ arrMapValue:
+ type: array
+ items:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObject"
+ arrMapValueCamelCase:
+ type: array
+ items:
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ mapArrValue:
+ type: object
+ additionalProperties:
+ type: array
+ items:
+ $ref: "#/components/schemas/simpleObject"
+ mapArrValueCamelCase:
+ type: object
+ additionalProperties:
+ type: array
+ items:
+ $ref: "#/components/schemas/simpleObjectCamelCase"
+ arrPrimitiveValue:
+ type: array
+ items:
+ type: string
+ mapPrimitiveValue:
+ type: object
+ additionalProperties:
+ type: string
+ arrArrPrimitiveValue:
+ type: array
+ items:
+ type: array
+ items:
+ type: string
+ mapMapPrimitiveValue:
+ type: object
+ additionalProperties:
+ type: object
+ additionalProperties:
+ type: string
+ orphanedObject:
+ x-speakeasy-include: true
+ type: object
+ properties:
+ orphaned:
+ type: string
+ required:
+ - orphaned
+ validCircularReferenceObject:
+ type: object
+ properties:
+ circular:
+ type: array
+ items:
+ $ref: "#/components/schemas/validCircularReferenceObject"
+ arrayCircularReferenceObject:
+ type: array
+ items:
+ type: object
+ properties:
+ circular:
+ $ref: "#/components/schemas/arrayCircularReferenceObject"
+ required:
+ - circular
+ objectCircularReferenceObject:
+ type: object
+ properties:
+ circular:
+ $ref: "#/components/schemas/objectCircularReferenceObject"
+ oneOfCircularReferenceObject:
+ type: object
+ properties:
+ child:
+ oneOf:
+ - $ref: "#/components/schemas/oneOfCircularReferenceObject"
+ - $ref: "#/components/schemas/simpleObject"
+ required:
+ - child
+ deprecatedObject:
+ type: object
+ deprecated: true
+ x-speakeasy-deprecation-message: This object is deprecated
+ properties:
+ str:
+ type: string
+ deprecatedFieldInObject:
+ type: object
+ properties:
+ deprecatedField:
+ type: string
+ deprecated: true
+ x-speakeasy-deprecation-replacement: newField
+ deprecatedEnum:
+ type: string
+ enum: ["a", "b", "c"]
+ deprecated: true
+ x-speakeasy-deprecation-message: This enum is deprecated
+ newField:
+ type: string
+ limitOffsetConfig:
+ type: object
+ properties:
+ offset:
+ type: integer
+ page:
+ type: integer
+ limit:
+ type: integer
+ error:
+ type: object
+ properties:
+ code:
+ type: string
+ message:
+ type: string
+ x-speakeasy-error-message: true
+ type:
+ $ref: "#/components/schemas/errorType"
+ errorType:
+ type: string
+ enum:
+ - "not_found"
+ - "invalid"
+ - "internal"
+ complexNumberTypes:
+ type: object
+ properties:
+ bigintStr:
+ type: string
+ format: bigint
+ bigint:
+ type: integer
+ format: bigint
+ decimal:
+ type: number
+ format: decimal
+ decimalStr:
+ type: string
+ format: decimal
+ required:
+ - bigintStr
+ - bigint
+ - decimal
+ - decimalStr
+ defaultsAndConsts:
+ type: object
+ properties:
+ normalField:
+ type: string
+ constStr:
+ type: string
+ const: "const"
+ constStrNull:
+ type: string
+ const: null
+ nullable: true
+ constInt:
+ type: integer
+ const: 123
+ constNum:
+ type: number
+ const: 123.456
+ constBool:
+ type: boolean
+ const: true
+ constDate:
+ type: string
+ format: date
+ const: "2020-01-01"
+ constDateTime:
+ type: string
+ format: date-time
+ const: "2020-01-01T00:00:00Z"
+ constEnumStr:
+ type: string
+ enum:
+ - "one"
+ - "two"
+ - "three"
+ const: "two"
+ constEnumInt:
+ type: integer
+ enum:
+ - 1
+ - 2
+ - 3
+ const: 2
+ constBigInt:
+ type: integer
+ format: bigint
+ const: 9007199254740991
+ constBigIntStr:
+ type: string
+ format: bigint
+ const: "9223372036854775807"
+ constDecimal:
+ type: number
+ format: decimal
+ const: 3.141592653589793
+ constDecimalStr:
+ type: string
+ format: decimal
+ const: "3.141592653589793238462643383279"
+ defaultStr:
+ type: string
+ default: "default"
+ defaultStrNullable:
+ type: string
+ default: null
+ nullable: true
+ defaultStrOptional:
+ type: string
+ default: "default"
+ defaultInt:
+ type: integer
+ default: 123
+ defaultNum:
+ type: number
+ default: 123.456
+ defaultBool:
+ type: boolean
+ default: true
+ defaultDate:
+ type: string
+ format: date
+ default: "2020-01-01"
+ defaultDateTime:
+ type: string
+ format: date-time
+ default: "2020-01-01T00:00:00Z"
+ defaultEnumStr:
+ type: string
+ enum:
+ - "one"
+ - "two"
+ - "three"
+ default: "two"
+ defaultEnumInt:
+ type: integer
+ enum:
+ - 1
+ - 2
+ - 3
+ default: 2
+ defaultBigInt:
+ type: integer
+ format: bigint
+ default: 9007199254740991
+ defaultBigIntStr:
+ type: string
+ format: bigint
+ default: "9223372036854775807"
+ defaultDecimal:
+ type: number
+ format: decimal
+ default: 3.141592653589793
+ defaultDecimalStr:
+ type: string
+ format: decimal
+ default: "3.141592653589793238462643383279"
+ required:
+ - normalField
+ - constStr
+ - constStrNull
+ - constInt
+ - constNum
+ - constBool
+ - constDate
+ - constDateTime
+ - constEnumStr
+ - constEnumInt
+ - constBigInt
+ - constBigIntStr
+ - constDecimal
+ - constDecimalStr
+ - defaultStr
+ - defaultStrNullable
+ - defaultInt
+ - defaultNum
+ - defaultBool
+ - defaultDate
+ - defaultDateTime
+ - defaultEnumStr
+ - defaultEnumInt
+ - defaultBigInt
+ - defaultBigIntStr
+ - defaultDecimal
+ - defaultDecimalStr
+ defaultsAndConstsOutput:
+ type: object
+ properties:
+ normalField:
+ type: string
+ constStr:
+ type: string
+ constStrNull:
+ type: string
+ nullable: true
+ constInt:
+ type: integer
+ constNum:
+ type: number
+ constBool:
+ type: boolean
+ constDate:
+ type: string
+ format: date
+ constDateTime:
+ type: string
+ format: date-time
+ constEnumStr:
+ type: string
+ enum:
+ - "one"
+ - "two"
+ - "three"
+ constEnumInt:
+ type: integer
+ enum:
+ - 1
+ - 2
+ - 3
+ constBigInt:
+ type: integer
+ format: bigint
+ constBigIntStr:
+ type: string
+ format: bigint
+ constDecimal:
+ type: number
+ format: decimal
+ constDecimalStr:
+ type: string
+ format: decimal
+ defaultStr:
+ type: string
+ defaultStrNullable:
+ type: string
+ nullable: true
+ defaultStrOptional:
+ type: string
+ defaultInt:
+ type: integer
+ defaultNum:
+ type: number
+ defaultBool:
+ type: boolean
+ defaultDate:
+ type: string
+ format: date
+ defaultDateTime:
+ type: string
+ format: date-time
+ defaultEnumStr:
+ type: string
+ enum:
+ - "one"
+ - "two"
+ - "three"
+ defaultEnumInt:
+ type: integer
+ enum:
+ - 1
+ - 2
+ - 3
+ defaultBigInt:
+ type: integer
+ format: bigint
+ defaultBigIntStr:
+ type: string
+ format: bigint
+ defaultDecimal:
+ type: number
+ format: decimal
+ defaultDecimalStr:
+ type: string
+ format: decimal
+ required:
+ - normalField
+ - constStr
+ - constStrNull
+ - constInt
+ - constNum
+ - constBool
+ - constDate
+ - constDateTime
+ - constEnumStr
+ - constEnumInt
+ - constBigInt
+ - constBigIntStr
+ - constDecimal
+ - constDecimalStr
+ - defaultStr
+ - defaultStrNullable
+ - defaultInt
+ - defaultNum
+ - defaultBool
+ - defaultDate
+ - defaultDateTime
+ - defaultEnumStr
+ - defaultEnumInt
+ - defaultBigInt
+ - defaultBigIntStr
+ - defaultDecimal
+ - defaultDecimalStr
+ objWithStringAdditionalProperties:
+ type: object
+ properties:
+ normalField:
+ type: string
+ additionalProperties:
+ type: string
+ required:
+ - normalField
+ objWithComplexNumbersAdditionalProperties:
+ type: object
+ properties:
+ normalField:
+ type: string
+ additionalProperties:
+ type: string
+ format: bigint
+ required:
+ - normalField
+ objWithZeroValueComplexTypePtrs:
+ type: object
+ properties:
+ date:
+ type: string
+ format: date
+ description: "A date property."
+ example: "2020-01-01"
+ dateTime:
+ type: string
+ format: date-time
+ description: "A date-time property."
+ example: "2020-01-01T00:00:00Z"
+ bigint:
+ type: integer
+ format: bigint
+ bigintStr:
+ type: string
+ format: bigint
+ decimal:
+ type: number
+ format: decimal
+ objWithDateAdditionalProperties:
+ type: object
+ properties:
+ normalField:
+ type: string
+ additionalProperties:
+ type: string
+ format: date
+ required:
+ - normalField
+ objWithObjAdditionalProperties:
+ type: object
+ required:
+ - datetime
+ - AdditionalProperties
+ properties:
+ datetime:
+ type: string
+ format: date-time
+ AdditionalProperties:
+ type: array
+ items:
+ type: integer
+ additionalProperties:
+ $ref: "#/components/schemas/simpleObject"
+ responses:
+ tokenAuthResponse:
+ description: Successful authentication.
+ content:
+ application/json:
+ schema:
+ title: token
+ type: object
+ properties:
+ authenticated:
+ type: boolean
+ token:
+ type: string
+ required:
+ - authenticated
+ - token
+ simpleObjectFormResponse:
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ form:
+ type: object
+ properties:
+ str:
+ type: string
+ bool:
+ type: string
+ int:
+ type: string
+ int32:
+ type: string
+ num:
+ type: string
+ float32:
+ type: string
+ enum:
+ type: string
+ date:
+ type: string
+ dateTime:
+ type: string
+ any:
+ type: string
+ strOpt:
+ type: string
+ boolOpt:
+ type: string
+ intOptNull:
+ type: string
+ numOptNull:
+ type: string
+ required:
+ - str
+ - bool
+ - int
+ - int32
+ - num
+ - float32
+ - enum
+ - date
+ - dateTime
+ - any
+ required:
+ - form
+ deepObjectFormResponse:
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ form:
+ type: object
+ properties:
+ str:
+ type: string
+ bool:
+ type: string
+ int:
+ type: string
+ num:
+ type: string
+ obj:
+ type: string
+ map:
+ type: string
+ arr:
+ type: string
+ required:
+ - str
+ - bool
+ - int
+ - num
+ - obj
+ - map
+ - arr
+ required:
+ - form
+ paginationResponse:
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ numPages:
+ type: integer
+ resultArray:
+ type: array
+ items:
+ type: integer
+ required:
+ - numPages
+ - resultArray
+ parameters:
+ emptyObjectParam:
+ name: emptyObject
+ in: path
+ required: true
+ schema:
+ type: object
+ properties: {}
+ strPathParam:
+ name: strParam
+ in: path
+ required: true
+ schema:
+ type: string
+ example: test
+ boolPathParam:
+ name: boolParam
+ in: path
+ required: true
+ schema:
+ type: boolean
+ example: true
+ intPathParam:
+ name: intParam
+ in: path
+ required: true
+ schema:
+ type: integer
+ example: 1
+ numPathParam:
+ name: numParam
+ in: path
+ required: true
+ schema:
+ type: number
+ example: 1.1
+ refQueryParamObjExploded:
+ name: refObjParamExploded
+ in: query
+ explode: true
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ example: test
+ bool:
+ type: boolean
+ example: true
+ int:
+ type: integer
+ example: 1
+ num:
+ type: number
+ example: 1.1
+ required:
+ - str
+ - bool
+ - int
+ - num
+ refQueryParamObj:
+ name: refObjParam
+ in: query
+ explode: false
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ example: test
+ bool:
+ type: boolean
+ example: true
+ int:
+ type: integer
+ example: 1
+ num:
+ type: number
+ example: 1.1
+ required:
+ - str
+ - bool
+ - int
+ - num
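The speakeasy-test.yaml fixture introduced next pulls its shared schemas, responses, and parameters from this components file through cross-file references such as `speakeasy-components.yaml#/components/responses/tokenAuthResponse`. As a rough illustration of how a multi-file fixture like this can be loaded, here is a minimal sketch using the libopenapi document API; the `BasePath` and `AllowFileReferences` configuration fields and the relative `test_specs` path are assumptions made for the example, not something confirmed by this change.

```go
package main

import (
	"fmt"
	"os"

	"github.com/pb33f/libopenapi"
	"github.com/pb33f/libopenapi/datamodel"
)

func main() {
	// read the root fixture; it points at speakeasy-components.yaml via file-based $refs.
	spec, err := os.ReadFile("test_specs/speakeasy-test.yaml")
	if err != nil {
		panic(err)
	}

	// assumption: BasePath and AllowFileReferences are the knobs that let local,
	// relative file references be followed from the root spec's directory.
	config := &datamodel.DocumentConfiguration{
		BasePath:            "test_specs",
		AllowFileReferences: true,
	}

	doc, err := libopenapi.NewDocumentWithConfiguration(spec, config)
	if err != nil {
		panic(err)
	}

	// building the v3 model is where the cross-file references get resolved;
	// any resolution problems come back as a slice of errors.
	model, errs := doc.BuildV3Model()
	if len(errs) > 0 {
		for _, e := range errs {
			fmt.Println(e)
		}
		return
	}

	fmt.Println(model.Model.Info.Title)
}
```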
diff --git a/test_specs/speakeasy-test.yaml b/test_specs/speakeasy-test.yaml
new file mode 100644
index 0000000..1b2b553
--- /dev/null
+++ b/test_specs/speakeasy-test.yaml
@@ -0,0 +1,6364 @@
+openapi: 3.1.0
+info:
+ title: Test
+ version: 0.1.0
+ summary: Test Summary
+ description: |-
+ Some test description.
+ About our test document.
+x-speakeasy-extension-rewrite:
+ x-speakeasy-ignore: x-my-ignore
+externalDocs:
+ url: https://speakeasyapi.dev/docs/home
+ description: Speakeasy Docs
+servers:
+ - url: http://localhost:35123
+ description: The default server.
+ - url: http://broken
+ description: A server url to a non-existent server.
+ - url: http://{hostname}:{port}
+ description: A server url with templated variables.
+ variables:
+ port:
+ default: "35123"
+ description: The port on which the server is running.
+ hostname:
+ default: localhost
+ description: The hostname of the server.
+ - url: http://localhost:35123/anything/{something}
+ description: A server url with templated variables.
+ variables:
+ something:
+ default: something
+ description: Something is a variable for changing the root path
+ enum:
+ - something
+ - somethingElse
+ - somethingElseAgain
+ - url: "{protocol}://{hostname}:{port}"
+ description: A server url with templated variables (including the protocol).
+ variables:
+ protocol:
+ default: http
+ description: The networking protocol to use when making requests.
+ port:
+ default: "35123"
+ description: The port on which the server is running.
+ hostname:
+ default: localhost
+ description: The hostname of the server.
+x-speakeasy-globals:
+ parameters:
+ - name: globalQueryParam
+ in: query
+ required: true
+ schema:
+ type: string
+ example: "some example global query param"
+ - name: globalPathParam
+ in: path
+ required: true
+ schema:
+ type: integer
+ example: 100
+x-speakeasy-name-override:
+ - operationId: getGlobalNameOverride
+ methodNameOverride: globalNameOverridden
+tags:
+ - name: auth
+ description: Endpoints for testing authentication.
+ - name: authNew
+ description: Endpoints for testing authentication.
+ - name: servers
+ description: Endpoints for testing servers.
+ - name: parameters
+ description: Endpoints for testing parameters.
+ - name: requestBodies
+ description: Endpoints for testing request bodies.
+ - name: responseBodies
+ description: Endpoints for testing response bodies.
+ - name: retries
+ description: Endpoints for testing retries.
+ - name: generation
+ description: Endpoints for purely testing valid generation behavior.
+ - name: flattening
+ description: Endpoints for testing flattening through request body and parameter combinations.
+ - name: globals
+ description: Endpoints for testing global parameters.
+ - name: unions
+ description: Endpoints for testing union types.
+ - name: errors
+ description: Endpoints for testing error responses.
+ - name: telemetry
+ description: Endpoints for testing telemetry.
+ - name: pagination
+ description: Endpoints for testing the pagination extension
+ - name: documentation
+ description: Testing for documentation extensions and tooling.
+ x-speakeasy-docs:
+ go:
+ description: Testing for documentation extensions in Go.
+ python:
+ description: Testing for documentation extensions in Python.
+ typescript:
+ description: Testing for documentation extensions in TypeScript.
+security:
+ - apiKeyAuth: []
+ - apiKeyAuthNew: []
+ - oauth2: []
+ - {}
+paths:
+ /anything/selectGlobalServer:
+ get:
+ operationId: selectGlobalServer
+ tags:
+ - servers
+ responses:
+ "200":
+ description: OK
+ headers:
+ X-Optional-Header:
+ schema:
+ type: string
+ /anything/selectServerWithID:
+ get:
+ operationId: selectServerWithID
+ description: Select a server by ID.
+ tags:
+ - servers
+ servers:
+ - url: http://localhost:35123
+ description: The default server.
+ x-speakeasy-server-id: valid
+ - url: http://broken
+ description: A server url to a non-existent server.
+ x-speakeasy-server-id: broken
+ responses:
+ "200":
+ description: OK
+ /anything/serverWithTemplates:
+ get:
+ operationId: serverWithTemplates
+ tags:
+ - servers
+ servers:
+ - url: http://{hostname}:{port}
+ variables:
+ port:
+ default: "35123"
+ description: The port on which the server is running.
+ hostname:
+ default: localhost
+              description: The hostname of the server.
+ responses:
+ "200":
+ description: OK
+ /anything/serversByIDWithTemplates:
+ get:
+ operationId: serversByIDWithTemplates
+ tags:
+ - servers
+ servers:
+ - url: http://{hostname}:{port}
+ variables:
+ port:
+ default: "35123"
+ description: The port on which the server is running.
+ hostname:
+ default: localhost
+ description: The hostname of the server.
+ x-speakeasy-server-id: main
+ responses:
+ "200":
+ description: OK
+ /anything/serverWithTemplatesGlobal:
+ get:
+ operationId: serverWithTemplatesGlobal
+ tags:
+ - servers
+ responses:
+ "200":
+ description: OK
+ /anything/serverWithProtocolTemplate:
+ get:
+ operationId: serverWithProtocolTemplate
+ tags:
+ - servers
+ servers:
+ - url: "{protocol}://{hostname}:{port}"
+ variables:
+ protocol:
+ default: http
+ description: The protocol to use when making the network request.
+ port:
+ default: "35123"
+ description: The port on which the server is running.
+ hostname:
+ default: localhost
+ description: The hostname of the server.
+ x-speakeasy-server-id: main
+ responses:
+ "200":
+ description: OK
+ /basic-auth/{user}/{passwd}:
+ get:
+ operationId: basicAuth
+ tags:
+ - auth
+ security:
+ - basicAuth: []
+ parameters:
+ - name: user
+ in: path
+ required: true
+ schema:
+ type: string
+ - name: passwd
+ in: path
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ description: Successful authentication.
+ content:
+ application/json:
+ schema:
+ title: user
+ type: object
+ properties:
+ authenticated:
+ type: boolean
+ user:
+ type: string
+ required:
+ - authenticated
+ - user
+ "401":
+ description: Unsuccessful authentication.
+ /bearer:
+ get:
+ operationId: apiKeyAuthGlobal
+ tags:
+ - auth
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/tokenAuthResponse"
+ "401":
+ description: Unsuccessful authentication.
+ /bearer#operation:
+ get:
+ operationId: apiKeyAuth
+ tags:
+ - auth
+ security:
+ - apiKeyAuth: []
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/tokenAuthResponse"
+ "401":
+ description: Unsuccessful authentication.
+ /bearer#oauth2:
+ get:
+ operationId: oauth2Auth
+ tags:
+ - auth
+ security:
+ - oauth2: []
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/tokenAuthResponse"
+ "401":
+ description: Unsuccessful authentication.
+ /bearer#global:
+ get:
+ operationId: globalBearerAuth
+ tags:
+ - auth
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/tokenAuthResponse"
+ "401":
+ description: Unsuccessful authentication.
+ /bearer#openIdConnect:
+ get:
+ operationId: openIdConnectAuth
+ tags:
+ - auth
+ security:
+ - openIdConnect: []
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/tokenAuthResponse"
+ "401":
+ description: Unsuccessful authentication.
+ /bearer#bearer:
+ get:
+ operationId: bearerAuth
+ tags:
+ - auth
+ security:
+ - bearerAuth: []
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/tokenAuthResponse"
+ "401":
+ description: Unsuccessful authentication.
+ /bearer#oauth2AuthOverride:
+ get:
+ operationId: oauth2Override
+ tags:
+ - auth
+ parameters:
+ - name: Authorization
+ in: header
+ required: true
+ schema:
+ type: string
+ security:
+ - oauth2: []
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/tokenAuthResponse"
+ "401":
+ description: Unsuccessful authentication.
+ /auth#basicAuth:
+ post:
+ operationId: basicAuthNew
+ tags:
+ - authNew
+ security:
+ - basicAuth: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#apiKeyAuthGlobal:
+ post:
+ operationId: apiKeyAuthGlobalNew
+ tags:
+ - authNew
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#oauth2Auth:
+ post:
+ operationId: oauth2AuthNew
+ tags:
+ - authNew
+ security:
+ - oauth2: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#authGlobal:
+ post:
+ operationId: authGlobal
+ tags:
+ - authNew
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#openIdConnectAuth:
+ post:
+ operationId: openIdConnectAuthNew
+ tags:
+ - authNew
+ security:
+ - openIdConnect: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#multipleSimpleSchemeAuth:
+ post:
+ operationId: multipleSimpleSchemeAuth
+ tags:
+ - authNew
+ security:
+ - apiKeyAuthNew: []
+ oauth2: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#multipleMixedSchemeAuth:
+ post:
+ operationId: multipleMixedSchemeAuth
+ tags:
+ - authNew
+ security:
+ - apiKeyAuthNew: []
+ basicAuth: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#multipleSimpleOptionsAuth:
+ post:
+ operationId: multipleSimpleOptionsAuth
+ tags:
+ - authNew
+ security:
+ - apiKeyAuthNew: []
+ - oauth2: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#multipleMixedOptionsAuth:
+ post:
+ operationId: multipleMixedOptionsAuth
+ tags:
+ - authNew
+ security:
+ - apiKeyAuthNew: []
+ - basicAuth: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#multipleOptionsWithSimpleSchemesAuth:
+ post:
+ operationId: multipleOptionsWithSimpleSchemesAuth
+ tags:
+ - authNew
+ security:
+ - apiKeyAuthNew: []
+ oauth2: []
+ - apiKeyAuthNew: []
+ openIdConnect: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /auth#multipleOptionsWithMixedSchemesAuth:
+ post:
+ operationId: multipleOptionsWithMixedSchemesAuth
+ tags:
+ - authNew
+ security:
+ - apiKeyAuthNew: []
+ oauth2: []
+ - basicAuth: []
+ apiKeyAuthNew: []
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/authServiceRequestBody"
+ required: true
+ responses:
+ "200":
+ description: OK
+ "401":
+ description: Unsuccessful authentication.
+ /anything/mixedParams/path/{pathParam}:
+ get:
+ x-speakeasy-test: true
+ operationId: mixedParametersPrimitives
+ tags:
+ - parameters
+ parameters:
+ - name: pathParam
+ in: path
+ schema:
+ type: string
+ example: pathValue
+ required: true
+ - name: queryStringParam
+ in: query
+ schema:
+ type: string
+ example: queryValue
+ required: true
+ - name: headerParam
+ in: header
+ schema:
+ type: string
+ example: headerValue
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/mixedParams/path/pathValue?queryStringParam=queryValue
+ args:
+ type: object
+ properties:
+ queryStringParam:
+ type: string
+ example: queryValue
+ required:
+ - queryStringParam
+ headers:
+ type: object
+ properties:
+ Headerparam:
+ type: string
+ example: headerValue
+ required:
+ - Headerparam
+ required:
+ - url
+ - args
+ - headers
+ /anything/params/{duplicateParamRequest}:
+ get:
+ operationId: duplicateParam
+ tags:
+ - parameters
+ parameters:
+ - name: duplicateParamRequest
+ in: path
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: duplicateParamResponse
+ type: object
+ properties:
+ url:
+ type: string
+ /anything/mixedParams/path/{path_param}/camelcase:
+ get:
+ x-speakeasy-test: true
+ operationId: mixedParametersCamelCase
+ tags:
+ - parameters
+ parameters:
+ - name: path_param
+ in: path
+ schema:
+ type: string
+ example: pathValue
+ required: true
+ - name: query_string_param
+ in: query
+ schema:
+ type: string
+ example: queryValue
+ required: true
+ - name: header_param
+ in: header
+ schema:
+ type: string
+ example: headerValue
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/mixedParams/path/pathValue/camelcase?query_string_param=queryValue
+ args:
+ type: object
+ properties:
+ query_string_param:
+ type: string
+ example: queryValue
+ required:
+ - query_string_param
+ headers:
+ type: object
+ properties:
+ Header-Param:
+ type: string
+ example: headerValue
+ required:
+ - Header-Param
+ required:
+ - url
+ - args
+ - headers
+ /anything/pathParams/str/{strParam}/bool/{boolParam}/int/{intParam}/num/{numParam}:
+ get:
+ x-speakeasy-test: true
+ operationId: simplePathParameterPrimitives
+ tags:
+ - parameters
+ parameters:
+ - $ref: "speakeasy-components.yaml#/components/parameters/strPathParam"
+ - $ref: "speakeasy-components.yaml#/components/parameters/boolPathParam"
+ - $ref: "speakeasy-components.yaml#/components/parameters/intPathParam"
+ - $ref: "speakeasy-components.yaml#/components/parameters/numPathParam"
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/pathParams/str/test/bool/true/int/1/num/1.1
+ required:
+ - url
+ /anything/pathParams/obj/{objParam}/objExploded/{objParamExploded}:
+ get:
+ x-speakeasy-test: true
+ operationId: simplePathParameterObjects
+ tags:
+ - parameters
+ parameters:
+ - name: objParam
+ in: path
+ required: true
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ - name: objParamExploded
+ in: path
+ required: true
+ explode: true
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/pathParams/obj/any,any,bigint,8821239038968084,bigintStr,9223372036854775808,bool,true,boolOpt,true,date,2020-01-01,dateTime,2020-01-01T00:00:00.000000001Z,decimal,3.141592653589793,decimalStr,3.14159265358979344719667586,enum,one,float32,1.1,int,1,int32,1,int32Enum,55,intEnum,2,num,1.1,str,test,strOpt,testOptional/objExploded/any=any,bigint=8821239038968084,bigintStr=9223372036854775808,bool=true,boolOpt=true,date=2020-01-01,dateTime=2020-01-01T00:00:00.000000001Z,decimal=3.141592653589793,decimalStr=3.14159265358979344719667586,enum=one,float32=1.1,int=1,int32=1,int32Enum=55,intEnum=2,num=1.1,str=test,strOpt=testOptional
+ required:
+ - url
+ /anything/pathParams/arr/{arrParam}:
+ get:
+ x-speakeasy-test: true
+ operationId: simplePathParameterArrays
+ tags:
+ - parameters
+ parameters:
+ - name: arrParam
+ in: path
+ required: true
+ schema:
+ type: array
+ items:
+ type: string
+ examples:
+ - test
+ - test2
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/pathParams/arr/test,test2
+ required:
+ - url
+ /anything/pathParams/map/{mapParam}/mapExploded/{mapParamExploded}:
+ get:
+ x-speakeasy-test: true
+ operationId: simplePathParameterMaps
+ tags:
+ - parameters
+ parameters:
+ - name: mapParam
+ in: path
+ required: true
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ example: { "test": "value", "test2": "value2" }
+ - name: mapParamExploded
+ in: path
+ required: true
+ explode: true
+ schema:
+ type: object
+ additionalProperties:
+ type: integer
+ example: { "test": 1, "test2": 2 }
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/pathParams/map/test,value,test2,value2/mapExploded/test=1,test2=2
+ x-speakeasy-test-internal-directives:
+ - sortSerializedMaps:
+ {
+ "regex": ".*?\\/map\\/(.*?)\\/mapExploded\\/(.*)",
+ "delim": ",",
+ }
+ required:
+ - url
+ /anything/pathParams/json/{jsonObj}:
+ get:
+ x-speakeasy-test: true
+ operationId: pathParameterJson
+ tags:
+ - parameters
+ parameters:
+ - name: jsonObj
+ in: path
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: 'http://localhost:35123/anything/pathParams/json/{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"}'
+ required:
+ - url
+ description: OK
+ /anything/queryParams/form/primitive:
+ get:
+ x-speakeasy-test: true
+ operationId: formQueryParamsPrimitive
+ tags:
+ - parameters
+ parameters:
+ - name: strParam
+ in: query
+ schema:
+ type: string
+ example: test
+ required: true
+ - name: boolParam
+ in: query
+ schema:
+ type: boolean
+ example: true
+ required: true
+ - name: intParam
+ in: query
+ schema:
+ type: integer
+ example: 1
+ required: true
+ - name: numParam
+ in: query
+ schema:
+ type: number
+ example: 1.1
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ args:
+ type: object
+ properties:
+ strParam:
+ type: string
+ example: "test"
+ boolParam:
+ type: string
+ example: "true"
+ intParam:
+ type: string
+ example: "1"
+ numParam:
+ type: string
+ example: "1.1"
+ required:
+ - strParam
+ - boolParam
+ - intParam
+ - numParam
+ url:
+ type: string
+ example: http://localhost:35123/anything/queryParams/form/primitive?boolParam=true&intParam=1&numParam=1.1&strParam=test
+ required:
+ - args
+ - url
+ /anything/queryParams/form/obj:
+ get:
+ x-speakeasy-test: true
+ operationId: formQueryParamsObject
+ tags:
+ - parameters
+ parameters:
+ - name: objParamExploded
+ in: query
+ explode: true
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ - name: objParam
+ in: query
+ explode: false
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/queryParams/form/obj?any=any&bigint=8821239038968084&bigintStr=9223372036854775808&bool=true&boolOpt=true&date=2020-01-01&dateTime=2020-01-01T00%3A00%3A00.000000001Z&decimal=3.141592653589793&decimalStr=3.14159265358979344719667586&enum=one&float32=1.1&int=1&int32=1&int32Enum=55&intEnum=2&num=1.1&objParam=any%2Cany%2Cbigint%2C8821239038968084%2CbigintStr%2C9223372036854775808%2Cbool%2Ctrue%2CboolOpt%2Ctrue%2Cdate%2C2020-01-01%2CdateTime%2C2020-01-01T00%3A00%3A00.000000001Z%2Cdecimal%2C3.141592653589793%2CdecimalStr%2C3.14159265358979344719667586%2Cenum%2Cone%2Cfloat32%2C1.1%2Cint%2C1%2Cint32%2C1%2Cint32Enum%2C55%2CintEnum%2C2%2Cnum%2C1.1%2Cstr%2Ctest%2CstrOpt%2CtestOptional&str=test&strOpt=testOptional
+ args:
+ type: object
+ properties:
+ str:
+ type: string
+ example: "test"
+ bool:
+ type: string
+ example: "true"
+ int:
+ type: string
+ example: "1"
+ int32:
+ type: string
+ example: "1"
+ num:
+ type: string
+ example: "1.1"
+ float32:
+ type: string
+ example: "1.1"
+ enum:
+ type: string
+ example: "one"
+ any:
+ type: string
+ example: "any"
+ date:
+ type: string
+ example: "2020-01-01"
+ dateTime:
+ type: string
+ example: "2020-01-01T00:00:00.000000001Z"
+ boolOpt:
+ type: string
+ example: "true"
+ strOpt:
+ type: string
+ example: "testOptional"
+ intOptNull:
+ type: string
+ numOptNull:
+ type: string
+ objParam:
+ type: string
+ example: "any,any,bigint,8821239038968084,bigintStr,9223372036854775808,bool,true,boolOpt,true,date,2020-01-01,dateTime,2020-01-01T00:00:00.000000001Z,decimal,3.141592653589793,decimalStr,3.14159265358979344719667586,enum,one,float32,1.1,int,1,int32,1,int32Enum,55,intEnum,2,num,1.1,str,test,strOpt,testOptional"
+ intEnum:
+ type: string
+ example: "2"
+ int32Enum:
+ type: string
+ example: "55"
+ bigint:
+ type: string
+ example: "8821239038968084"
+ bigintStr:
+ type: string
+ example: "9223372036854775808"
+ decimal:
+ type: string
+ example: "3.141592653589793"
+ decimalStr:
+ type: string
+ example: "3.14159265358979344719667586"
+ required:
+ - str
+ - bool
+ - int
+ - int32
+ - num
+ - float32
+ - enum
+ - any
+ - date
+ - dateTime
+ - objParam
+ - intEnum
+ - int32Enum
+ required:
+ - url
+ - args
+ /anything/queryParams/form/camelObj:
+ get:
+ x-speakeasy-test: true
+ operationId: formQueryParamsCamelObject
+ tags:
+ - parameters
+ parameters:
+ - name: obj_param_exploded
+ in: query
+ explode: true
+ schema:
+ type: object
+ properties:
+ search_term:
+ type: string
+ example: foo
+ item_count:
+ type: string
+ example: "10"
+ required: true
+ - name: obj_param
+ in: query
+ explode: false
+ schema:
+ type: object
+ properties:
+ encoded_term:
+ type: string
+ example: bar
+ encoded_count:
+ type: string
+ example: "11"
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/queryParams/form/camelObj?item_count=10&obj_param=encoded_count%2C11%2Cencoded_term%2Cbar&search_term=foo
+ args:
+ type: object
+ properties:
+ search_term:
+ type: string
+ example: "foo"
+ item_count:
+ type: string
+ example: "10"
+ required:
+ - search_term
+ - item_count
+ required:
+ - url
+ - args
+ /anything/queryParams/form/refParamObject:
+ get:
+ x-speakeasy-test: true
+ operationId: formQueryParamsRefParamObject
+ tags:
+ - parameters
+ parameters:
+ - $ref: "speakeasy-components.yaml#/components/parameters/refQueryParamObjExploded"
+ - $ref: "speakeasy-components.yaml#/components/parameters/refQueryParamObj"
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/queryParams/form/refParamObject?bool=true&int=1&num=1.1&refObjParam=bool%2Ctrue%2Cint%2C1%2Cnum%2C1.1%2Cstr%2Ctest&str=test
+ args:
+ type: object
+ properties:
+ str:
+ type: string
+ example: "test"
+ bool:
+ type: string
+ example: "true"
+ int:
+ type: string
+ example: "1"
+ num:
+ type: string
+ example: "1.1"
+ refObjParam:
+ type: string
+ example: "bool,true,int,1,num,1.1,str,test"
+ required:
+ - str
+ - bool
+ - int
+ - num
+ - refObjParam
+ required:
+ - url
+ - args
+ /anything/queryParams/form/array:
+ get:
+ x-speakeasy-test: true
+ operationId: formQueryParamsArray
+ tags:
+ - parameters
+ parameters:
+ - name: arrParam
+ in: query
+ explode: false
+ schema:
+ type: array
+ items:
+ type: string
+ examples:
+ - test
+ - test2
+ - name: arrParamExploded
+ in: query
+ explode: true
+ schema:
+ type: array
+ items:
+ type: integer
+ examples:
+ - 1
+ - 2
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/queryParams/form/array?arrParam=test%2Ctest2&arrParamExploded=1&arrParamExploded=2
+ args:
+ type: object
+ properties:
+ arrParam:
+ type: string
+ example: "test,test2"
+ arrParamExploded:
+ type: array
+ items:
+ type: string
+ examples:
+ - "1"
+ - "2"
+ required:
+ - arrParam
+ - arrParamExploded
+ required:
+ - url
+ - args
+ /anything/queryParams/pipe/array:
+ get:
+ x-speakeasy-test: true
+ operationId: pipeDelimitedQueryParamsArray
+ tags:
+ - parameters
+ parameters:
+ - name: arrParam
+ style: pipeDelimited
+ in: query
+ explode: false
+ schema:
+ type: array
+ items:
+ type: string
+ examples:
+ - test
+ - test2
+ - name: arrParamExploded
+ style: pipeDelimited
+ in: query
+ explode: true
+ schema:
+ type: array
+ items:
+ type: integer
+ examples:
+ - 1
+ - 2
+ - name: objParam
+ style: pipeDelimited
+ in: query
+ explode: false
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ - name: mapParam
+ style: pipeDelimited
+ in: query
+ explode: false
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ example: { "key1": "val1", "key2": "val2" }
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: "http://localhost:35123/anything/queryParams/pipe/array?arrParam=test|test2&arrParamExploded=1&arrParamExploded=2&mapParam=key1|val1|key2|val2&objParam=any|any|bigint|8821239038968084|bigintStr|9223372036854775808|bool|true|boolOpt|true|date|2020-01-01|dateTime|2020-01-01T00%3A00%3A00.000000001Z|decimal|3.141592653589793|decimalStr|3.14159265358979344719667586|enum|one|float32|1.1|int|1|int32|1|int32Enum|55|intEnum|2|num|1.1|str|test|strOpt|testOptional"
+ x-speakeasy-test-internal-directives:
+ - sortSerializedMaps:
+ { "regex": ".*?&mapParam=(.*?)&.*", "delim": "|" }
+ args:
+ type: object
+ properties:
+ arrParam:
+ type: string
+ example: "test|test2"
+ arrParamExploded:
+ type: array
+ items:
+ type: string
+ examples:
+ - "1"
+ - "2"
+ required:
+ - arrParam
+ - arrParamExploded
+ required:
+ - url
+ - args
+ /anything/queryParams/form/map:
+ get:
+ x-speakeasy-test: true
+ operationId: formQueryParamsMap
+ tags:
+ - parameters
+ parameters:
+ - name: mapParam
+ in: query
+ explode: false
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ example: { "test": "value", "test2": "value2" }
+ - name: mapParamExploded
+ in: query
+ explode: true
+ schema:
+ type: object
+ additionalProperties:
+ type: integer
+ example: { "test": 1, "test2": 2 }
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/queryParams/form/map?mapParam=test%2Cvalue%2Ctest2%2Cvalue2&test=1&test2=2
+ x-speakeasy-test-internal-directives:
+ - sortSerializedMaps:
+ {
+ "regex": ".*?\\?mapParam=(.*?)&(.*)",
+ "delim": "%2C",
+ }
+ args:
+ type: object
+ additionalProperties:
+ type: string
+ example:
+ {
+ "mapParam": "test,value,test2,value2",
+ "test": "1",
+ "test2": "2",
+ }
+ x-speakeasy-test-internal-directives:
+ - sortSerializedMaps: { "regex": "(.*)", "delim": "," }
+ required:
+ - url
+ - args
+ /anything/queryParams/deepObject/obj:
+ get:
+ x-speakeasy-test: true
+ operationId: deepObjectQueryParamsObject
+ tags:
+ - parameters
+ parameters:
+ - name: objParam
+ in: query
+ style: deepObject
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ - name: objArrParam
+ in: query
+ style: deepObject
+ schema:
+ type: object
+ properties:
+ arr:
+ type: array
+ items:
+ type: string
+ examples:
+ - test
+ - test2
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/queryParams/deepObject/obj?objArrParam[arr]=test&objArrParam[arr]=test2&objParam[any]=any&objParam[bigintStr]=9223372036854775808&objParam[bigint]=8821239038968084&objParam[boolOpt]=true&objParam[bool]=true&objParam[dateTime]=2020-01-01T00%3A00%3A00.000000001Z&objParam[date]=2020-01-01&objParam[decimalStr]=3.14159265358979344719667586&objParam[decimal]=3.141592653589793&objParam[enum]=one&objParam[float32]=1.1&objParam[int32Enum]=55&objParam[int32]=1&objParam[intEnum]=2&objParam[int]=1&objParam[num]=1.1&objParam[strOpt]=testOptional&objParam[str]=test
+ args:
+ type: object
+ properties:
+ objArrParam[arr]:
+ type: array
+ items:
+ type: string
+ examples:
+ - test
+ - test2
+ objParam[any]:
+ type: string
+ example: "any"
+ objParam[boolOpt]:
+ type: string
+ example: "true"
+ objParam[bool]:
+ type: string
+ example: "true"
+ objParam[dateTime]:
+ type: string
+ example: "2020-01-01T00:00:00.000000001Z"
+ objParam[date]:
+ type: string
+ example: "2020-01-01"
+ objParam[enum]:
+ type: string
+ example: "one"
+ objParam[float32]:
+ type: string
+ example: "1.1"
+ objParam[int32]:
+ type: string
+ example: "1"
+ objParam[int]:
+ type: string
+ example: "1"
+ objParam[num]:
+ type: string
+ example: "1.1"
+ objParam[strOpt]:
+ type: string
+ example: "testOptional"
+ objParam[str]:
+ type: string
+ example: "test"
+ objParam[intEnum]:
+ type: string
+ example: "2"
+ objParam[int32Enum]:
+ type: string
+ example: "55"
+ objParam[bigint]:
+ type: string
+ example: "8821239038968084"
+ objParam[bigintStr]:
+ type: string
+ example: "9223372036854775808"
+ objParam[decimal]:
+ type: string
+ example: "3.141592653589793"
+ objParam[decimalStr]:
+ type: string
+ example: "3.14159265358979344719667586"
+ required:
+ - objArrParam[arr]
+ - objParam[any]
+ - objParam[boolOpt]
+ - objParam[bool]
+ - objParam[dateTime]
+ - objParam[date]
+ - objParam[enum]
+ - objParam[float32]
+ - objParam[int32]
+ - objParam[int]
+ - objParam[num]
+ - objParam[strOpt]
+ - objParam[str]
+ - objParam[intEnum]
+ - objParam[int32Enum]
+ required:
+ - url
+ - args
+ /anything/queryParams/deepObject/map:
+ get:
+ x-speakeasy-test: true
+ operationId: deepObjectQueryParamsMap
+ tags:
+ - parameters
+ parameters:
+ - name: mapParam
+ in: query
+ style: deepObject
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ example: { "test": "value", "test2": "value2" }
+ required: true
+ - name: mapArrParam
+ in: query
+ style: deepObject
+ schema:
+ type: object
+ additionalProperties:
+ type: array
+ items:
+ type: string
+ example: { "test": ["test", "test2"], "test2": ["test3", "test4"] }
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: http://localhost:35123/anything/queryParams/deepObject/map?mapArrParam[test2]=test3&mapArrParam[test2]=test4&mapArrParam[test]=test&mapArrParam[test]=test2&mapParam[test2]=value2&mapParam[test]=value
+ args:
+ type: object
+ additionalProperties:
+ anyOf:
+ - type: string
+ - type: array
+ items:
+ type: string
+ example:
+ {
+ "mapArrParam[test]": ["test", "test2"],
+ "mapArrParam[test2]": ["test3", "test4"],
+ "mapParam[test]": "value",
+ "mapParam[test2]": "value2",
+ }
+ required:
+ - url
+ - args
+ /anything/queryParams/json/obj:
+ get:
+ x-speakeasy-test: true
+ operationId: jsonQueryParamsObject
+ tags:
+ - parameters
+ parameters:
+ - name: simpleObjParam
+ in: query
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ - name: deepObjParam
+ in: query
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/deepObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: 'http://localhost:35123/anything/queryParams/json/obj?deepObjParam={"any"%3A{"any"%3A"any"%2C"bigint"%3A8821239038968084%2C"bigintStr"%3A"9223372036854775808"%2C"bool"%3Atrue%2C"boolOpt"%3Atrue%2C"date"%3A"2020-01-01"%2C"dateTime"%3A"2020-01-01T00%3A00%3A00.000000001Z"%2C"decimal"%3A3.141592653589793%2C"decimalStr"%3A"3.14159265358979344719667586"%2C"enum"%3A"one"%2C"float32"%3A1.1%2C"int"%3A1%2C"int32"%3A1%2C"int32Enum"%3A55%2C"intEnum"%3A2%2C"num"%3A1.1%2C"str"%3A"test"%2C"strOpt"%3A"testOptional"}%2C"arr"%3A[{"any"%3A"any"%2C"bigint"%3A8821239038968084%2C"bigintStr"%3A"9223372036854775808"%2C"bool"%3Atrue%2C"boolOpt"%3Atrue%2C"date"%3A"2020-01-01"%2C"dateTime"%3A"2020-01-01T00%3A00%3A00.000000001Z"%2C"decimal"%3A3.141592653589793%2C"decimalStr"%3A"3.14159265358979344719667586"%2C"enum"%3A"one"%2C"float32"%3A1.1%2C"int"%3A1%2C"int32"%3A1%2C"int32Enum"%3A55%2C"intEnum"%3A2%2C"num"%3A1.1%2C"str"%3A"test"%2C"strOpt"%3A"testOptional"}%2C{"any"%3A"any"%2C"bigint"%3A8821239038968084%2C"bigintStr"%3A"9223372036854775808"%2C"bool"%3Atrue%2C"boolOpt"%3Atrue%2C"date"%3A"2020-01-01"%2C"dateTime"%3A"2020-01-01T00%3A00%3A00.000000001Z"%2C"decimal"%3A3.141592653589793%2C"decimalStr"%3A"3.14159265358979344719667586"%2C"enum"%3A"one"%2C"float32"%3A1.1%2C"int"%3A1%2C"int32"%3A1%2C"int32Enum"%3A55%2C"intEnum"%3A2%2C"num"%3A1.1%2C"str"%3A"test"%2C"strOpt"%3A"testOptional"}]%2C"bool"%3Atrue%2C"int"%3A1%2C"map"%3A{"key"%3A{"any"%3A"any"%2C"bigint"%3A8821239038968084%2C"bigintStr"%3A"9223372036854775808"%2C"bool"%3Atrue%2C"boolOpt"%3Atrue%2C"date"%3A"2020-01-01"%2C"dateTime"%3A"2020-01-01T00%3A00%3A00.000000001Z"%2C"decimal"%3A3.141592653589793%2C"decimalStr"%3A"3.14159265358979344719667586"%2C"enum"%3A"one"%2C"float32"%3A1.1%2C"int"%3A1%2C"int32"%3A1%2C"int32Enum"%3A55%2C"intEnum"%3A2%2C"num"%3A1.1%2C"str"%3A"test"%2C"strOpt"%3A"testOptional"}%2C"key2"%3A{"any"%3A"any"%2C"bigint"%3A8821239038968084%2C"bigintStr"%3A"9223372036854775808"%2C"bool"%3Atrue%2C"boolOpt"%3Atrue%2C"date"%3A"2020-01-01"%2C"dateTime"%3A"2020-01-01T00%3A00%3A00.000000001Z"%2C"decimal"%3A3.141592653589793%2C"decimalStr"%3A"3.14159265358979344719667586"%2C"enum"%3A"one"%2C"float32"%3A1.1%2C"int"%3A1%2C"int32"%3A1%2C"int32Enum"%3A55%2C"intEnum"%3A2%2C"num"%3A1.1%2C"str"%3A"test"%2C"strOpt"%3A"testOptional"}}%2C"num"%3A1.1%2C"obj"%3A{"any"%3A"any"%2C"bigint"%3A8821239038968084%2C"bigintStr"%3A"9223372036854775808"%2C"bool"%3Atrue%2C"boolOpt"%3Atrue%2C"date"%3A"2020-01-01"%2C"dateTime"%3A"2020-01-01T00%3A00%3A00.000000001Z"%2C"decimal"%3A3.141592653589793%2C"decimalStr"%3A"3.14159265358979344719667586"%2C"enum"%3A"one"%2C"float32"%3A1.1%2C"int"%3A1%2C"int32"%3A1%2C"int32Enum"%3A55%2C"intEnum"%3A2%2C"num"%3A1.1%2C"str"%3A"test"%2C"strOpt"%3A"testOptional"}%2C"str"%3A"test"}&simpleObjParam={"any"%3A"any"%2C"bigint"%3A8821239038968084%2C"bigintStr"%3A"9223372036854775808"%2C"bool"%3Atrue%2C"boolOpt"%3Atrue%2C"date"%3A"2020-01-01"%2C"dateTime"%3A"2020-01-01T00%3A00%3A00.000000001Z"%2C"decimal"%3A3.141592653589793%2C"decimalStr"%3A"3.14159265358979344719667586"%2C"enum"%3A"one"%2C"float32"%3A1.1%2C"int"%3A1%2C"int32"%3A1%2C"int32Enum"%3A55%2C"intEnum"%3A2%2C"num"%3A1.1%2C"str"%3A"test"%2C"strOpt"%3A"testOptional"}'
+ args:
+ type: object
+ properties:
+ simpleObjParam:
+ type: string
+ example: '{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"}'
+ deepObjParam:
+ type: string
+ example: '{"any":{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"},"arr":[{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"},{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"}],"bool":true,"int":1,"map":{"key":{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"},"key2":{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"}},"num":1.1,"obj":{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"},"str":"test"}'
+ required:
+ - simpleObjParam
+ - deepObjParam
+ required:
+ - url
+ - args
+ /anything/queryParams/mixed:
+ get:
+ x-speakeasy-test: true
+ operationId: mixedQueryParams
+ tags:
+ - parameters
+ parameters:
+ - name: jsonParam
+ in: query
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ - name: formParam
+ in: query
+ style: form
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ - name: deepObjectParam
+ in: query
+ style: deepObject
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ example: 'http://localhost:35123/anything/queryParams/mixed?any=any&bigint=8821239038968084&bigintStr=9223372036854775808&bool=true&boolOpt=true&date=2020-01-01&dateTime=2020-01-01T00%3A00%3A00.000000001Z&decimal=3.141592653589793&decimalStr=3.14159265358979344719667586&deepObjectParam[any]=any&deepObjectParam[bigintStr]=9223372036854775808&deepObjectParam[bigint]=8821239038968084&deepObjectParam[boolOpt]=true&deepObjectParam[bool]=true&deepObjectParam[dateTime]=2020-01-01T00%3A00%3A00.000000001Z&deepObjectParam[date]=2020-01-01&deepObjectParam[decimalStr]=3.14159265358979344719667586&deepObjectParam[decimal]=3.141592653589793&deepObjectParam[enum]=one&deepObjectParam[float32]=1.1&deepObjectParam[int32Enum]=55&deepObjectParam[int32]=1&deepObjectParam[intEnum]=2&deepObjectParam[int]=1&deepObjectParam[num]=1.1&deepObjectParam[strOpt]=testOptional&deepObjectParam[str]=test&enum=one&float32=1.1&int=1&int32=1&int32Enum=55&intEnum=2&jsonParam={"any"%3A"any"%2C"bigint"%3A8821239038968084%2C"bigintStr"%3A"9223372036854775808"%2C"bool"%3Atrue%2C"boolOpt"%3Atrue%2C"date"%3A"2020-01-01"%2C"dateTime"%3A"2020-01-01T00%3A00%3A00.000000001Z"%2C"decimal"%3A3.141592653589793%2C"decimalStr"%3A"3.14159265358979344719667586"%2C"enum"%3A"one"%2C"float32"%3A1.1%2C"int"%3A1%2C"int32"%3A1%2C"int32Enum"%3A55%2C"intEnum"%3A2%2C"num"%3A1.1%2C"str"%3A"test"%2C"strOpt"%3A"testOptional"}&num=1.1&str=test&strOpt=testOptional'
+ args:
+ type: object
+ additionalProperties:
+ type: string
+ example:
+ {
+ "any": "any",
+ "bigint": "8821239038968084",
+ "bigintStr": "9223372036854775808",
+ "bool": "true",
+ "boolOpt": "true",
+ "date": "2020-01-01",
+ "dateTime": "2020-01-01T00:00:00.000000001Z",
+ "deepObjectParam[any]": "any",
+ "deepObjectParam[bigint]": "8821239038968084",
+ "deepObjectParam[bigintStr]": "9223372036854775808",
+ "deepObjectParam[boolOpt]": "true",
+ "deepObjectParam[bool]": "true",
+ "deepObjectParam[dateTime]": "2020-01-01T00:00:00.000000001Z",
+ "deepObjectParam[date]": "2020-01-01",
+ "deepObjectParam[enum]": "one",
+ "deepObjectParam[float32]": "1.1",
+ "deepObjectParam[int32]": "1",
+ "deepObjectParam[int]": "1",
+ "deepObjectParam[intEnum]": "2",
+ "deepObjectParam[int32Enum]": "55",
+ "deepObjectParam[num]": "1.1",
+ "deepObjectParam[decimal]": "3.141592653589793",
+ "deepObjectParam[decimalStr]": "3.14159265358979344719667586",
+ "deepObjectParam[strOpt]": "testOptional",
+ "deepObjectParam[str]": "test",
+ "enum": "one",
+ "float32": "1.1",
+ "int": "1",
+ "int32": "1",
+ "intEnum": "2",
+ "int32Enum": "55",
+ "jsonParam": '{"any":"any","bigint":8821239038968084,"bigintStr":"9223372036854775808","bool":true,"boolOpt":true,"date":"2020-01-01","dateTime":"2020-01-01T00:00:00.000000001Z","decimal":3.141592653589793,"decimalStr":"3.14159265358979344719667586","enum":"one","float32":1.1,"int":1,"int32":1,"int32Enum":55,"intEnum":2,"num":1.1,"str":"test","strOpt":"testOptional"}',
+ "num": "1.1",
+ "decimal": "3.141592653589793",
+ "decimalStr": "3.14159265358979344719667586",
+ "str": "test",
+ "strOpt": "testOptional",
+ }
+ required:
+ - url
+ - args
+ /anything/headers/primitive:
+ get:
+ x-speakeasy-test: true
+ operationId: headerParamsPrimitive
+ tags:
+ - parameters
+ parameters:
+ - name: X-Header-String
+ in: header
+ schema:
+ type: string
+ example: "test"
+ required: true
+ - name: X-Header-Boolean
+ in: header
+ schema:
+ type: boolean
+ example: true
+ required: true
+ - name: X-Header-Integer
+ in: header
+ schema:
+ type: integer
+ example: 1
+ required: true
+ - name: X-Header-Number
+ in: header
+ schema:
+ type: number
+ example: 1.1
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ headers:
+ type: object
+ properties:
+ X-Header-String:
+ type: string
+ example: "test"
+ X-Header-Boolean:
+ type: string
+ example: "true"
+ X-Header-Integer:
+ type: string
+ example: "1"
+ X-Header-Number:
+ type: string
+ example: "1.1"
+ required:
+ - X-Header-String
+ - X-Header-Boolean
+ - X-Header-Integer
+ - X-Header-Number
+ required:
+ - headers
+ /anything/headers/obj:
+ get:
+ x-speakeasy-test: true
+ operationId: headerParamsObject
+ tags:
+ - parameters
+ parameters:
+ - name: X-Header-Obj
+ in: header
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ - name: X-Header-Obj-Explode
+ in: header
+ explode: true
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ headers:
+ type: object
+ properties:
+ X-Header-Obj:
+ type: string
+ example: any,any,bigint,8821239038968084,bigintStr,9223372036854775808,bool,true,boolOpt,true,date,2020-01-01,dateTime,2020-01-01T00:00:00.000000001Z,decimal,3.141592653589793,decimalStr,3.14159265358979344719667586,enum,one,float32,1.1,int,1,int32,1,int32Enum,55,intEnum,2,num,1.1,str,test,strOpt,testOptional
+ X-Header-Obj-Explode:
+ type: string
+ example: any=any,bigint=8821239038968084,bigintStr=9223372036854775808,bool=true,boolOpt=true,date=2020-01-01,dateTime=2020-01-01T00:00:00.000000001Z,decimal=3.141592653589793,decimalStr=3.14159265358979344719667586,enum=one,float32=1.1,int=1,int32=1,int32Enum=55,intEnum=2,num=1.1,str=test,strOpt=testOptional
+ required:
+ - X-Header-Obj
+ - X-Header-Obj-Explode
+ required:
+ - headers
+ /anything/headers/map:
+ get:
+ x-speakeasy-test: true
+ operationId: headerParamsMap
+ tags:
+ - parameters
+ parameters:
+ - name: X-Header-Map
+ in: header
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ example: { "key1": "value1", "key2": "value2" }
+ required: true
+ - name: X-Header-Map-Explode
+ in: header
+ explode: true
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ example: { "test1": "val1", "test2": "val2" }
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ headers:
+ type: object
+ properties:
+ X-Header-Map:
+ type: string
+ example: "key1,value1,key2,value2"
+ x-speakeasy-test-internal-directives:
+ - sortSerializedMaps:
+ { "regex": "(.*)", "delim": "," }
+ X-Header-Map-Explode:
+ type: string
+ example: "test1=val1,test2=val2"
+ x-speakeasy-test-internal-directives:
+ - sortSerializedMaps:
+ { "regex": "(.*)", "delim": "," }
+ required:
+ - X-Header-Map
+ - X-Header-Map-Explode
+ required:
+ - headers
+ /anything/headers/array:
+ get:
+ x-speakeasy-test: true
+ operationId: headerParamsArray
+ tags:
+ - parameters
+ parameters:
+ - name: X-Header-Array
+ in: header
+ schema:
+ type: array
+ items:
+ type: string
+ examples:
+ - test1
+ - test2
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ headers:
+ type: object
+ properties:
+ X-Header-Array:
+ type: string
+ example: "test1,test2"
+ required:
+ - X-Header-Array
+ required:
+ - headers
+ /readonlyorwriteonly#readOnlyInput:
+ post:
+ operationId: requestBodyReadOnlyInput
+ servers:
+ - url: http://localhost:35456
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/readOnlyObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/readOnlyObject"
+ /writeonlyoutput#writeOnlyOutput:
+ post:
+ operationId: requestBodyWriteOnlyOutput
+ servers:
+ - url: http://localhost:35456
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/writeOnlyObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/writeOnlyObject"
+ /readonlyorwriteonly#writeOnly:
+ post:
+ operationId: requestBodyWriteOnly
+ servers:
+ - url: http://localhost:35456
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/writeOnlyObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/readOnlyObject"
+ /readonlyandwriteonly:
+ post:
+ operationId: requestBodyReadAndWrite
+ servers:
+ - url: http://localhost:35456
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/readWriteObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/readWriteObject"
+ /readonlyorwriteonly#readOnlyUnion:
+ post:
+ operationId: requestBodyReadOnlyUnion
+ servers:
+ - url: http://localhost:35456
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/weaklyTypedOneOfReadOnlyObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/weaklyTypedOneOfReadOnlyObject"
+ /writeonlyoutput#writeOnlyUnion:
+ post:
+ operationId: requestBodyWriteOnlyUnion
+ servers:
+ - url: http://localhost:35456
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/weaklyTypedOneOfWriteOnlyObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/weaklyTypedOneOfWriteOnlyObject"
+ /readonlyandwriteonly#readWriteOnlyUnion:
+ post:
+ operationId: requestBodyReadWriteOnlyUnion
+ servers:
+ - url: http://localhost:35456
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/weaklyTypedOneOfReadWriteObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/weaklyTypedOneOfReadWriteObject"
+ /anything/requestBodies/post/application/json/simple:
+ post:
+ operationId: requestBodyPostApplicationJsonSimple
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required:
+ - json
+ /anything/requestBodies/post/application/json/camelcase:
+ post:
+ operationId: requestBodyPostApplicationJsonSimpleCamelCase
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ required:
+ - json
+ /requestbody#array:
+ post:
+ operationId: requestBodyPostApplicationJsonArray
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: array
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /requestbody#arrayCamelCase:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayCamelCase
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrValueCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: array
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ /requestbody#arrayOfArrays:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayOfArray
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrArrValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: array
+ items:
+ type: array
+ title: arr
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /requestbody#arrayOfArraysCamelCase:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayOfArrayCamelCase
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrArrValueCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: array
+ items:
+ type: array
+ title: arr
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ /requestbody#map:
+ post:
+ operationId: requestBodyPostApplicationJsonMap
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ additionalProperties:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /requestbody#mapCamelCase:
+ post:
+ operationId: requestBodyPostApplicationJsonMapCamelCase
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapValueCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ additionalProperties:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ /requestbody#mapOfMaps:
+ post:
+ operationId: requestBodyPostApplicationJsonMapOfMap
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapMapValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ additionalProperties:
+ type: object
+ additionalProperties:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /requestbody#mapOfMapsCamelCase:
+ post:
+ operationId: requestBodyPostApplicationJsonMapOfMapCamelCase
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapMapValueCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ additionalProperties:
+ type: object
+ additionalProperties:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ /requestbody#mapOfArrays:
+ post:
+ operationId: requestBodyPostApplicationJsonMapOfArray
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapArrValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ additionalProperties:
+ type: array
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /requestbody#mapOfArraysCamelCase:
+ post:
+ operationId: requestBodyPostApplicationJsonMapOfArrayCamelCase
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapArrValueCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ additionalProperties:
+ type: array
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ /requestbody#arrayOfMaps:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayOfMap
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrMapValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: array
+ items:
+ title: map
+ type: object
+ additionalProperties:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /requestbody#arrayOfMapsCamelCase:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayOfMapCamelCase
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrMapValueCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: array
+ items:
+ title: map
+ type: object
+ additionalProperties:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ /requestbody#mapOfPrimitives:
+ post:
+ operationId: requestBodyPostApplicationJsonMapOfPrimitive
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapPrimitiveValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ additionalProperties:
+ type: string
+ /requestbody#arrayOfPrimitives:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayOfPrimitive
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrPrimitiveValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: array
+ items:
+ title: string
+ type: string
+ /requestbody#arrayOfArraysOfPrimitives:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayOfArrayOfPrimitive
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrArrPrimitiveValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: array
+ items:
+ title: arr
+ type: array
+ items:
+ title: string
+ type: string
+ /requestbody#mapOfMapsOfPrimitives:
+ post:
+ operationId: requestBodyPostApplicationJsonMapOfMapOfPrimitive
+ tags:
+ - requestBodies
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapMapPrimitiveValue"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ additionalProperties:
+ type: object
+ additionalProperties:
+ type: string
+ /anything/requestBodies/post/application/json/array/objResponse:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayObj
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrObjValue"
+ /anything/requestBodies/post/application/json/array/objResponseCamelCase:
+ post:
+ operationId: requestBodyPostApplicationJsonArrayObjCamelCase
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrObjValueCamelCase"
+ /anything/requestBodies/post/application/json/map/objResponse:
+ post:
+ operationId: requestBodyPostApplicationJsonMapObj
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ additionalProperties:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapObjValue"
+ /anything/requestBodies/post/application/json/map/objResponseCamelCase:
+ post:
+ operationId: requestBodyPostApplicationJsonMapObjCamelCase
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ additionalProperties:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObjectCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/mapObjValueCamelCase"
+ /anything/requestBodies/post/application/json/deep:
+ post:
+ operationId: requestBodyPostApplicationJsonDeep
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/deepObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/deepObject"
+ /anything/requestBodies/post/application/json/deep/camelcase:
+ post:
+ operationId: requestBodyPostApplicationJsonDeepCamelCase
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/deepObjectCamelCase"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/deepObjectCamelCase"
+ /anything/requestBodies/post/application/json/multiple/json/filtered:
+ post:
+ operationId: requestBodyPostApplicationJsonMultipleJsonFiltered
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ text/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ application/test+json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ text/json.test:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required:
+ - json
+ /anything/requestBodies/post/multiple/contentTypes/component/filtered:
+ post:
+ operationId: requestBodyPostMultipleContentTypesComponentFiltered
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ multipart/form-data:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ application/x-www-form-urlencoded:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required:
+ - json
+ /anything/requestBodies/post/multiple/contentTypes/inline/filtered:
+ post:
+ operationId: requestBodyPostMultipleContentTypesInlineFiltered
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ num:
+ type: number
+ bool:
+ type: boolean
+ required:
+ - str
+ - num
+ - bool
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ num:
+ type: number
+ bool:
+ type: boolean
+ required:
+ - str
+ - num
+ - bool
+ application/x-www-form-urlencoded:
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ num:
+ type: number
+ bool:
+ type: boolean
+ required:
+ - str
+ - num
+ - bool
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ type: object
+ additionalProperties: true
+ /anything/requestBodies/post/multiple/contentTypes/split:
+ post:
+ operationId: requestBodyPostMultipleContentTypesSplit
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ num:
+ type: number
+ bool:
+ type: boolean
+ required:
+ - str
+ - num
+ - bool
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ str2:
+ type: string
+ num2:
+ type: number
+ bool2:
+ type: boolean
+ required:
+ - str2
+ - num2
+ - bool2
+ application/x-www-form-urlencoded:
+ schema:
+ type: object
+ properties:
+ str3:
+ type: string
+ num3:
+ type: number
+ bool3:
+ type: boolean
+ required:
+ - str3
+ - num3
+ - bool3
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ type: object
+ additionalProperties: true
+ form:
+ type: object
+ additionalProperties: true
+ /anything/requestBodies/post/multiple/contentTypes/split/param:
+ post:
+ operationId: requestBodyPostMultipleContentTypesSplitParam
+ tags:
+ - requestBodies
+ parameters:
+ - name: paramStr
+ in: query
+ schema:
+ type: string
+ required: true
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ num:
+ type: number
+ bool:
+ type: boolean
+ required:
+ - str
+ - num
+ - bool
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ str2:
+ type: string
+ num2:
+ type: number
+ bool2:
+ type: boolean
+ required:
+ - str2
+ - num2
+ - bool2
+ application/x-www-form-urlencoded:
+ schema:
+ type: object
+ properties:
+ str3:
+ type: string
+ num3:
+ type: number
+ bool3:
+ type: boolean
+ required:
+ - str3
+ - num3
+ - bool3
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ type: object
+ additionalProperties: true
+ form:
+ type: object
+ additionalProperties: true
+ args:
+ type: object
+ additionalProperties:
+ type: string
+ /anything/requestBodies/put/multipart/simple:
+ put:
+ operationId: requestBodyPutMultipartSimple
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ multipart/form-data:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/simpleObjectFormResponse"
+ /anything/requestBodies/put/multipart/deep:
+ put:
+ operationId: requestBodyPutMultipartDeep
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ multipart/form-data:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/deepObject"
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/deepObjectFormResponse"
+ /anything/requestBodies/put/multipart/file:
+ put:
+ operationId: requestBodyPutMultipartFile
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ file:
+ type: string
+ format: binary
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ files:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - files
+ /anything/requestBodies/put/multipart/differentFileName:
+ put:
+ operationId: requestBodyPutMultipartDifferentFileName
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ differentFileName:
+ type: string
+ format: binary
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ files:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - files
+ /anything/requestBodies/post/form/simple:
+ post:
+ operationId: requestBodyPostFormSimple
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/x-www-form-urlencoded:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/simpleObjectFormResponse"
+ /anything/requestBodies/post/form/deep:
+ post:
+ operationId: requestBodyPostFormDeep
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/x-www-form-urlencoded:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/deepObject"
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/deepObjectFormResponse"
+ /anything/requestBodies/post/form/map/primitive:
+ post:
+ operationId: requestBodyPostFormMapPrimitive
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/x-www-form-urlencoded:
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ form:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - form
+ /anything/requestBodies/put/string:
+ put:
+ operationId: requestBodyPutString
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ text/plain:
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ data:
+ type: string
+ required:
+ - data
+ /anything/requestBodies/put/bytes:
+ put:
+ operationId: requestBodyPutBytes
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/octet-stream:
+ schema:
+ type: string
+ format: binary
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ data:
+ type: string
+ required:
+ - data
+ /anything/requestBodies/put/stringWithParams:
+ put:
+ operationId: requestBodyPutStringWithParams
+ tags:
+ - requestBodies
+ parameters:
+ - name: queryStringParam
+ in: query
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ text/plain:
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ data:
+ type: string
+ args:
+ type: object
+ properties:
+ queryStringParam:
+ type: string
+ required:
+ - queryStringParam
+ required:
+ - data
+ - args
+ /anything/requestBodies/put/bytesWithParams:
+ put:
+ operationId: requestBodyPutBytesWithParams
+ tags:
+ - requestBodies
+ parameters:
+ - name: queryStringParam
+ in: query
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/octet-stream:
+ schema:
+ type: string
+ format: binary
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ data:
+ type: string
+ args:
+ type: object
+ properties:
+ queryStringParam:
+ type: string
+ required:
+ - queryStringParam
+ required:
+ - data
+ - args
+ /anything/requestBodies/post/empty-object:
+ post:
+ operationId: requestBodyPostEmptyObject
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ empty:
+ type: object
+ emptyWithEmptyProperties:
+ type: object
+ properties: {}
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ empty:
+ type: object
+ emptyRespWithEmptyProperties:
+ type: object
+ properties: {}
+ /anything/requestBodies/post/null-dictionary:
+ post:
+ operationId: requestBodyPostNullDictionary
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ data:
+ type: string
+ required:
+ - data
+ /anything/requestBodies/post/null-array:
+ post:
+ operationId: requestBodyPostNullArray
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ data:
+ type: string
+ required:
+ - data
+ /anything/requestBodies/post/nullableRequiredObject:
+ post:
+ operationId: nullableObjectPost
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/nullableObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "#/components/schemas/nullableObject"
+ required:
+ - json
+ /anything/requestBodies/post/nullableRequiredProperty:
+ post:
+ operationId: nullableRequiredPropertyPost
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - NullableRequiredInt
+ - NullableRequiredArray
+ - NullableRequiredEnum
+ properties:
+ NullableOptionalInt:
+ type: integer
+ nullable: true
+ NullableRequiredInt:
+ type:
+ - integer
+ - null
+ NullableRequiredArray:
+ type: [array, null]
+ items:
+ type: number
+ NullableRequiredEnum:
+ type: ["string", "null"]
+ enum:
+ - first
+ - second
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: string
+ /anything/requestBodies/post/nullableRequiredSharedObject:
+ post:
+ operationId: nullableRequiredSharedObjectPost
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - NullableRequiredObj
+ properties:
+ NullableOptionalObj:
+ $ref: "#/components/schemas/nullableObject"
+ NullableRequiredObj:
+ $ref: "#/components/schemas/nullableObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: string
+ /anything/requestBodies/post/nullableRequiredEmptyObject:
+ post:
+ operationId: nullableRequiredEmptyObjectPost
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - RequiredObj
+ - NullableRequiredObj
+ properties:
+ RequiredObj:
+ type: ["object"]
+ NullableOptionalObj:
+ type: ["object", "null"]
+ NullableRequiredObj:
+ type: ["object", "null"]
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: string
+ /anything/requestBodies/post/{pathBigInt}/{pathBigIntStr}/{pathDecimal}/{pathDecimalStr}/complex-number-types:
+ post:
+ operationId: requestBodyPostComplexNumberTypes
+ tags:
+ - requestBodies
+ parameters:
+ - name: pathBigInt
+ in: path
+ schema:
+ type: integer
+ format: bigint
+ required: true
+ - name: pathBigIntStr
+ in: path
+ schema:
+ type: string
+ format: bigint
+ required: true
+ - name: pathDecimal
+ in: path
+ schema:
+ type: number
+ format: decimal
+ required: true
+ - name: pathDecimalStr
+ in: path
+ schema:
+ type: string
+ format: decimal
+ required: true
+ - name: queryBigInt
+ in: query
+ schema:
+ type: integer
+ format: bigint
+ required: true
+ - name: queryBigIntStr
+ in: query
+ schema:
+ type: string
+ format: bigint
+ required: true
+ - name: queryDecimal
+ in: query
+ schema:
+ type: number
+ format: decimal
+ required: true
+ - name: queryDecimalStr
+ in: query
+ schema:
+ type: string
+ format: decimal
+ required: true
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/complexNumberTypes"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/complexNumberTypes"
+ url:
+ type: string
+ required:
+ - json
+ - url
+ /anything/requestBodies/post/defaultsAndConsts:
+ post:
+ operationId: requestBodyPostDefaultsAndConsts
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/defaultsAndConsts"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/defaultsAndConstsOutput"
+ required:
+ - json
+ /anything/requestBodies/post/jsonDataTypes/string:
+ post:
+ operationId: requestBodyPostJsonDataTypesString
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: string
+ required:
+ - json
+ /anything/requestBodies/post/jsonDataTypes/integer:
+ post:
+ operationId: requestBodyPostJsonDataTypesInteger
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: integer
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: integer
+ required:
+ - json
+ /anything/requestBodies/post/jsonDataTypes/int32:
+ post:
+ operationId: requestBodyPostJsonDataTypesInt32
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: integer
+ format: int32
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: integer
+ format: int32
+ required:
+ - json
+ /anything/requestBodies/post/jsonDataTypes/bigint:
+ post:
+ operationId: requestBodyPostJsonDataTypesBigInt
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: integer
+ format: bigint
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: integer
+ format: bigint
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/bigintStr:
+ post:
+ operationId: requestBodyPostJsonDataTypesBigIntStr
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ format: bigint
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: string
+ format: bigint
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/number:
+ post:
+ operationId: requestBodyPostJsonDataTypesNumber
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: number
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: number
+ required:
+ - json
+ /anything/requestBodies/post/jsonDataTypes/float32:
+ post:
+ operationId: requestBodyPostJsonDataTypesFloat32
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: number
+ format: float32
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: number
+ format: float32
+ required:
+ - json
+ /anything/requestBodies/post/jsonDataTypes/decimal:
+ post:
+ operationId: requestBodyPostJsonDataTypesDecimal
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: number
+ format: decimal
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: number
+ format: decimal
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/decimalStr:
+ post:
+ operationId: requestBodyPostJsonDataTypesDecimalStr
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ format: decimal
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: string
+ format: decimal
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/boolean:
+ post:
+ operationId: requestBodyPostJsonDataTypesBoolean
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: boolean
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: boolean
+ required:
+ - json
+ /anything/requestBodies/post/jsonDataTypes/date:
+ post:
+ operationId: requestBodyPostJsonDataTypesDate
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ format: date
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: string
+ format: date
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/dateTime:
+ post:
+ operationId: requestBodyPostJsonDataTypesDateTime
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ format: date-time
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: string
+ format: date-time
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/map/dateTime:
+ post:
+ operationId: requestBodyPostJsonDataTypesMapDateTime
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ format: date-time
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: object
+ additionalProperties:
+ type: string
+ format: date-time
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/map/bigIntStr:
+ post:
+ operationId: requestBodyPostJsonDataTypesMapBigIntStr
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ additionalProperties:
+ type: string
+ format: bigint
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: object
+ additionalProperties:
+ type: string
+ format: bigint
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/map/decimal:
+ post:
+ operationId: requestBodyPostJsonDataTypesMapDecimal
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ additionalProperties:
+ type: number
+ format: decimal
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: object
+ additionalProperties:
+ type: number
+ format: decimal
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/array/date:
+ post:
+ operationId: requestBodyPostJsonDataTypesArrayDate
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: string
+ format: date
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: array
+ items:
+ type: string
+ format: date
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/array/bigInt:
+ post:
+ operationId: requestBodyPostJsonDataTypesArrayBigInt
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: integer
+ format: bigint
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: array
+ items:
+ type: integer
+ format: bigint
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/jsonDataTypes/array/decimalStr:
+ post:
+ operationId: requestBodyPostJsonDataTypesArrayDecimalStr
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: string
+ format: decimal
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: array
+ items:
+ type: string
+ format: decimal
+ data:
+ type: string
+ required:
+ - json
+ - data
+ /anything/requestBodies/post/nullable/required/string:
+ post:
+ operationId: requestBodyPostNullableRequiredStringBody
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ nullable: true
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ data:
+ type:
+ - string
+ required:
+ - data
+ /anything/requestBodies/post/nullable/notrequired/string:
+ post:
+ operationId: requestBodyPostNullableNotRequiredStringBody
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ nullable: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ data:
+ type:
+ - string
+ required:
+ - data
+ /anything/requestBodies/post/notnullable/notrequired/string:
+ post:
+ operationId: requestBodyPostNotNullableNotRequiredStringBody
+ tags:
+ - requestBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ data:
+ type:
+ - string
+ required:
+ - data
+ /anything/flattening/inlineBodyAndParamNoConflict:
+ post:
+ operationId: inlineBodyAndParamNoConflict
+ tags:
+ - flattening
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ bodyStr:
+ type: string
+ required:
+ - bodyStr
+ required: true
+ parameters:
+ - name: paramStr
+ in: query
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ type: object
+ properties:
+ bodyStr:
+ type: string
+ required:
+ - bodyStr
+ args:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - json
+ - args
+ /anything/flattening/componentBodyAndParamNoConflict:
+ post:
+ operationId: componentBodyAndParamNoConflict
+ tags:
+ - flattening
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ parameters:
+ - name: paramStr
+ in: query
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ args:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - json
+ - args
+ /anything/flattening/inlineBodyAndParamConflict:
+ post:
+ operationId: inlineBodyAndParamConflict
+ tags:
+ - flattening
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ required:
+ - str
+ required: true
+ parameters:
+ - name: str
+ in: query
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ type: object
+ properties:
+ str:
+ type: string
+ required:
+ - str
+ args:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - json
+ - args
+ /anything/flattening/componentBodyAndParamConflict:
+ post:
+ operationId: componentBodyAndParamConflict
+ tags:
+ - flattening
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ parameters:
+ - name: str
+ in: query
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ args:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - json
+ - args
+ /anything/flattening/conflictingParams/{str}:
+ get:
+ operationId: conflictingParams
+ tags:
+ - flattening
+ parameters:
+ - name: str
+ in: path
+ schema:
+ type: string
+ required: true
+ - name: str
+ in: query
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ args:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - url
+ - args
+ /json:
+ get:
+ operationId: responseBodyJsonGet
+ # No tag, as we want this simple request in the root SDK so we can test that operations are generated there
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/httpBinSimpleJsonObject"
+ /html:
+ get:
+ operationId: responseBodyStringGet
+ tags:
+ - responseBodies
+ responses:
+ "200":
+ description: OK
+ content:
+ text/html:
+ schema:
+ title: html
+ type: string
+ /xml:
+ get:
+ operationId: responseBodyXmlGet
+ tags:
+ - responseBodies
+ responses:
+ "200":
+ description: OK
+ content:
+ application/xml:
+ schema:
+ title: xml
+ type: string
+ /bytes/100:
+ get:
+ operationId: responseBodyBytesGet
+ tags:
+ - responseBodies
+ responses:
+ "200":
+ description: OK
+ content:
+ application/octet-stream:
+ schema:
+ title: bytes
+ type: string
+ format: binary
+ /optional:
+ get:
+ operationId: responseBodyOptionalGet
+ tags:
+ - responseBodies
+ servers:
+ - url: http://localhost:35456
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/typedObject1"
+ text/plain:
+ schema:
+ type: string
+ /readonlyorwriteonly#readOnly:
+ post:
+ operationId: responseBodyReadOnly
+ servers:
+ - url: http://localhost:35456
+ tags:
+ - responseBodies
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/readOnlyObject"
+ /response-headers:
+ post:
+ operationId: responseBodyEmptyWithHeaders
+ tags:
+ - responseBodies
+ parameters:
+ - name: X-String-Header
+ in: query
+ schema:
+ type: string
+ required: true
+ - name: X-Number-Header
+ in: query
+ schema:
+ type: number
+ required: true
+ responses:
+ "200":
+ description: OK
+ headers:
+ X-String-Header:
+ schema:
+ type: string
+ X-Number-Header:
+ schema:
+ type: number
+ /anything/responseBodies/additionalProperties:
+ post:
+ operationId: responseBodyAdditionalPropertiesPost
+ tags:
+ - responseBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithStringAdditionalProperties"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithStringAdditionalProperties"
+ required:
+ - json
+ /anything/responseBodies/additionalPropertiesComplexNumbers:
+ post:
+ operationId: responseBodyAdditionalPropertiesComplexNumbersPost
+ tags:
+ - responseBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithComplexNumbersAdditionalProperties"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithComplexNumbersAdditionalProperties"
+ required:
+ - json
+ /anything/responseBodies/zeroValueComplexTypePtrs:
+ post:
+ operationId: responseBodyZeroValueComplexTypePtrsPost
+ tags:
+ - responseBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithZeroValueComplexTypePtrs"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithZeroValueComplexTypePtrs"
+ required:
+ - json
+ /anything/responseBodies/additionalPropertiesDate:
+ post:
+ operationId: responseBodyAdditionalPropertiesDatePost
+ tags:
+ - responseBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithDateAdditionalProperties"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithDateAdditionalProperties"
+ required:
+ - json
+ /anything/responseBodies/additionalPropertiesObject:
+ post:
+ operationId: responseBodyAdditionalPropertiesObjectPost
+ tags:
+ - responseBodies
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithObjAdditionalProperties"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/objWithObjAdditionalProperties"
+ required:
+ - json
+ /anything/{emptyObject}:
+ get:
+ operationId: emptyObjectGet
+ tags:
+ - generation
+ parameters:
+ - $ref: "speakeasy-components.yaml#/components/parameters/emptyObjectParam"
+ responses:
+ "200":
+ description: OK
+ /anything/circularReference:
+ get:
+ operationId: circularReferenceGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/validCircularReferenceObject"
+ /anything/arrayCircularReference:
+ get:
+ operationId: arrayCircularReferenceGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/arrayCircularReferenceObject"
+ /anything/objectCircularReference:
+ get:
+ operationId: objectCircularReferenceGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/objectCircularReferenceObject"
+ /anything/oneOfCircularReference:
+ get:
+ operationId: oneOfCircularReferenceGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/oneOfCircularReferenceObject"
+ /anything/emptyResponseObjectWithComment:
+ get:
+ operationId: emptyResponseObjectWithCommentGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: OK
+ content:
+ application/octet-stream:
+ schema:
+ type: object
+ /anything/ignores:
+ post:
+ operationId: ignoresPost
+ tags:
+ - generation
+ parameters:
+ - name: testParam
+ in: query
+ schema:
+ type: string
+ - name: test_param
+ in: query
+ x-my-ignore: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ testProp:
+ type: string
+ test_prop:
+ x-my-ignore: true
+ type: string
+ callbackUrl:
+ type: string
+ format: uri
+ application/xml:
+ x-my-ignore: true
+ schema:
+ type: object
+ properties:
+ testProp:
+ type: string
+ required: true
+ callbacks:
+ cb:
+ "{$request.bodycomponents.yaml#/callbackUrl}":
+ x-my-ignore: true
+ post:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ testProp:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/httpBinSimpleJsonObject"
+ application/xml:
+ x-my-ignore: true
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/httpBinSimpleJsonObject"
+ text/plain:
+ x-my-ignore: true
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/httpBinSimpleJsonObject"
+ "201":
+ x-my-ignore: true
+ description: Created
+ get:
+ x-my-ignore: true
+ operationId: ignoresGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: OK
+ /anything/ignoreAll:
+ x-my-ignore: true
+ get:
+ operationId: ignoreAllGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: OK
+ /anything/usageExample:
+ post:
+ operationId: usageExamplePost
+ summary: An operation used for testing usage examples
+ description: An operation used for testing usage examples that includes a large array of parameters and input types to ensure that all are handled correctly
+ externalDocs:
+ description: Usage example docs
+ url: https://docs.example.com
+ x-speakeasy-usage-example:
+ title: "Second"
+ description: "Do this second"
+ position: 2
+ tags:
+ - generation
+ security:
+ - basicAuth: []
+ parameters:
+ - name: strParameter
+ in: query
+ required: true
+ description: A string parameter
+ schema:
+ type: string
+ description: A string type
+ examples:
+ - "example 1"
+ - "example 2"
+ - "example 3"
+ - name: intParameter
+ in: query
+ required: true
+ description: An integer parameter
+ schema:
+ type: integer
+ format: int32
+ description: An int32 type
+ - name: int64Parameter
+ in: query
+ required: true
+ description: An int64 parameter
+ schema:
+ type: integer
+ format: int64
+ description: An int64 type
+ - name: bigintParameter
+ in: query
+ required: true
+ description: A bigint parameter
+ schema:
+ type: integer
+ format: bigint
+ description: A bigint type
+ - name: bigintParameterOptional
+ in: query
+ description: A bigint parameter
+ schema:
+ type: integer
+ format: bigint
+ description: A bigint type
+ - name: bigintStrParameter
+ in: query
+ required: true
+ description: A bigint parameter
+ schema:
+ type: string
+ format: bigint
+ description: A bigint type
+ - name: bigintStrParameterOptional
+ in: query
+ description: A bigint parameter
+ schema:
+ type: string
+ format: bigint
+ description: A bigint type
+ - name: floatParameter
+ in: query
+ required: true
+ description: A float parameter
+ schema:
+ type: number
+ description: A float type
+ - name: float32Parameter
+ in: query
+ required: true
+ description: A float32 parameter
+ schema:
+ type: number
+ format: float
+ description: A float32 type
+ - name: decimalParameter
+ in: query
+ required: true
+ description: A decimal parameter
+ schema:
+ type: number
+ format: decimal
+ description: A decimal type
+ - name: decimalParameterOptional
+ in: query
+ required: false
+ description: A decimal parameter
+ schema:
+ type: number
+ format: decimal
+ description: A decimal type
+ - name: decimalStrParameter
+ in: query
+ required: true
+ description: A decimal parameter
+ schema:
+ type: string
+ format: decimal
+ description: A decimal type
+ - name: decimalStrParameterOptional
+ in: query
+ required: false
+ description: A decimal parameter
+ schema:
+ type: string
+ format: decimal
+ description: A decimal type
+ - name: doubleParameter
+ in: query
+ required: true
+ description: A double parameter
+ schema:
+ type: number
+ format: double
+ description: A double type
+ - name: boolParameter
+ in: query
+ required: true
+ description: A boolean parameter
+ schema:
+ type: boolean
+ description: A boolean type
+ - name: dateParameter
+ in: query
+ required: true
+ description: A date parameter
+ schema:
+ type: string
+ format: date
+ description: A date type
+ - name: dateTimeParameter
+ in: query
+ required: true
+ description: A date time parameter
+ schema:
+ type: string
+ format: date-time
+ description: A date time type
+ - name: dateTimeDefaultParameter
+ in: query
+ required: true
+ description: A date time parameter with a default value
+ schema:
+ type: string
+ format: date-time
+ description: A date time type
+ - name: enumParameter
+ in: query
+ required: true
+ description: An enum parameter
+ schema:
+ type: string
+ description: An enum type
+ enum:
+ - "value1"
+ - "value2"
+ - "value3"
+ - name: optEnumParameter
+ in: query
+ description: An enum parameter
+ schema:
+ type: string
+ description: An enum type
+ enum:
+ - "value1"
+ - "value2"
+ - "value3"
+ example: "value3"
+ - name: falseyNumberParameter
+ in: query
+ required: true
+ description: A number parameter that contains a falsey example value
+ schema:
+ type: number
+ description: A number type
+ example: 0
+ requestBody:
+ description: A request body that contains fields with different formats for testing example generation
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ simpleObject:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ fakerStrings:
+ $ref: "speakeasy-components.yaml#/components/schemas/fakerStrings"
+ fakerFormattedStrings:
+ $ref: "speakeasy-components.yaml#/components/schemas/fakerFormattedStrings"
+ responses:
+ "200":
+ description: A successful response that contains the simpleObject sent in the request body
+ content:
+ application/json:
+ schema:
+ type: object
+ description: A response body that contains the simpleObject sent in the request body
+ properties:
+ json:
+ type: object
+ properties:
+ simpleObject:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ fakerStrings:
+ $ref: "speakeasy-components.yaml#/components/schemas/fakerStrings"
+ fakerFormattedStrings:
+ $ref: "speakeasy-components.yaml#/components/schemas/fakerFormattedStrings"
+ required:
+ - json
+ /anything/dateParamWithDefault:
+ get:
+ tags:
+ - generation
+ operationId: dateParamWithDefault
+ parameters:
+ - name: dateInput
+ in: query
+ required: true
+ description: A date parameter with a default value
+ schema:
+ type: string
+ format: date
+ description: A date type
+ default: "2023-10-13"
+ responses:
+ "204":
+ description: OK
+ /anything/dateTimeParamWithDefault:
+ get:
+ tags:
+ - generation
+ operationId: dateTimeParamWithDefault
+ parameters:
+ - name: dateTimeInput
+ in: query
+ required: true
+ description: A date time parameter with a default value
+ schema:
+ type: string
+ format: date-time
+ description: A date time type
+ default: "2023-10-13T12:42:42.999+00:00"
+ responses:
+ "204":
+ description: OK
+ /anything/decimalParamWithDefault:
+ get:
+ tags:
+ - generation
+ operationId: decimalParamWithDefault
+ parameters:
+ - name: decimalInput
+ in: query
+ required: true
+ description: A decimal parameter with a default value
+ schema:
+ type: number
+ format: decimal
+ description: A decimal type
+ default: "903275809834567386763"
+ responses:
+ "204":
+ description: OK
+
+ /anything/anchorTypes:
+ get:
+ operationId: anchorTypesGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: A successful response that contains the simpleObject sent in the request body
+ content:
+ application/json:
+ schema:
+ type: object
+ $anchor: TypeFromAnchor
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /anything/nameOverride:
+ get:
+ operationId: nameOverrideGet
+ x-speakeasy-name-override: nameOverride
+ x-speakeasy-usage-example: false
+ tags:
+ - generation
+ parameters:
+ - name: nameOverride
+ x-speakeasy-name-override: testQueryParam
+ in: query
+ required: true
+ schema:
+ type: string
+ description: A string type
+ example: "example"
+ - name: enumNameOverride
+ x-speakeasy-name-override: testEnumQueryParam
+ in: query
+ required: true
+ schema:
+ type: string
+ description: An enum type
+ enum:
+ - "value1"
+ - "value2"
+ - "value3"
+ example: "value3"
+ responses:
+ "200":
+ description: A successful response that contains the simpleObject sent in the request body
+ content:
+ application/json:
+ schema:
+ type: object
+ x-speakeasy-name-override: overriddenResponse
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /anything/globalNameOverride:
+ get:
+ x-speakeasy-usage-example: true
+ operationId: getGlobalNameOverride
+ tags:
+ - generation
+ responses:
+ "200":
+ description: A successful response that contains the simpleObject sent in the request body
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ /anything/ignoredGeneration:
+ get:
+ operationId: ignoredGenerationGet
+ tags:
+ - generation
+ parameters:
+ - name: ignoredParameter
+ in: query
+ required: true
+ x-my-ignore: true
+ schema:
+ type: string
+ description: A string type
+ example: "example"
+ responses:
+ "200":
+ description: A successful response that contains the simpleObject sent in the request body
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ ignoredProperty:
+ type: string
+ x-my-ignore: true
+ callbacks:
+ notIgnoredCallback:
+ "/somecallback":
+ post:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ someProp:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ ignoredCallbackItem:
+ "/someignoredcallback":
+ x-my-ignore: true
+ post:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ singledIgnoredCallbackOperation:
+ "/someothercallback":
+ post:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ someProp:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ put:
+ x-my-ignore: true
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ put:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ application/xml:
+ x-my-ignore: true
+ schema:
+ type: object
+ properties:
+ xml:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ type: string
+ application/xml:
+ x-my-ignore: true
+ schema:
+ type: object
+ properties:
+ xml:
+ type: string
+ "201":
+ description: Created
+ x-my-ignore: true
+ post:
+ x-my-ignore: true
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ /anything/deprecatedOperationWithComments:
+ get:
+ operationId: deprecatedOperationWithCommentsGet
+ tags:
+ - generation
+ deprecated: true
+ x-speakeasy-deprecation-replacement: simplePathParameterObjects
+ x-speakeasy-deprecation-message: This operation is deprecated
+ summary: This is an endpoint setup to test deprecation with comments
+ parameters:
+ - name: deprecatedParameter
+ in: query
+ schema:
+ type: string
+ deprecated: true
+ x-speakeasy-deprecation-replacement: newParameter
+ x-speakeasy-deprecation-message: This parameter is deprecated
+ description: This is a string parameter
+ - name: newParameter
+ in: query
+ schema:
+ type: string
+ description: This is a string parameter
+ responses:
+ "200":
+ description: OK
+ /anything/deprecatedOperationNoComments:
+ get:
+ operationId: deprecatedOperationNoCommentsGet
+ tags:
+ - generation
+ deprecated: true
+ parameters:
+ - name: deprecatedParameter
+ in: query
+ schema:
+ type: string
+ deprecated: true
+ responses:
+ "200":
+ description: OK
+ /anything/deprecatedObjectInSchema:
+ get:
+ operationId: deprecatedObjectInSchemaGet
+ tags:
+ - generation
+ responses:
+ "200":
+ description: A successful response that contains a deprecatedObject sent in the request body
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/deprecatedObject"
+ /anything/deprecatedFieldInSchema:
+ post:
+ operationId: deprecatedFieldInSchemaPost
+ tags:
+ - generation
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/deprecatedFieldInObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ /anything/typedParameterGeneration:
+ get:
+ operationId: typedParameterGenerationGet
+ tags:
+ - generation
+ parameters:
+ - name: date
+ in: query
+ schema:
+ type: string
+ format: date
+ - name: bigint
+ in: query
+ schema:
+ type: integer
+ format: bigint
+ - name: decimal
+ in: query
+ schema:
+ type: number
+ format: decimal
+ - name: obj
+ in: query
+ schema:
+ type: object
+ properties:
+ str:
+ type: string
+ num:
+ type: number
+ bool:
+ type: boolean
+ required:
+ - str
+ - num
+ - bool
+ responses:
+ "200":
+ description: OK
+ /anything/ignoredPath:
+ x-my-ignore: true
+ get:
+ responses:
+ "200":
+ description: OK
+ /anything/globals/queryParameter:
+ get:
+ x-speakeasy-usage-example:
+ tags:
+ - global-parameters
+ operationId: globalsQueryParameterGet
+ tags:
+ - globals
+ parameters:
+ - name: globalQueryParam
+ in: query
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ args:
+ type: object
+ properties:
+ globalQueryParam:
+ type: string
+ required:
+ - globalQueryParam
+ required:
+ - args
+ /anything/globals/pathParameter/{globalPathParam}:
+ get:
+ x-speakeasy-usage-example:
+ tags:
+ - global-parameters
+ operationId: globalPathParameterGet
+ tags:
+ - globals
+ parameters:
+ - name: globalPathParam
+ in: path
+ required: true
+ schema:
+ type: integer
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ url:
+ type: string
+ required:
+ - url
+ /anything/stronglyTypedOneOf:
+ post:
+ operationId: stronglyTypedOneOfPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/stronglyTypedOneOfObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/stronglyTypedOneOfObject"
+ required:
+ - json
+ /anything/weaklyTypedOneOf:
+ post:
+ operationId: weaklyTypedOneOfPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/weaklyTypedOneOfObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/weaklyTypedOneOfObject"
+ required:
+ - json
+ /anything/typedObjectOneOf:
+ post:
+ operationId: typedObjectOneOfPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/typedObjectOneOf"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/typedObjectOneOf"
+ required:
+ - json
+ /anything/typedObjectNullableOneOf:
+ post:
+ operationId: typedObjectNullableOneOfPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/typedObjectNullableOneOf"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/typedObjectNullableOneOf"
+ required:
+ - json
+ /anything/flattenedTypedObject:
+ post:
+ operationId: flattenedTypedObjectPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/flattenedTypedObject1"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/flattenedTypedObject1"
+ required:
+ - json
+ /anything/nullableTypedObject:
+ post:
+ operationId: nullableTypedObjectPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/nullableTypedObject1"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "speakeasy-components.yaml#/components/schemas/nullableTypedObject1"
+ required:
+ - json
+ /anything/nullableOneOfSchema:
+ post:
+ operationId: nullableOneOfSchemaPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ oneOf:
+ - $ref: "speakeasy-components.yaml#/components/schemas/typedObject1"
+ - $ref: "speakeasy-components.yaml#/components/schemas/typedObject2"
+ - type: "null"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ oneOf:
+ - $ref: "speakeasy-components.yaml#/components/schemas/typedObject1"
+ - $ref: "speakeasy-components.yaml#/components/schemas/typedObject2"
+ - type: "null"
+ required:
+ - json
+ /anything/nullableOneOfInObject:
+ post:
+ operationId: nullableOneOfTypeInObjectPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/nullableOneOfTypeInObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "#/components/schemas/nullableOneOfTypeInObject"
+ required:
+ - json
+ /anything/nullableOneOfRefInObject:
+ post:
+ operationId: nullableOneOfRefInObjectPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/nullableOneOfRefInObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ $ref: "#/components/schemas/nullableOneOfRefInObject"
+ required:
+ - json
+ /anything/primitiveTypeOneOf:
+ post:
+ operationId: primitiveTypeOneOfPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ oneOf:
+ - type: string
+ - type: integer
+ - type: number
+ - type: boolean
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ oneOf:
+ - type: string
+ - type: integer
+ - type: number
+ - type: boolean
+ required:
+ - json
+ /anything/mixedTypeOneOf:
+ post:
+ operationId: mixedTypeOneOfPost
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ oneOf:
+ - type: string
+ - type: integer
+ - $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ oneOf:
+ - type: string
+ - type: integer
+ - $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ required:
+ - json
+ /anything/unionDateNull:
+ post:
+ operationId: unionDateNull
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ oneOf:
+ - type: string
+ format: date
+ - type: "null"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ oneOf:
+ - type: string
+ format: date
+ - type: "null"
+ required:
+ - json
+ /anything/unionDateTimeNull:
+ post:
+ operationId: unionDateTimeNull
+ tags:
+ - unions
+ requestBody:
+ content:
+ application/json:
+ schema:
+ oneOf:
+ - type: string
+ format: date-time
+ - type: "null"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ json:
+ oneOf:
+ - type: string
+ format: date-time
+ - type: "null"
+ required:
+ - json
+ # /anything/unionDateTimeBigInt:
+ # post:
+ # operationId: unionDateTimeBigInt
+ # tags:
+ # - unions
+ # requestBody:
+ # content:
+ # application/json:
+ # schema:
+ # oneOf:
+ # - type: string
+ # format: date-time
+ # - type: integer
+ # format: bigint
+ # required: true
+ # responses:
+ # "200":
+ # description: OK
+ # content:
+ # application/json:
+ # schema:
+ # title: res
+ # type: object
+ # properties:
+ # json:
+ # oneOf:
+ # - type: string
+ # format: date-time
+ # - type: integer
+ # format: bigint
+ # required:
+ # - json
+ # /anything/unionBigIntDecimal:
+ # post:
+ # operationId: unionBigIntDecimal
+ # tags:
+ # - unions
+ # requestBody:
+ # content:
+ # application/json:
+ # schema:
+ # oneOf:
+ # - type: string
+ # format: bigint
+ # - type: number
+ # format: decimal
+ # required: true
+ # responses:
+ # "200":
+ # description: OK
+ # content:
+ # application/json:
+ # schema:
+ # title: res
+ # type: object
+ # properties:
+ # json:
+ # oneOf:
+ # - type: string
+ # format: bigint
+ # - type: number
+ # format: decimal
+ # required:
+ # - json
+ /status/{statusCode}:
+ get:
+ operationId: statusGetError
+ tags:
+ - errors
+ parameters:
+ - name: statusCode
+ in: path
+ required: true
+ schema:
+ type: integer
+ responses:
+ "200":
+ description: OK
+ "300":
+ description: Multiple Choices
+ "400":
+ description: Bad Request
+ "500":
+ description: Internal Server Error
+ /errors/{statusCode}:
+ servers:
+ - url: http://localhost:35456
+ get:
+ x-speakeasy-errors:
+ statusCodes:
+ - "400"
+ - "401"
+ - "4XX"
+ - "500"
+ - "501"
+ operationId: statusGetXSpeakeasyErrors
+ tags:
+ - errors
+ parameters:
+ - name: statusCode
+ in: path
+ required: true
+ schema:
+ type: integer
+ responses:
+ "200":
+ description: OK
+ "300":
+ description: Multiple Choices
+ "400":
+ description: Bad Request
+ "500":
+ description: Internal Server Error
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/error"
+ "501":
+ description: Not Implemented
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ code:
+ type: string
+ message:
+ type: string
+ type:
+ $ref: "speakeasy-components.yaml#/components/schemas/errorType"
+ /anything/connectionError:
+ get:
+ operationId: connectionErrorGet
+ servers:
+ - url: http://somebrokenapi.broken
+ tags:
+ - errors
+ responses:
+ "200":
+ description: OK
+ /anything/telemetry/user-agent:
+ get:
+ operationId: telemetryUserAgentGet
+ tags:
+ - telemetry
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ headers:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - headers
+ /anything/telemetry/speakeasy-user-agent:
+ get:
+ operationId: telemetrySpeakeasyUserAgentGet
+ tags:
+ - telemetry
+ parameters:
+ - name: User-Agent
+ in: header
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: res
+ type: object
+ properties:
+ headers:
+ type: object
+ additionalProperties:
+ type: string
+ required:
+ - headers
+ /pagination/limitoffset/page:
+ get:
+ operationId: paginationLimitOffsetPageParams
+ servers:
+ - url: http://localhost:35456
+ parameters:
+ - name: page
+ in: query
+ schema:
+ type: integer
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/paginationResponse"
+ tags:
+ - pagination
+ x-speakeasy-pagination:
+ type: offsetLimit
+ inputs:
+ - name: page
+ in: parameters
+ type: page
+ outputs:
+ results: $.resultArray
+ put:
+ operationId: paginationLimitOffsetPageBody
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/limitOffsetConfig"
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/paginationResponse"
+ tags:
+ - pagination
+ x-speakeasy-pagination:
+ type: offsetLimit
+ inputs:
+ - name: limit
+ in: requestBody
+ type: limit
+ - name: page
+ in: requestBody
+ type: page
+ outputs:
+ numPages: $.numPages
+ /pagination/limitoffset/offset:
+ get:
+ operationId: paginationLimitOffsetOffsetParams
+ servers:
+ - url: http://localhost:35456
+ parameters:
+ - name: offset
+ in: query
+ schema:
+ type: integer
+ - name: limit
+ in: query
+ schema:
+ type: integer
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/paginationResponse"
+ tags:
+ - pagination
+ x-speakeasy-pagination:
+ type: offsetLimit
+ inputs:
+ - name: limit
+ in: parameters
+ type: limit
+ - name: offset
+ in: parameters
+ type: offset
+ outputs:
+ results: $.resultArray
+ put:
+ operationId: paginationLimitOffsetOffsetBody
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "speakeasy-components.yaml#/components/schemas/limitOffsetConfig"
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/paginationResponse"
+ tags:
+ - pagination
+ x-speakeasy-pagination:
+ type: offsetLimit
+ inputs:
+ - name: limit
+ in: requestBody
+ type: limit
+ - name: offset
+ in: requestBody
+ type: offset
+ outputs:
+ results: $.resultArray
+ /pagination/cursor:
+ get:
+ operationId: paginationCursorParams
+ servers:
+ - url: http://localhost:35456
+ parameters:
+ - name: cursor
+ in: query
+ schema:
+ type: integer
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/paginationResponse"
+ tags:
+ - pagination
+ x-speakeasy-pagination:
+ type: cursor
+ inputs:
+ - name: cursor
+ in: parameters
+ type: cursor
+ outputs:
+ nextCursor: $.resultArray[(@.length-1)]
+ put:
+ operationId: paginationCursorBody
+ servers:
+ - url: http://localhost:35456
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ cursor:
+ type: integer
+ required:
+ - cursor
+ required: true
+ responses:
+ "200":
+ $ref: "speakeasy-components.yaml#/components/responses/paginationResponse"
+ tags:
+ - pagination
+ x-speakeasy-pagination:
+ type: cursor
+ inputs:
+ - name: cursor
+ in: requestBody
+ type: cursor
+ outputs:
+ nextCursor: $.resultArray[(@.length-1)]
+ /group/first:
+ get:
+ operationId: groupFirstGet
+ x-speakeasy-name-override: get
+ x-speakeasy-group: first
+ responses:
+ "200":
+ description: OK
+ /group/second:
+ get:
+ operationId: groupSecondGet
+ x-speakeasy-name-override: get
+ x-speakeasy-group: second
+ responses:
+ "200":
+ description: OK
+ /anything/nested:
+ get:
+ operationId: nestedGet
+ x-speakeasy-name-override: get
+ x-speakeasy-group: nested
+ responses:
+ "200":
+ description: OK
+ /anything/nested/first:
+ get:
+ operationId: nestedFirstGet
+ x-speakeasy-name-override: get
+ x-speakeasy-group: nested.first
+ responses:
+ "200":
+ description: OK
+ /anything/nested/second:
+ get:
+ operationId: nestedSecondGet
+ x-speakeasy-name-override: get
+ x-speakeasy-group: nested.second
+ responses:
+ "200":
+ description: OK
+ /anything/nest/first:
+ get:
+ operationId: nestFirstGet
+ x-speakeasy-name-override: get
+ x-speakeasy-group: nest.first
+ responses:
+ "200":
+ description: OK
+ /resource:
+ post:
+ x-speakeasy-entity-operation: ExampleResource#create
+ operationId: createResource
+ tags:
+ - resource
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/ExampleResource"
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/ExampleResource"
+ /fileResource:
+ post:
+ x-speakeasy-entity-operation: File#create
+ operationId: createFile
+ tags:
+ - resource
+ requestBody:
+ content:
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ file:
+ type: string
+ format: binary
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/FileResource"
+ /resource/{resourceId}:
+ get:
+ x-speakeasy-entity-operation: ExampleResource#read
+ operationId: getResource
+ tags:
+ - resource
+ parameters:
+ - name: resourceId
+ in: path
+ x-speakeasy-match: id
+ schema:
+ type: string
+ required: true
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/ExampleResource"
+ post:
+ x-speakeasy-entity-operation: ExampleResource#update
+ operationId: updateResource
+ tags:
+ - resource
+ parameters:
+ - name: resourceId
+ in: path
+ x-speakeasy-match: id
+ schema:
+ type: string
+ required: true
+ responses:
+ "202":
+ description: OK
+ delete:
+ x-speakeasy-entity-operation: ExampleResource#delete
+ operationId: deleteResource
+ tags:
+ - resource
+ parameters:
+ - name: resourceId
+ in: path
+ x-speakeasy-match: id
+ schema:
+ type: string
+ required: true
+ responseBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/ExampleResource"
+ responses:
+ "204":
+ description: No Content
+ /retries:
+ get:
+ operationId: retriesGet
+ servers:
+ - url: http://localhost:35456
+ parameters:
+ - name: request-id
+ in: query
+ schema:
+ type: string
+ required: true
+ - name: num-retries
+ in: query
+ schema:
+ type: integer
+ tags:
+ - retries
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ title: retries
+ type: object
+ properties:
+ retries:
+ type: integer
+ required:
+ - retries
+ x-speakeasy-retries:
+ strategy: backoff
+ backoff:
+ initialInterval: 10 # 10 ms
+ maxInterval: 200 # 200 ms
+          maxElapsedTime: 1000 # 1 second
+ exponent: 1.5
+ statusCodes:
+ - 503
+ retryConnectionErrors: false
+ /docs/per-language-docs:
+ get:
+ operationId: getDocumentationPerLanguage
+ description: Gets documentation for some language, I guess.
+ x-speakeasy-docs:
+ go:
+ description: Get stuff in Golang.
+ python:
+ description: Get stuff in Python.
+ typescript:
+ description: Get stuff in TypeScript.
+ parameters:
+ - name: language
+ description: The language parameter for this endpoint.
+ in: query
+ required: true
+ schema:
+ type: string
+ x-speakeasy-docs:
+ go:
+ description: The Golang language is uptight.
+ python:
+ description: The Python language is popular.
+ typescript:
+              description: The TypeScript language is corporate.
+ tags:
+ - documentation
+ responses:
+ "200":
+ description: OK
+ x-speakeasy-docs:
+ go:
+ description: Golang is OK
+ python:
+ description: Python is OK
+ typescript:
+ description: TypeScript is OK
+components:
+ schemas:
+ ExampleVehicle:
+ type: object
+ oneOf:
+ - $ref: "#/components/schemas/ExampleBoat"
+ - $ref: "#/components/schemas/ExampleCar"
+ ExampleBoat:
+ type: object
+ properties:
+ type:
+ type: string
+ enum:
+ - boat
+ name:
+ type: string
+ length:
+ type: number
+ createdAt:
+ type: string
+ format: date-time
+ updatedAt:
+ type: string
+ format: date-time
+ required:
+ - type
+ - name
+ - length
+ ExampleCar:
+ type: object
+ properties:
+ type:
+ type: string
+ enum:
+ - car
+ name:
+ type: string
+ make:
+ type: string
+ model:
+ type: string
+ year:
+ type: number
+ createdAt:
+ type: string
+ format: date-time
+ updatedAt:
+ type: string
+ format: date-time
+ required:
+ - type
+ - name
+ - make
+ - model
+ - year
+ FileResource:
+ x-speakeasy-entity: File
+ type: object
+ properties:
+ id:
+ type: string
+ required:
+ - id
+ ExampleResource:
+ x-speakeasy-entity: ExampleResource
+ type: object
+ properties:
+ id:
+ type: string
+ name:
+ type: string
+ createdAt:
+ type: string
+ format: date-time
+ mapOfString:
+ type: object
+ additionalProperties:
+ type: string
+ mapOfInteger:
+ type: object
+ additionalProperties:
+ type: integer
+ arrayOfString:
+ type: array
+ items:
+ type: string
+ arrayOfNumber:
+ type: array
+ items:
+ type: number
+ enumStr:
+ type: string
+ enum:
+ - one
+ - two
+ - three
+ enumNumber:
+ type: integer
+ enum:
+ - 1
+ - 2
+ - 3
+ updatedAt:
+ type: string
+ format: date-time
+ chocolates:
+ type: array
+ items:
+ type: object
+ properties:
+ description:
+ type: string
+ required:
+ - description
+ vehicle:
+ $ref: "#/components/schemas/ExampleVehicle"
+ required:
+ - id
+ - name
+ - chocolates
+ - vehicle
+ primitiveTypeUnion:
+ x-speakeasy-include: true
+ oneOf:
+ - type: string
+ - type: integer
+ - type: integer
+ format: int32
+ - type: number
+ - type: number
+ format: float
+ - type: boolean
+ numericUnion:
+ x-speakeasy-include: true
+ oneOf:
+ - type: integer
+ - type: number
+ - type: integer
+ format: bigint
+ - type: string
+ format: decimal
+ nullableTypes:
+ type: object
+ properties:
+ nullableTypeArray:
+ type:
+ - null
+ - string
+ nullableType:
+ type: string
+ nullable: true
+ nullableObject:
+ type: ["object", "null"]
+ required:
+ - required
+ properties:
+ required:
+ type: integer
+ optional:
+ type: string
+ oneOfObjectOrArrayOfObjects:
+ oneOf:
+ - $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ - type: "array"
+ items:
+ $ref: "speakeasy-components.yaml#/components/schemas/simpleObject"
+ nullableOneOfTypeInObject:
+ type: object
+ required:
+ - OneOfOne
+ - NullableOneOfOne
+ - NullableOneOfTwo
+ properties:
+ OneOfOne:
+ oneOf:
+ - type: boolean
+ NullableOneOfOne:
+ oneOf:
+ - type: boolean
+ - type: "null"
+ NullableOneOfTwo:
+ oneOf:
+ - type: boolean
+ - type: integer
+ - type: "null"
+ nullableOneOfRefInObject:
+ type: object
+ required:
+ - OneOfOne
+ - NullableOneOfOne
+ - NullableOneOfTwo
+ properties:
+ OneOfOne:
+ oneOf:
+ - $ref: "speakeasy-components.yaml#/components/schemas/typedObject1"
+ NullableOneOfOne:
+ oneOf:
+ - $ref: "speakeasy-components.yaml#/components/schemas/typedObject1"
+ - type: "null"
+ NullableOneOfTwo:
+ oneOf:
+ - $ref: "speakeasy-components.yaml#/components/schemas/typedObject1"
+ - $ref: "speakeasy-components.yaml#/components/schemas/typedObject2"
+ - type: "null"
+ allOfToAllOf:
+ x-speakeasy-include: true
+ title: "allOf1"
+ type: object
+ allOf:
+ - $ref: "#/components/schemas/allOf2"
+ allOf2:
+ type: object
+ title: "allOf2"
+ allOf:
+ - $ref: "#/components/schemas/allOf3"
+ allOf3:
+ type: object
+ title: "allOf3"
+ allOf:
+ - properties:
+ id:
+ type: string
+ title: "allOf4"
+ unsupportedEnums:
+ type: object
+ x-speakeasy-include: true
+ properties:
+ booleanEnum:
+ type: boolean
+ enum:
+ - false
+ numberEnum:
+ type: number
+ enum:
+ - 1.5
+ - 2.5
+ required:
+ - booleanEnum
+ - numberEnum
+ oneOfGenerationStressTest:
+ x-speakeasy-include: true
+ type: object
+ properties:
+ oneOfSameType:
+ oneOf:
+ - type: string
+ minLength: 40
+ maxLength: 40
+ - type: string
+ enum:
+ - latest
+ - type: "null"
+ oneOfFromArrayOfTypes:
+ type: [string, integer, "null"]
+ nullableAny:
+ type: "null"
+ any: {}
+ required:
+ - oneOfSameType
+ - oneOfFromArrayOfTypes
+ - nullableAny
+ - any
+ securitySchemes:
+ basicAuth:
+ type: http
+ scheme: basic
+ x-speakeasy-example: YOUR_USERNAME;YOUR_PASSWORD
+ apiKeyAuth:
+ type: apiKey
+ in: header
+ name: Authorization
+ description: Authenticate using an API Key generated via our platform.
+ x-speakeasy-example: Token YOUR_API_KEY
+ bearerAuth:
+ type: http
+ scheme: bearer
+ x-speakeasy-example: YOUR_JWT
+ apiKeyAuthNew:
+ type: apiKey
+ in: header
+ name: x-api-key
+ x-speakeasy-example: Token
+ oauth2:
+ type: oauth2
+ flows:
+ implicit:
+ authorizationUrl: http://localhost:35123/oauth2/authorize
+ scopes: {}
+ x-speakeasy-example: Bearer YOUR_OAUTH2_TOKEN
+ openIdConnect:
+ type: openIdConnect
+ openIdConnectUrl: http://localhost:35123/.well-known/openid-configuration
+ x-speakeasy-example: Bearer YOUR_OPENID_TOKEN
diff --git a/test_specs/third.yaml b/test_specs/third.yaml
index ba14cc2..2eb586c 100644
--- a/test_specs/third.yaml
+++ b/test_specs/third.yaml
@@ -6,7 +6,8 @@ additionalProperties: false
maxProperties: 1
properties:
-
+ pencils:
+ $ref: '#/properties/property/properties/statistics'
property:
title: title of third prop in third doc
type: object
diff --git a/utils/unwrap_errors.go b/utils/unwrap_errors.go
new file mode 100644
index 0000000..78788b7
--- /dev/null
+++ b/utils/unwrap_errors.go
@@ -0,0 +1,15 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package utils
+
+// UnwrapErrors returns the errors wrapped by err (for example, an error produced by errors.Join); a nil error yields an empty slice.
+func UnwrapErrors(err error) []error {
+	if err == nil {
+		return []error{}
+	}
+	if uw, ok := err.(interface{ Unwrap() []error }); ok {
+		return uw.Unwrap()
+	}
+	return []error{err}
+}
diff --git a/utils/unwrap_errors_test.go b/utils/unwrap_errors_test.go
new file mode 100644
index 0000000..4737dc2
--- /dev/null
+++ b/utils/unwrap_errors_test.go
@@ -0,0 +1,36 @@
+// Copyright 2023 Princess B33f Heavy Industries / Dave Shanley
+// SPDX-License-Identifier: MIT
+
+package utils
+
+import (
+ "errors"
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+func TestUnwrapErrors(t *testing.T) {
+
+ // create an array of errors
+ errs := []error{
+ errors.New("first error"),
+ errors.New("second error"),
+ errors.New("third error"),
+ }
+
+ // join them up
+ joined := errors.Join(errs...)
+ assert.Error(t, joined)
+
+ // unwrap them
+ unwrapped := UnwrapErrors(joined)
+ assert.Len(t, unwrapped, 3)
+}
+
+func TestUnwrapErrors_Empty(t *testing.T) {
+ assert.Len(t, UnwrapErrors(nil), 0)
+}
+
+func TestUnwrapErrors_SingleError(t *testing.T) {
+ assert.Len(t, UnwrapErrors(errors.New("single error")), 1)
+}
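
For reference, a minimal caller-side sketch of the new helper, assuming only the standard library and the utils package added above (the error values and package layout are illustrative, not part of this change):

package main

import (
	"errors"
	"fmt"

	"github.com/pb33f/libopenapi/utils"
)

func main() {
	// join multiple errors, as Go 1.20+ code commonly does.
	joined := errors.Join(
		errors.New("first error"),
		errors.New("second error"),
	)

	// UnwrapErrors flattens the joined error back into a slice of individual errors.
	for _, e := range utils.UnwrapErrors(joined) {
		fmt.Println(e)
	}
}
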
diff --git a/utils/utils.go b/utils/utils.go
index b99ea84..1370241 100644
--- a/utils/utils.go
+++ b/utils/utils.go
@@ -3,6 +3,7 @@ package utils
import (
"encoding/json"
"fmt"
+ "net/http"
"net/url"
"regexp"
"sort"
@@ -252,6 +253,9 @@ func FindKeyNode(key string, nodes []*yaml.Node) (keyNode *yaml.Node, valueNode
//numNodes := len(nodes)
for i, v := range nodes {
if i%2 == 0 && key == v.Value {
+ if len(nodes) <= i+1 {
+ return v, nodes[i]
+ }
return v, nodes[i+1] // next node is what we need.
}
for x, j := range v.Content {
@@ -564,7 +568,8 @@ func IsHttpVerb(verb string) bool {
}
// define bracket name expression
-var bracketNameExp = regexp.MustCompile("^(\\w+)\\[(\\w+)\\]$")
+var bracketNameExp = regexp.MustCompile(`^(\w+)\[(\w+)\]$`)
+var pathCharExp = regexp.MustCompile(`[%=;~.]`)
func ConvertComponentIdIntoFriendlyPathSearch(id string) (string, string) {
segs := strings.Split(id, "/")
@@ -573,8 +578,7 @@ func ConvertComponentIdIntoFriendlyPathSearch(id string) (string, string) {
// check for strange spaces, chars and if found, wrap them up, clean them and create a new cleaned path.
for i := range segs {
- pathCharExp, _ := regexp.MatchString("[%=;~.]", segs[i])
- if pathCharExp {
+ if pathCharExp.Match([]byte(segs[i])) {
segs[i], _ = url.QueryUnescape(strings.ReplaceAll(segs[i], "~1", "/"))
segs[i] = fmt.Sprintf("['%s']", segs[i])
if len(cleaned) > 0 {
@@ -612,11 +616,9 @@ func ConvertComponentIdIntoFriendlyPathSearch(id string) (string, string) {
_, err := strconv.ParseInt(name, 10, 32)
var replaced string
if err != nil {
- replaced = strings.ReplaceAll(fmt.Sprintf("%s",
- strings.Join(cleaned, ".")), "#", "$")
+ replaced = strings.ReplaceAll(strings.Join(cleaned, "."), "#", "$")
} else {
- replaced = strings.ReplaceAll(fmt.Sprintf("%s",
- strings.Join(cleaned, ".")), "#", "$")
+ replaced = strings.ReplaceAll(strings.Join(cleaned, "."), "#", "$")
}
if len(replaced) > 0 {
@@ -711,10 +713,11 @@ func CheckEnumForDuplicates(seq []*yaml.Node) []*yaml.Node {
return res
}
+var whitespaceExp = regexp.MustCompile(`\n( +)`)
+
// DetermineWhitespaceLength will determine the length of the whitespace for a JSON or YAML file.
func DetermineWhitespaceLength(input string) int {
- exp := regexp.MustCompile(`\n( +)`)
- whiteSpace := exp.FindAllStringSubmatch(input, -1)
+ whiteSpace := whitespaceExp.FindAllStringSubmatch(input, -1)
var filtered []string
for i := range whiteSpace {
filtered = append(filtered, whiteSpace[i][1])
@@ -749,3 +752,5 @@ func CheckForMergeNodes(node *yaml.Node) {
}
}
}
+
+type RemoteURLHandler = func(url string) (*http.Response, error)
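
The RemoteURLHandler alias above is the signature a custom remote-document fetcher needs to satisfy. A minimal sketch of one, assuming a bearer token and a 30-second timeout (both illustrative assumptions, not part of this change):

package main

import (
	"net/http"
	"time"

	"github.com/pb33f/libopenapi/utils"
)

// newAuthenticatedHandler returns a handler that attaches an Authorization
// header to every remote document request. The header scheme is an assumption.
func newAuthenticatedHandler(token string) utils.RemoteURLHandler {
	client := &http.Client{Timeout: 30 * time.Second}
	return func(url string) (*http.Response, error) {
		req, err := http.NewRequest(http.MethodGet, url, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("Authorization", "Bearer "+token)
		return client.Do(req)
	}
}

func main() {
	handler := newAuthenticatedHandler("example-token")
	_, _ = handler("https://example.com/openapi.yaml") // illustrative call
}
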
diff --git a/utils/utils_test.go b/utils/utils_test.go
index 6bdd586..028deca 100644
--- a/utils/utils_test.go
+++ b/utils/utils_test.go
@@ -1,11 +1,12 @@
package utils
import (
- "github.com/stretchr/testify/assert"
- "gopkg.in/yaml.v3"
"os"
"sync"
"testing"
+
+ "github.com/stretchr/testify/assert"
+ "gopkg.in/yaml.v3"
)
type petstore []byte
@@ -168,8 +169,7 @@ func TestConvertInterfaceToStringArray_NoType(t *testing.T) {
}
func TestConvertInterfaceToStringArray_Invalid(t *testing.T) {
- var d interface{}
- d = "I am a carrot"
+ var d interface{} = "I am a carrot"
parsed := ConvertInterfaceToStringArray(d)
assert.Nil(t, parsed)
}
@@ -195,8 +195,7 @@ func TestConvertInterfaceArrayToStringArray_NoType(t *testing.T) {
}
func TestConvertInterfaceArrayToStringArray_Invalid(t *testing.T) {
- var d interface{}
- d = "weed is good"
+ var d interface{} = "weed is good"
parsed := ConvertInterfaceArrayToStringArray(d)
assert.Nil(t, parsed)
}
@@ -229,12 +228,11 @@ func TestExtractValueFromInterfaceMap_Flat(t *testing.T) {
m["maddy"] = "niblet"
d = m
parsed := ExtractValueFromInterfaceMap("maddy", d)
- assert.Equal(t, "niblet", parsed.(interface{}))
+ assert.Equal(t, "niblet", parsed)
}
func TestExtractValueFromInterfaceMap_NotFound(t *testing.T) {
- var d interface{}
- d = "not a map"
+ var d interface{} = "not a map"
parsed := ExtractValueFromInterfaceMap("melody", d)
assert.Nil(t, parsed)
}
@@ -319,6 +317,19 @@ func TestFindKeyNode(t *testing.T) {
assert.Equal(t, 47, k.Line)
}
+func TestFindKeyNodeOffByOne(t *testing.T) {
+
+ k, v := FindKeyNode("key", []*yaml.Node{
+ {
+ Value: "key",
+ Line: 999,
+ },
+ })
+ assert.NotNil(t, k)
+ assert.NotNil(t, v)
+ assert.Equal(t, 999, k.Line)
+}
+
func TestFindKeyNode_ValueIsKey(t *testing.T) {
a := &yaml.Node{
@@ -686,6 +697,14 @@ func TestConvertComponentIdIntoFriendlyPathSearch_Crazy(t *testing.T) {
assert.Equal(t, "expires_at", segment)
}
+func BenchmarkConvertComponentIdIntoFriendlyPathSearch_Crazy(t *testing.B) {
+ for n := 0; n < t.N; n++ {
+ segment, path := ConvertComponentIdIntoFriendlyPathSearch("#/components/schemas/gpg-key/properties/subkeys/example/0/expires_at")
+ assert.Equal(t, "$.components.schemas.gpg-key.properties.subkeys.example[0].expires_at", path)
+ assert.Equal(t, "expires_at", segment)
+ }
+}
+
func TestConvertComponentIdIntoFriendlyPathSearch_Simple(t *testing.T) {
segment, path := ConvertComponentIdIntoFriendlyPathSearch("#/~1fresh~1pizza/get")
assert.Equal(t, "$['/fresh/pizza'].get", path)
diff --git a/what-changed/model/callback_test.go b/what-changed/model/callback_test.go
index 6e559c0..efa64f4 100644
--- a/what-changed/model/callback_test.go
+++ b/what-changed/model/callback_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
@@ -36,8 +37,8 @@ func TestCompareCallback(t *testing.T) {
var rDoc v3.Callback
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareCallback(&lDoc, &rDoc)
@@ -82,8 +83,8 @@ func TestCompareCallback_Add(t *testing.T) {
var rDoc v3.Callback
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareCallback(&lDoc, &rDoc)
@@ -133,8 +134,8 @@ func TestCompareCallback_Modify(t *testing.T) {
var rDoc v3.Callback
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareCallback(&lDoc, &rDoc)
@@ -183,8 +184,8 @@ func TestCompareCallback_Remove(t *testing.T) {
var rDoc v3.Callback
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareCallback(&rDoc, &lDoc)
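
The test updates in this file and the ones that follow all track the same API change: the low-level Build methods now take a context.Context as their first argument. A minimal sketch of the updated call sequence, assuming a YAML document node already unmarshalled into node (the helper name and package are illustrative):

package example

import (
	"context"

	"github.com/pb33f/libopenapi/datamodel/low"
	v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
	"gopkg.in/yaml.v3"
)

func buildCallback(node *yaml.Node) (*v3.Callback, error) {
	var cb v3.Callback
	if err := low.BuildModel(node.Content[0], &cb); err != nil {
		return nil, err
	}
	// context first, then the same arguments as before: nil, the value node, and the index (nil here).
	if err := cb.Build(context.Background(), nil, node.Content[0], nil); err != nil {
		return nil, err
	}
	return &cb, nil
}
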
diff --git a/what-changed/model/components_test.go b/what-changed/model/components_test.go
index 4853b66..fb79301 100644
--- a/what-changed/model/components_test.go
+++ b/what-changed/model/components_test.go
@@ -4,11 +4,11 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/pb33f/libopenapi/index"
- "github.com/pb33f/libopenapi/resolver"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
"testing"
@@ -39,8 +39,8 @@ thing2:
var rDoc v2.Definitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -69,8 +69,8 @@ thing2:
var rDoc v2.Definitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -108,8 +108,8 @@ thing3:
var rDoc v2.Definitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -148,8 +148,8 @@ thing3:
var rDoc v2.Definitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&rDoc, &lDoc)
@@ -187,8 +187,8 @@ param4:
var rDoc v2.ParameterDefinitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -226,8 +226,8 @@ param4:
var rDoc v2.ParameterDefinitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&rDoc, &lDoc)
@@ -261,8 +261,8 @@ resp3:
var rDoc v2.ResponsesDefinitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -298,8 +298,8 @@ resp3:
var rDoc v2.ResponsesDefinitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&rDoc, &lDoc)
@@ -331,8 +331,8 @@ scheme2:
var rDoc v2.SecurityDefinitions
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -367,8 +367,8 @@ schemas:
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -399,8 +399,8 @@ func TestCompareComponents_OpenAPI_Schemas_Refs_FullBuild(t *testing.T) {
idx := index.NewSpecIndex(&lNode)
- _ = lDoc.Build(lNode.Content[0], idx)
- _ = rDoc.Build(rNode.Content[0], idx)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], idx)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], idx)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -431,8 +431,8 @@ schemas:
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -468,8 +468,8 @@ schemas:
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -504,8 +504,8 @@ schemas:
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&rDoc, &lDoc)
@@ -534,8 +534,8 @@ responses:
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -564,8 +564,8 @@ func TestCompareComponents_OpenAPI_Responses_FullBuild(t *testing.T) {
idx := index.NewSpecIndex(&lNode)
- _ = lDoc.Build(lNode.Content[0], idx)
- _ = rDoc.Build(rNode.Content[0], idx)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], idx)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], idx)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -600,8 +600,8 @@ func TestCompareComponents_OpenAPI_ResponsesAdd_FullBuild(t *testing.T) {
idx := index.NewSpecIndex(&lNode)
- _ = lDoc.Build(lNode.Content[0], idx)
- _ = rDoc.Build(rNode.Content[0], idx)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], idx)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], idx)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -639,8 +639,8 @@ func TestCompareComponents_OpenAPI_Responses_FullBuild_IdenticalRef(t *testing.T
idx := index.NewSpecIndex(&lNode)
idx2 := index.NewSpecIndex(&rNode)
- _ = lDoc.Build(lNode.Content[0], idx)
- _ = rDoc.Build(rNode.Content[0], idx2)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], idx)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], idx2)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -677,14 +677,14 @@ func TestCompareComponents_OpenAPI_Responses_FullBuild_CircularRef(t *testing.T)
idx2 := index.NewSpecIndex(&rNode)
// resolver required to check circular refs.
- re1 := resolver.NewResolver(idx)
- re2 := resolver.NewResolver(idx2)
+ re1 := index.NewResolver(idx)
+ re2 := index.NewResolver(idx2)
re1.CheckForCircularReferences()
re2.CheckForCircularReferences()
- _ = lDoc.Build(lNode.Content[0], idx)
- _ = rDoc.Build(rNode.Content[0], idx2)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], idx)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], idx2)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -694,16 +694,16 @@ func TestCompareComponents_OpenAPI_Responses_FullBuild_CircularRef(t *testing.T)
//func TestCompareComponents_OpenAPI_Responses_Modify(t *testing.T) {
//
// left := `responses:
-// niceResponse:
-// description: hello
-// badResponse:
-// description: go away please`
+// niceResponse:
+// description: hello
+// badResponse:
+// description: go away please`
//
// right := `responses:
-// niceResponse:
-// description: hello my matey
-// badResponse:
-// description: go away please, now!`
+// niceResponse:
+// description: hello my matey
+// badResponse:
+// description: go away please, now!`
//
// var lNode, rNode yaml.Node
// _ = yaml.Unmarshal([]byte(left), &lNode)
@@ -714,8 +714,8 @@ func TestCompareComponents_OpenAPI_Responses_FullBuild_CircularRef(t *testing.T)
// var rDoc v3.Components
// _ = low.BuildModel(lNode.Content[0], &lDoc)
// _ = low.BuildModel(rNode.Content[0], &rDoc)
-// _ = lDoc.Build(lNode.Content[0], nil)
-// _ = rDoc.Build(rNode.Content[0], nil)
+// _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+// _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
//
// // compare.
// extChanges := CompareComponents(&rDoc, &lDoc)
@@ -748,8 +748,8 @@ func TestCompareComponents_OpenAPI_Responses_Add(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -783,8 +783,8 @@ func TestCompareComponents_OpenAPI_Responses_Remove(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&rDoc, &lDoc)
@@ -812,8 +812,8 @@ func TestCompareComponents_OpenAPI_Parameters_Equal(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -845,8 +845,8 @@ func TestCompareComponents_OpenAPI_Parameters_Added(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -880,8 +880,8 @@ func TestCompareComponents_OpenAPI_Parameters_Removed(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&rDoc, &lDoc)
@@ -911,8 +911,8 @@ func TestCompareComponents_OpenAPI_RequestBodies_Modified(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -941,8 +941,8 @@ func TestCompareComponents_OpenAPI_Headers_Add(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -969,8 +969,8 @@ func TestCompareComponents_OpenAPI_SecuritySchemes_Equal(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -1000,8 +1000,8 @@ func TestCompareComponents_OpenAPI_SecuritySchemes_Modified(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -1030,8 +1030,8 @@ func TestCompareComponents_OpenAPI_Links_Added(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -1066,8 +1066,8 @@ func TestCompareComponents_OpenAPI_Callbacks_Modified(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
@@ -1090,8 +1090,8 @@ func TestCompareComponents_OpenAPI_Extensions_Modified(t *testing.T) {
var rDoc v3.Components
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(lNode.Content[0], nil)
- _ = rDoc.Build(rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), rNode.Content[0], nil)
// compare.
extChanges := CompareComponents(&lDoc, &rDoc)
diff --git a/what-changed/model/contact_test.go b/what-changed/model/contact_test.go
index 24b746e..1f77dcd 100644
--- a/what-changed/model/contact_test.go
+++ b/what-changed/model/contact_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/stretchr/testify/assert"
@@ -27,8 +28,8 @@ url: https://pb33f.io`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
@@ -54,8 +55,8 @@ url: https://pb33f.io`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
@@ -80,8 +81,8 @@ name: buckaroo`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
@@ -106,8 +107,8 @@ name: buckaroo`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
@@ -131,8 +132,8 @@ email: buckaroo@pb33f.io`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
@@ -157,8 +158,8 @@ email: buckaroo@pb33f.io`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
@@ -183,8 +184,8 @@ email: dave@quobix.com`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
@@ -210,8 +211,8 @@ email: dave@quobix.com`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
@@ -235,8 +236,8 @@ url: https://pb33f.io`
var rDoc lowbase.Contact
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareContact(&lDoc, &rDoc)
diff --git a/what-changed/model/document_test.go b/what-changed/model/document_test.go
index 960a231..73ee2b5 100644
--- a/what-changed/model/document_test.go
+++ b/what-changed/model/document_test.go
@@ -79,8 +79,8 @@ produces:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -108,8 +108,8 @@ produces:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -137,8 +137,8 @@ basePath: /api`
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -168,8 +168,8 @@ info:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -194,8 +194,8 @@ info:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -221,8 +221,8 @@ info:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(rDoc, lDoc)
@@ -248,8 +248,8 @@ externalDocs:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -271,8 +271,8 @@ externalDocs:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -296,8 +296,8 @@ externalDocs:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(rDoc, lDoc)
@@ -335,8 +335,8 @@ security:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -370,8 +370,8 @@ security:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -403,8 +403,8 @@ definitions:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -436,8 +436,8 @@ securityDefinitions:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -464,8 +464,8 @@ securityDefinitions:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -501,8 +501,8 @@ parameters:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -533,8 +533,8 @@ parameters:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -570,8 +570,8 @@ responses:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -601,8 +601,8 @@ responses:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -634,8 +634,8 @@ paths:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -665,8 +665,8 @@ paths:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -698,8 +698,8 @@ paths:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -728,8 +728,8 @@ tags:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -758,8 +758,8 @@ tags:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v2.CreateDocument(siLeft)
- rDoc, _ := v2.CreateDocument(siRight)
+ lDoc, _ := v2.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v2.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -783,8 +783,8 @@ jsonSchemaDialect: https://pb33f.io/schema`
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(&lDoc, &rDoc)
@@ -811,8 +811,8 @@ jsonSchemaDialect: https://pb33f.io/schema/changed`
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -840,8 +840,8 @@ components:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -870,8 +870,8 @@ components:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(rDoc, lDoc)
@@ -910,8 +910,8 @@ paths:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -949,8 +949,8 @@ security:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -983,8 +983,8 @@ components:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -1015,8 +1015,8 @@ servers:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -1050,8 +1050,8 @@ components:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -1089,8 +1089,8 @@ webhooks:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -1133,8 +1133,8 @@ paths:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
@@ -1176,8 +1176,8 @@ paths:
siLeft, _ := datamodel.ExtractSpecInfo([]byte(left))
siRight, _ := datamodel.ExtractSpecInfo([]byte(right))
- lDoc, _ := v3.CreateDocument(siLeft)
- rDoc, _ := v3.CreateDocument(siRight)
+ lDoc, _ := v3.CreateDocumentFromConfig(siLeft, datamodel.NewDocumentConfiguration())
+ rDoc, _ := v3.CreateDocumentFromConfig(siRight, datamodel.NewDocumentConfiguration())
// compare.
extChanges := CompareDocuments(lDoc, rDoc)
diff --git a/what-changed/model/encoding_test.go b/what-changed/model/encoding_test.go
index 02cdc73..1ca5542 100644
--- a/what-changed/model/encoding_test.go
+++ b/what-changed/model/encoding_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
@@ -38,8 +39,8 @@ allowReserved: true`
var rDoc v3.Encoding
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareEncoding(&lDoc, &rDoc)
@@ -73,8 +74,8 @@ allowReserved: true`
var rDoc v3.Encoding
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareEncoding(&lDoc, &rDoc)
@@ -108,8 +109,8 @@ allowReserved: true`
var rDoc v3.Encoding
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareEncoding(&lDoc, &rDoc)
@@ -144,8 +145,8 @@ allowReserved: true`
var rDoc v3.Encoding
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareEncoding(&rDoc, &lDoc)
diff --git a/what-changed/model/example_test.go b/what-changed/model/example_test.go
index 087cc4e..e97f819 100644
--- a/what-changed/model/example_test.go
+++ b/what-changed/model/example_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"testing"
"github.com/stretchr/testify/assert"
@@ -28,8 +29,8 @@ func TestCompareExamples_SummaryModified(t *testing.T) {
var rDoc base.Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamples(&lDoc, &rDoc)
@@ -61,8 +62,8 @@ func TestCompareExamples_Map(t *testing.T) {
var rDoc base.Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamples(&lDoc, &rDoc)
@@ -90,8 +91,8 @@ func TestCompareExamples_MapAdded(t *testing.T) {
var rDoc base.Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamples(&lDoc, &rDoc)
@@ -119,8 +120,8 @@ func TestCompareExamples_MapRemoved(t *testing.T) {
var rDoc base.Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamples(&rDoc, &lDoc)
@@ -144,8 +145,8 @@ description: cure all`
var rDoc base.Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamples(&lDoc, &rDoc)
@@ -171,8 +172,8 @@ x-herbs: cure all`
var rDoc base.Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamples(&lDoc, &rDoc)
@@ -197,8 +198,8 @@ func TestCompareExamples_Identical(t *testing.T) {
var rDoc base.Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamples(&lDoc, &rDoc)
assert.Nil(t, extChanges)
@@ -220,8 +221,8 @@ func TestCompareExamples_Date(t *testing.T) {
var rDoc base.Example
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
changes := CompareExamples(&lDoc, &rDoc)
diff --git a/what-changed/model/examples_test.go b/what-changed/model/examples_test.go
index 5ac6bab..731aef3 100644
--- a/what-changed/model/examples_test.go
+++ b/what-changed/model/examples_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -26,8 +27,8 @@ func TestCompareExamplesV2(t *testing.T) {
var rDoc v2.Examples
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamplesV2(&lDoc, &rDoc)
assert.Equal(t, extChanges.TotalChanges(), 1)
@@ -54,8 +55,8 @@ yummy: coffee`
var rDoc v2.Examples
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamplesV2(&lDoc, &rDoc)
assert.Equal(t, extChanges.TotalChanges(), 1)
@@ -79,8 +80,8 @@ yummy: coffee`
var rDoc v2.Examples
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamplesV2(&rDoc, &lDoc)
assert.Equal(t, extChanges.TotalChanges(), 1)
@@ -103,8 +104,8 @@ func TestCompareExamplesV2_Identical(t *testing.T) {
var rDoc v2.Examples
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareExamplesV2(&rDoc, &lDoc)
assert.Nil(t, extChanges)
diff --git a/what-changed/model/external_docs_test.go b/what-changed/model/external_docs_test.go
index 4fc06ed..df2fa4b 100644
--- a/what-changed/model/external_docs_test.go
+++ b/what-changed/model/external_docs_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
lowv3 "github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -31,8 +32,8 @@ x-testing: hiya!`
var rDoc lowbase.ExternalDoc
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareExternalDocs(&lDoc, &rDoc)
@@ -88,8 +89,8 @@ url: https://quobix.com`
var rDoc lowbase.ExternalDoc
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareExternalDocs(&lDoc, &rDoc)
@@ -139,8 +140,8 @@ x-testing: hello`
var rDoc lowbase.ExternalDoc
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareExternalDocs(&lDoc, &rDoc)
@@ -165,8 +166,8 @@ x-testing: hello`
var rDoc lowbase.ExternalDoc
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareExternalDocs(&lDoc, &rDoc)
@@ -191,8 +192,8 @@ url: https://pb33f.io`
var rDoc lowbase.ExternalDoc
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareExternalDocs(&lDoc, &rDoc)
@@ -217,8 +218,8 @@ description: something`
var rDoc lowbase.ExternalDoc
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareExternalDocs(&lDoc, &rDoc)
@@ -243,8 +244,8 @@ url: https://pb33f.io`
var rDoc lowbase.ExternalDoc
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareExternalDocs(&lDoc, &rDoc)
diff --git a/what-changed/model/header_test.go b/what-changed/model/header_test.go
index 74e51ca..c6d6a4b 100644
--- a/what-changed/model/header_test.go
+++ b/what-changed/model/header_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -73,8 +74,8 @@ func TestCompareHeaders_v2_identical(t *testing.T) {
var rDoc v2.Header
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareHeadersV2(&lDoc, &rDoc)
@@ -116,8 +117,8 @@ x-beer: really yummy`
var rDoc v2.Header
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareHeadersV2(&lDoc, &rDoc)
@@ -160,8 +161,8 @@ x-beer: yummy`
var rDoc v2.Header
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareHeadersV2(&rDoc, &lDoc)
@@ -205,8 +206,8 @@ x-beer: yummy`
var rDoc v2.Header
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareHeadersV2(&lDoc, &rDoc)
@@ -232,8 +233,8 @@ func TestCompareHeaders_v2_ItemsModified(t *testing.T) {
var rDoc v2.Header
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareHeadersV2(&lDoc, &rDoc)
@@ -255,8 +256,8 @@ func TestCompareHeaders_v3_identical(t *testing.T) {
var rDoc v3.Header
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareHeadersV3(&lDoc, &rDoc)
@@ -297,8 +298,8 @@ x-beer: yummy`
var rDoc v3.Header
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareHeadersV3(&lDoc, &rDoc)
diff --git a/what-changed/model/info.go b/what-changed/model/info.go
index 4f93364..a1de09f 100644
--- a/what-changed/model/info.go
+++ b/what-changed/model/info.go
@@ -11,8 +11,9 @@ import (
// InfoChanges represents the number of changes to an Info object. Part of an OpenAPI document
type InfoChanges struct {
*PropertyChanges
- ContactChanges *ContactChanges `json:"contact,omitempty" yaml:"contact,omitempty"`
- LicenseChanges *LicenseChanges `json:"license,omitempty" yaml:"license,omitempty"`
+ ContactChanges *ContactChanges `json:"contact,omitempty" yaml:"contact,omitempty"`
+ LicenseChanges *LicenseChanges `json:"license,omitempty" yaml:"license,omitempty"`
+ ExtensionChanges *ExtensionChanges `json:"extensions,omitempty" yaml:"extensions,omitempty"`
}
// GetAllChanges returns a slice of all changes made between Info objects
@@ -25,6 +26,9 @@ func (i *InfoChanges) GetAllChanges() []*Change {
if i.LicenseChanges != nil {
changes = append(changes, i.LicenseChanges.GetAllChanges()...)
}
+ if i.ExtensionChanges != nil {
+ changes = append(changes, i.ExtensionChanges.GetAllChanges()...)
+ }
return changes
}
@@ -37,6 +41,9 @@ func (i *InfoChanges) TotalChanges() int {
if i.LicenseChanges != nil {
t += i.LicenseChanges.TotalChanges()
}
+ if i.ExtensionChanges != nil {
+ t += i.ExtensionChanges.TotalChanges()
+ }
return t
}
@@ -139,6 +146,10 @@ func CompareInfo(l, r *base.Info) *InfoChanges {
l.License.ValueNode, nil, false, r.License.Value, nil)
}
}
+
+ // check extensions.
+ i.ExtensionChanges = CompareExtensions(l.Extensions, r.Extensions)
+
i.PropertyChanges = NewPropertyChanges(changes)
if i.TotalChanges() <= 0 {
return nil
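
With ExtensionChanges wired into GetAllChanges and TotalChanges, extension diffs on an Info object now surface alongside contact and license diffs. A minimal consumer sketch, assuming the model package shown above; the helper name is an assumption, not part of libopenapi's API.

package model

import (
	"fmt"

	"github.com/pb33f/libopenapi/datamodel/low/base"
)

// summarizeInfoExtensionChanges is a hypothetical helper showing how the new
// ExtensionChanges field can be consumed: CompareInfo returns nil when the two
// Info objects are identical, and extension-level changes are reported through
// the same Change slice as every other property.
func summarizeInfoExtensionChanges(l, r *base.Info) {
	ic := CompareInfo(l, r)
	if ic == nil || ic.ExtensionChanges == nil {
		return
	}
	for _, c := range ic.ExtensionChanges.GetAllChanges() {
		fmt.Printf("extension %q: change %v\n", c.Property, c.ChangeType)
	}
}
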
diff --git a/what-changed/model/info_test.go b/what-changed/model/info_test.go
index 33396f5..cc06a04 100644
--- a/what-changed/model/info_test.go
+++ b/what-changed/model/info_test.go
@@ -4,12 +4,14 @@
package model
import (
+ "context"
+ "testing"
+
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
- "github.com/pb33f/libopenapi/datamodel/low/v3"
+ v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v3"
- "testing"
)
func TestCompareInfo_DescriptionAdded(t *testing.T) {
@@ -42,8 +44,8 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -83,8 +85,8 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -123,8 +125,8 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -161,8 +163,8 @@ contact:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -199,8 +201,8 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -239,8 +241,8 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -276,8 +278,8 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -313,8 +315,8 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -353,8 +355,8 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
@@ -374,7 +376,8 @@ contact:
name: buckaroo
email: buckaroo@pb33f.io
license:
- name: MIT`
+ name: MIT
+x-extension: extension`
right := `title: a nice spec
termsOfService: https://pb33f.io/terms
@@ -383,7 +386,8 @@ contact:
name: buckaroo
email: buckaroo@pb33f.io
license:
- name: MIT`
+ name: MIT
+x-extension: extension`
var lNode, rNode yaml.Node
_ = yaml.Unmarshal([]byte(left), &lNode)
@@ -394,10 +398,104 @@ license:
var rDoc base.Info
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareInfo(&lDoc, &rDoc)
assert.Nil(t, extChanges)
}
+
+func TestCompareInfo_ExtensionAdded(t *testing.T) {
+
+ left := `title: a nice spec
+version: '1.2.3'
+`
+
+ right := `title: a nice spec
+version: '1.2.3'
+x-extension: new extension
+`
+
+ var lNode, rNode yaml.Node
+ _ = yaml.Unmarshal([]byte(left), &lNode)
+ _ = yaml.Unmarshal([]byte(right), &rNode)
+
+ // create low level objects
+ var lDoc base.Info
+ var rDoc base.Info
+ _ = low.BuildModel(lNode.Content[0], &lDoc)
+ _ = low.BuildModel(rNode.Content[0], &rDoc)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
+
+ // compare.
+ extChanges := CompareInfo(&lDoc, &rDoc)
+ assert.Equal(t, 1, extChanges.TotalChanges())
+ assert.Len(t, extChanges.GetAllChanges(), 1)
+ assert.Equal(t, ObjectAdded, extChanges.ExtensionChanges.Changes[0].ChangeType)
+ assert.Equal(t, "x-extension", extChanges.ExtensionChanges.Changes[0].Property)
+}
+
+func TestCompareInfo_ExtensionRemoved(t *testing.T) {
+
+ left := `title: a nice spec
+version: '1.2.3'
+x-extension: extension
+`
+
+ right := `title: a nice spec
+version: '1.2.3'
+`
+
+ var lNode, rNode yaml.Node
+ _ = yaml.Unmarshal([]byte(left), &lNode)
+ _ = yaml.Unmarshal([]byte(right), &rNode)
+
+ // create low level objects
+ var lDoc base.Info
+ var rDoc base.Info
+ _ = low.BuildModel(lNode.Content[0], &lDoc)
+ _ = low.BuildModel(rNode.Content[0], &rDoc)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
+
+ // compare.
+ extChanges := CompareInfo(&lDoc, &rDoc)
+ assert.Equal(t, 1, extChanges.TotalChanges())
+ assert.Len(t, extChanges.GetAllChanges(), 1)
+ assert.Equal(t, ObjectRemoved, extChanges.ExtensionChanges.Changes[0].ChangeType)
+ assert.Equal(t, "x-extension", extChanges.ExtensionChanges.Changes[0].Property)
+}
+
+func TestCompareInfo_ExtensionModified(t *testing.T) {
+
+ left := `title: a nice spec
+version: '1.2.3'
+x-extension: original extension
+`
+
+ right := `title: a nice spec
+version: '1.2.3'
+x-extension: new extension
+`
+
+ var lNode, rNode yaml.Node
+ _ = yaml.Unmarshal([]byte(left), &lNode)
+ _ = yaml.Unmarshal([]byte(right), &rNode)
+
+ // create low level objects
+ var lDoc base.Info
+ var rDoc base.Info
+ _ = low.BuildModel(lNode.Content[0], &lDoc)
+ _ = low.BuildModel(rNode.Content[0], &rDoc)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
+
+ // compare.
+ extChanges := CompareInfo(&lDoc, &rDoc)
+ assert.Equal(t, 1, extChanges.TotalChanges())
+ assert.Len(t, extChanges.GetAllChanges(), 1)
+ assert.Equal(t, Modified, extChanges.ExtensionChanges.Changes[0].ChangeType)
+ assert.Equal(t, "x-extension", extChanges.ExtensionChanges.Changes[0].Property)
+}
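
The three extension tests above share one build-and-compare shape. An equivalent table-driven sketch, assuming the same model package, the ObjectAdded/ObjectRemoved/Modified constants, and the assert helpers used above, could look like this.

package model

import (
	"context"
	"testing"

	"github.com/pb33f/libopenapi/datamodel/low"
	"github.com/pb33f/libopenapi/datamodel/low/base"
	"github.com/stretchr/testify/assert"
	"gopkg.in/yaml.v3"
)

// TestCompareInfo_ExtensionScenarios is a hypothetical consolidation of the
// added/removed/modified extension tests into a single table-driven test.
func TestCompareInfo_ExtensionScenarios(t *testing.T) {
	common := "title: a nice spec\nversion: '1.2.3'\n"
	cases := []struct {
		name  string
		left  string
		right string
		want  any // expected ChangeType constant
	}{
		{"added", common, common + "x-extension: new extension\n", ObjectAdded},
		{"removed", common + "x-extension: extension\n", common, ObjectRemoved},
		{"modified", common + "x-extension: original extension\n", common + "x-extension: new extension\n", Modified},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			var lNode, rNode yaml.Node
			_ = yaml.Unmarshal([]byte(tc.left), &lNode)
			_ = yaml.Unmarshal([]byte(tc.right), &rNode)

			// create low level objects
			var lDoc, rDoc base.Info
			_ = low.BuildModel(lNode.Content[0], &lDoc)
			_ = low.BuildModel(rNode.Content[0], &rDoc)
			_ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
			_ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)

			// compare.
			extChanges := CompareInfo(&lDoc, &rDoc)
			assert.Equal(t, 1, extChanges.TotalChanges())
			assert.Equal(t, tc.want, extChanges.ExtensionChanges.Changes[0].ChangeType)
			assert.Equal(t, "x-extension", extChanges.ExtensionChanges.Changes[0].Property)
		})
	}
}
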
diff --git a/what-changed/model/items_test.go b/what-changed/model/items_test.go
index d394b7c..3f2b09b 100644
--- a/what-changed/model/items_test.go
+++ b/what-changed/model/items_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -27,8 +28,8 @@ func TestCompareItems(t *testing.T) {
var rDoc v2.Items
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
changes := CompareItems(&lDoc, &rDoc)
@@ -58,8 +59,8 @@ items:
var rDoc v2.Items
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
changes := CompareItems(&lDoc, &rDoc)
@@ -88,8 +89,8 @@ items:
var rDoc v2.Items
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
changes := CompareItems(&lDoc, &rDoc)
@@ -118,8 +119,8 @@ items:
var rDoc v2.Items
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
changes := CompareItems(&rDoc, &lDoc)
diff --git a/what-changed/model/license_test.go b/what-changed/model/license_test.go
index 17efc14..56ced0d 100644
--- a/what-changed/model/license_test.go
+++ b/what-changed/model/license_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
lowbase "github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/stretchr/testify/assert"
@@ -27,8 +28,8 @@ url: https://pb33f.io`
var rDoc lowbase.License
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLicense(&lDoc, &rDoc)
@@ -55,8 +56,8 @@ url: https://pb33f.io`
var rDoc lowbase.License
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLicense(&lDoc, &rDoc)
@@ -82,8 +83,8 @@ name: buckaroo`
var rDoc lowbase.License
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLicense(&lDoc, &rDoc)
@@ -109,8 +110,8 @@ name: buckaroo`
var rDoc lowbase.License
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLicense(&lDoc, &rDoc)
@@ -135,8 +136,8 @@ func TestCompareLicense_URLModified(t *testing.T) {
var rDoc lowbase.License
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLicense(&lDoc, &rDoc)
@@ -162,8 +163,8 @@ url: https://pb33f.io`
var rDoc lowbase.License
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLicense(&lDoc, &rDoc)
@@ -190,8 +191,8 @@ url: https://pb33f.io`
var rDoc lowbase.License
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLicense(&lDoc, &rDoc)
diff --git a/what-changed/model/link_test.go b/what-changed/model/link_test.go
index a16a1a3..4723fec 100644
--- a/what-changed/model/link_test.go
+++ b/what-changed/model/link_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
@@ -32,8 +33,8 @@ parameters:
var rDoc v3.Link
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLinks(&lDoc, &rDoc)
@@ -70,8 +71,8 @@ x-cake: very tasty`
var rDoc v3.Link
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLinks(&lDoc, &rDoc)
@@ -109,8 +110,8 @@ parameters:
var rDoc v3.Link
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLinks(&lDoc, &rDoc)
@@ -145,8 +146,8 @@ parameters:
var rDoc v3.Link
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLinks(&lDoc, &rDoc)
@@ -181,8 +182,8 @@ parameters:
var rDoc v3.Link
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLinks(&rDoc, &lDoc)
@@ -219,8 +220,8 @@ parameters:
var rDoc v3.Link
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLinks(&lDoc, &rDoc)
@@ -261,8 +262,8 @@ parameters:
var rDoc v3.Link
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLinks(&lDoc, &rDoc)
@@ -302,8 +303,8 @@ parameters:
var rDoc v3.Link
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareLinks(&rDoc, &lDoc)
diff --git a/what-changed/model/media_type_test.go b/what-changed/model/media_type_test.go
index 6ed9c56..eb98ad3 100644
--- a/what-changed/model/media_type_test.go
+++ b/what-changed/model/media_type_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
@@ -40,8 +41,8 @@ encoding:
var rDoc v3.MediaType
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareMediaTypes(&lDoc, &rDoc)
@@ -77,8 +78,8 @@ encoding:
var rDoc v3.MediaType
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareMediaTypes(&lDoc, &rDoc)
@@ -112,8 +113,8 @@ example:
var rDoc v3.MediaType
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareMediaTypes(&lDoc, &rDoc)
@@ -145,8 +146,8 @@ example:
var rDoc v3.MediaType
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareMediaTypes(&lDoc, &rDoc)
@@ -178,8 +179,8 @@ example:
var rDoc v3.MediaType
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareMediaTypes(&rDoc, &lDoc)
@@ -218,8 +219,8 @@ encoding:
var rDoc v3.MediaType
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareMediaTypes(&lDoc, &rDoc)
@@ -258,8 +259,8 @@ encoding:
var rDoc v3.MediaType
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareMediaTypes(&rDoc, &lDoc)
@@ -304,8 +305,8 @@ x-tea: cup`
var rDoc v3.MediaType
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareMediaTypes(&lDoc, &rDoc)
diff --git a/what-changed/model/oauth_flows_test.go b/what-changed/model/oauth_flows_test.go
index 13432fa..12c1fb5 100644
--- a/what-changed/model/oauth_flows_test.go
+++ b/what-changed/model/oauth_flows_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
@@ -34,8 +35,8 @@ scopes:
var rDoc v3.OAuthFlow
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlow(&lDoc, &rDoc)
@@ -68,8 +69,8 @@ x-burgers: crispy`
var rDoc v3.OAuthFlow
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlow(&lDoc, &rDoc)
@@ -104,8 +105,8 @@ x-burgers: nice`
var rDoc v3.OAuthFlow
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlow(&lDoc, &rDoc)
@@ -142,8 +143,8 @@ x-burgers: nice`
var rDoc v3.OAuthFlow
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlow(&rDoc, &lDoc)
@@ -179,8 +180,8 @@ x-burgers: nice`
var rDoc v3.OAuthFlow
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlow(&lDoc, &rDoc)
@@ -223,8 +224,8 @@ x-coke: cola`
var rDoc v3.OAuthFlows
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlows(&lDoc, &rDoc)
@@ -253,8 +254,8 @@ x-coke: cola`
var rDoc v3.OAuthFlows
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlows(&lDoc, &rDoc)
@@ -285,8 +286,8 @@ x-coke: cola`
var rDoc v3.OAuthFlows
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlows(&rDoc, &lDoc)
@@ -325,8 +326,8 @@ x-coke: cherry`
var rDoc v3.OAuthFlows
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareOAuthFlows(&lDoc, &rDoc)
diff --git a/what-changed/model/operation.go b/what-changed/model/operation.go
index db73e9a..01d58e9 100644
--- a/what-changed/model/operation.go
+++ b/what-changed/model/operation.go
@@ -405,7 +405,6 @@ func CompareOperations(l, r any) *OperationChanges {
oc.ServerChanges = checkServers(lOperation.Servers, rOperation.Servers)
oc.ExtensionChanges = CompareExtensions(lOperation.Extensions, rOperation.Extensions)
- // todo: callbacks
}
CheckProperties(props)
oc.PropertyChanges = NewPropertyChanges(changes)
diff --git a/what-changed/model/operation_test.go b/what-changed/model/operation_test.go
index 654e72a..a148672 100644
--- a/what-changed/model/operation_test.go
+++ b/what-changed/model/operation_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"testing"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -43,8 +44,8 @@ parameters:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -96,8 +97,8 @@ parameters:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -147,8 +148,8 @@ parameters:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -201,8 +202,8 @@ parameters:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -261,8 +262,8 @@ parameters:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -319,8 +320,8 @@ parameters:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -377,8 +378,8 @@ parameters:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -435,8 +436,8 @@ parameters:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -472,8 +473,8 @@ schemes:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -506,8 +507,8 @@ responses:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -548,8 +549,8 @@ responses:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -578,8 +579,8 @@ responses:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -610,8 +611,8 @@ responses:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -646,8 +647,8 @@ security:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -683,8 +684,8 @@ security:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -722,8 +723,8 @@ security:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -761,8 +762,8 @@ security:
var rDoc v2.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -812,8 +813,8 @@ parameters:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -841,8 +842,8 @@ func TestCompareOperations_V3_ModifyParam(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -875,8 +876,8 @@ func TestCompareOperations_V3_AddParam(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -909,8 +910,8 @@ func TestCompareOperations_V3_RemoveParam(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -939,8 +940,8 @@ parameters:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -969,8 +970,8 @@ parameters:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -1000,8 +1001,8 @@ func TestCompareOperations_V3_ModifyServers(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1034,8 +1035,8 @@ func TestCompareOperations_V3_ModifyCallback(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1075,8 +1076,8 @@ func TestCompareOperations_V3_AddCallback(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1110,8 +1111,8 @@ callbacks:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1145,8 +1146,8 @@ callbacks:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -1183,8 +1184,8 @@ func TestCompareOperations_V3_RemoveCallback(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -1212,8 +1213,8 @@ func TestCompareOperations_V3_AddServer(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1241,8 +1242,8 @@ func TestCompareOperations_V3_RemoveServer(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -1270,8 +1271,8 @@ servers:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1299,8 +1300,8 @@ servers:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
@@ -1332,8 +1333,8 @@ security:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1361,8 +1362,8 @@ security:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1390,8 +1391,8 @@ security: []`
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1418,8 +1419,8 @@ func TestCompareOperations_V3_ModifyRequestBody(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1446,8 +1447,8 @@ requestBody:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1472,8 +1473,8 @@ func TestCompareOperations_V3_ModifyExtension(t *testing.T) {
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&lDoc, &rDoc)
@@ -1500,8 +1501,8 @@ requestBody:
var rDoc v3.Operation
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareOperations(&rDoc, &lDoc)
diff --git a/what-changed/model/parameter_test.go b/what-changed/model/parameter_test.go
index a77ef64..f569f46 100644
--- a/what-changed/model/parameter_test.go
+++ b/what-changed/model/parameter_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -26,8 +27,8 @@ func TestCompareParameters(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -48,8 +49,8 @@ func TestCompareParameters_V3(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParametersV3(&lDoc, &rDoc)
@@ -72,8 +73,8 @@ func TestCompareParameters_V3_Schema(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -100,8 +101,8 @@ schema:
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -128,8 +129,8 @@ schema:
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&rDoc, &lDoc)
@@ -154,8 +155,8 @@ func TestCompareParameters_V3_Extensions(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -181,8 +182,8 @@ func TestCompareParameters_V3_ExampleChange(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -205,8 +206,8 @@ func TestCompareParameters_V3_ExampleEqual(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -228,8 +229,8 @@ example: a string`
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -254,8 +255,8 @@ example: a string`
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareParameters(&rDoc, &lDoc)
@@ -283,8 +284,8 @@ func TestCompareParameters_V3_ExamplesChanged(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -315,8 +316,8 @@ func TestCompareParameters_V3_ExamplesAdded(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -347,8 +348,8 @@ func TestCompareParameters_V3_ExamplesRemoved(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareParameters(&rDoc, &lDoc)
@@ -379,8 +380,8 @@ func TestCompareParameters_V3_ContentChanged(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -415,8 +416,8 @@ func TestCompareParameters_V3_ContentAdded(t *testing.T) {
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -440,8 +441,8 @@ func TestCompareParameters_V2_DefaultChange(t *testing.T) {
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -465,8 +466,8 @@ default: wat?`
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -491,8 +492,8 @@ func TestCompareParameters_V2_EnumChange(t *testing.T) {
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -519,8 +520,8 @@ func TestCompareParameters_V2_EnumEqual_Reorder(t *testing.T) {
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -542,8 +543,8 @@ example: a string`
var rDoc v3.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareParameters(&rDoc, &lDoc)
@@ -567,8 +568,8 @@ func TestCompareParameters_V2_Equal(t *testing.T) {
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -589,8 +590,8 @@ func TestCompareParameters_V2(t *testing.T) {
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -613,8 +614,8 @@ func TestCompareParameters_V2_ItemsChange(t *testing.T) {
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -641,8 +642,8 @@ items:
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
@@ -668,8 +669,8 @@ items:
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&rDoc, &lDoc)
@@ -693,8 +694,8 @@ func TestCompareParameters_V2_Extensions(t *testing.T) {
var rDoc v2.Parameter
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareParameters(&lDoc, &rDoc)
diff --git a/what-changed/model/path_item.go b/what-changed/model/path_item.go
index 06dd3d4..af3f67a 100644
--- a/what-changed/model/path_item.go
+++ b/what-changed/model/path_item.go
@@ -235,7 +235,7 @@ func compareSwaggerPathItem(lPath, rPath *v2.PathItem, changes *[]*Change, pc *P
}
if lPath.Get.IsEmpty() && !rPath.Get.IsEmpty() {
CreateChange(changes, PropertyAdded, v3.GetLabel,
- nil, rPath.Get.ValueNode, false, nil, lPath.Get.Value)
+ nil, rPath.Get.ValueNode, false, nil, rPath.Get.Value)
}
// put
diff --git a/what-changed/model/path_item_test.go b/what-changed/model/path_item_test.go
index 2f01056..90d654e 100644
--- a/what-changed/model/path_item_test.go
+++ b/what-changed/model/path_item_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"testing"
"github.com/pb33f/libopenapi/datamodel/low"
@@ -44,8 +45,8 @@ x-thing: thang.`
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -99,8 +100,8 @@ x-thing: ding-a-ling`
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -136,8 +137,8 @@ parameters:
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -177,8 +178,8 @@ parameters:
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -217,8 +218,8 @@ parameters:
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&rDoc, &lDoc)
@@ -252,8 +253,8 @@ parameters:
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -288,8 +289,8 @@ parameters:
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -329,8 +330,8 @@ parameters:
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -369,8 +370,8 @@ parameters:
var rDoc v2.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&rDoc, &lDoc)
@@ -416,8 +417,8 @@ x-thing: thang.`
var rDoc v3.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -484,8 +485,8 @@ x-thing: dang.`
var rDoc v3.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -511,8 +512,8 @@ parameters:
var rDoc v3.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -540,8 +541,8 @@ parameters:
var rDoc v3.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItemsV3(&rDoc, &lDoc)
@@ -583,8 +584,8 @@ trace:
var rDoc v3.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
@@ -624,8 +625,8 @@ trace:
var rDoc v3.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&rDoc, &lDoc)
@@ -657,8 +658,8 @@ func TestComparePathItem_V3_ChangeParam(t *testing.T) {
var rDoc v3.PathItem
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePathItems(&lDoc, &rDoc)
diff --git a/what-changed/model/paths.go b/what-changed/model/paths.go
index e86270e..3324181 100644
--- a/what-changed/model/paths.go
+++ b/what-changed/model/paths.go
@@ -148,11 +148,16 @@ func ComparePaths(l, r any) *PathsChanges {
lKeys := make(map[string]low.ValueReference[*v3.PathItem])
rKeys := make(map[string]low.ValueReference[*v3.PathItem])
- for pair := orderedmap.First(lPath.PathItems); pair != nil; pair = pair.Next() {
- lKeys[pair.Key().Value] = pair.Value()
+
+ if lPath != nil {
+ for pair := orderedmap.First(lPath.PathItems); pair != nil; pair = pair.Next() {
+ lKeys[pair.Key().Value] = pair.Value()
+ }
}
- for pair := orderedmap.First(rPath.PathItems); pair != nil; pair = pair.Next() {
- rKeys[pair.Key().Value] = pair.Value()
+ if rPath != nil {
+ for pair := orderedmap.First(rPath.PathItems); pair != nil; pair = pair.Next() {
+ rKeys[pair.Key().Value] = pair.Value()
+ }
}
// run every comparison in a thread.
@@ -201,7 +206,15 @@ func ComparePaths(l, r any) *PathsChanges {
pc.PathItemsChanges = pathChanges
}
- pc.ExtensionChanges = CompareExtensions(lPath.Extensions, rPath.Extensions)
+ var lExt, rExt map[low.KeyReference[string]]low.ValueReference[any]
+ if lPath != nil {
+ lExt = lPath.Extensions
+ }
+ if rPath != nil {
+ rExt = rPath.Extensions
+ }
+
+ pc.ExtensionChanges = CompareExtensions(lExt, rExt)
}
pc.PropertyChanges = NewPropertyChanges(changes)
return pc
diff --git a/what-changed/model/paths_test.go b/what-changed/model/paths_test.go
index f6a52e6..deda4d7 100644
--- a/what-changed/model/paths_test.go
+++ b/what-changed/model/paths_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -36,8 +37,8 @@ func TestComparePaths_v2(t *testing.T) {
var rDoc v2.Paths
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePaths(&rDoc, &lDoc)
@@ -78,8 +79,8 @@ x-windows: washed
var rDoc v2.Paths
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePaths(&lDoc, &rDoc)
@@ -118,8 +119,8 @@ x-windows: dirty
var rDoc v2.Paths
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePaths(&lDoc, &rDoc)
@@ -160,8 +161,8 @@ x-windows: dirty
var rDoc v2.Paths
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePaths(&rDoc, &lDoc)
@@ -195,8 +196,8 @@ func TestComparePaths_v3(t *testing.T) {
var rDoc v3.Paths
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePaths(&rDoc, &lDoc)
@@ -237,8 +238,8 @@ x-windows: washed
var rDoc v3.Paths
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePaths(&lDoc, &rDoc)
@@ -284,8 +285,8 @@ x-windows: dirty`
var rDoc v3.Paths
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePaths(&lDoc, &rDoc)
@@ -333,8 +334,8 @@ x-windows: dirty`
var rDoc v3.Paths
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := ComparePaths(&rDoc, &lDoc)
diff --git a/what-changed/model/request_body_test.go b/what-changed/model/request_body_test.go
index 0235698..3b47642 100644
--- a/what-changed/model/request_body_test.go
+++ b/what-changed/model/request_body_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
@@ -36,8 +37,8 @@ content:
var rDoc v3.RequestBody
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareRequestBodies(&lDoc, &rDoc)
@@ -71,8 +72,8 @@ content:
var rDoc v3.RequestBody
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareRequestBodies(&lDoc, &rDoc)
diff --git a/what-changed/model/response_test.go b/what-changed/model/response_test.go
index f7b84a5..14ebace 100644
--- a/what-changed/model/response_test.go
+++ b/what-changed/model/response_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -36,8 +37,8 @@ x-toot: poot`
var rDoc v2.Response
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponse(&lDoc, &rDoc)
assert.Nil(t, extChanges)
@@ -74,8 +75,8 @@ x-toot: poot`
var rDoc v2.Response
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponse(&lDoc, &rDoc)
assert.Equal(t, 5, extChanges.TotalChanges())
@@ -108,8 +109,8 @@ examples:
var rDoc v2.Response
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponse(&lDoc, &rDoc)
assert.Equal(t, 2, extChanges.TotalChanges())
@@ -142,8 +143,8 @@ examples:
var rDoc v2.Response
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponse(&rDoc, &lDoc)
assert.Equal(t, 2, extChanges.TotalChanges())
@@ -176,8 +177,8 @@ x-toot: poot`
var rDoc v3.Response
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponse(&lDoc, &rDoc)
assert.Nil(t, extChanges)
@@ -222,8 +223,8 @@ x-toot: pooty`
var rDoc v3.Response
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponse(&lDoc, &rDoc)
diff --git a/what-changed/model/responses_test.go b/what-changed/model/responses_test.go
index 5732bae..131fa47 100644
--- a/what-changed/model/responses_test.go
+++ b/what-changed/model/responses_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -38,8 +39,8 @@ default:
var rDoc v2.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Nil(t, extChanges)
@@ -76,8 +77,8 @@ x-ting: tang`
var rDoc v2.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Equal(t, 2, extChanges.TotalChanges())
@@ -117,8 +118,8 @@ x-apple: pie`
var rDoc v2.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&rDoc, &lDoc)
assert.Equal(t, 2, extChanges.TotalChanges())
@@ -153,8 +154,8 @@ func TestCompareResponses_V2_RemoveSchema(t *testing.T) {
var rDoc v2.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Equal(t, 1, extChanges.TotalChanges())
@@ -187,8 +188,8 @@ default:
var rDoc v2.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Equal(t, 1, extChanges.TotalChanges())
@@ -221,8 +222,8 @@ default:
var rDoc v2.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&rDoc, &lDoc)
assert.Equal(t, 1, extChanges.TotalChanges())
@@ -259,8 +260,8 @@ default:
var rDoc v2.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Equal(t, 2, extChanges.TotalChanges())
@@ -289,8 +290,8 @@ default:
var rDoc v3.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Nil(t, extChanges)
@@ -323,8 +324,8 @@ x-coffee: yum
var rDoc v3.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Equal(t, 4, extChanges.TotalChanges())
@@ -357,8 +358,8 @@ default:
var rDoc v3.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Equal(t, 1, extChanges.TotalChanges())
@@ -392,8 +393,8 @@ default:
var rDoc v3.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&rDoc, &lDoc)
assert.Equal(t, 1, extChanges.TotalChanges())
@@ -429,8 +430,8 @@ default:
var rDoc v3.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Equal(t, 1, extChanges.TotalChanges())
@@ -462,8 +463,8 @@ func TestCompareResponses_V3_AddRemoveMediaType(t *testing.T) {
var rDoc v3.Responses
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
extChanges := CompareResponses(&lDoc, &rDoc)
assert.Equal(t, 2, extChanges.TotalChanges())
diff --git a/what-changed/model/schema.go b/what-changed/model/schema.go
index 2f2045b..b78a234 100644
--- a/what-changed/model/schema.go
+++ b/what-changed/model/schema.go
@@ -8,12 +8,12 @@ import (
"sort"
"sync"
+ "golang.org/x/exp/slices"
+
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/pb33f/libopenapi/orderedmap"
- "github.com/pb33f/libopenapi/utils"
- "golang.org/x/exp/slices"
"gopkg.in/yaml.v3"
)
@@ -25,16 +25,17 @@ import (
// PropertyChanges.Changes, and not in the AnyOfChanges property.
type SchemaChanges struct {
*PropertyChanges
- DiscriminatorChanges *DiscriminatorChanges `json:"discriminator,omitempty" yaml:"discriminator,omitempty"`
- AllOfChanges []*SchemaChanges `json:"allOf,omitempty" yaml:"allOf,omitempty"`
- AnyOfChanges []*SchemaChanges `json:"anyOf,omitempty" yaml:"anyOf,omitempty"`
- OneOfChanges []*SchemaChanges `json:"oneOf,omitempty" yaml:"oneOf,omitempty"`
- NotChanges *SchemaChanges `json:"not,omitempty" yaml:"not,omitempty"`
- ItemsChanges *SchemaChanges `json:"items,omitempty" yaml:"items,omitempty"`
- SchemaPropertyChanges map[string]*SchemaChanges `json:"properties,omitempty" yaml:"properties,omitempty"`
- ExternalDocChanges *ExternalDocChanges `json:"externalDoc,omitempty" yaml:"externalDoc,omitempty"`
- XMLChanges *XMLChanges `json:"xml,omitempty" yaml:"xml,omitempty"`
- ExtensionChanges *ExtensionChanges `json:"extensions,omitempty" yaml:"extensions,omitempty"`
+ DiscriminatorChanges *DiscriminatorChanges `json:"discriminator,omitempty" yaml:"discriminator,omitempty"`
+ AllOfChanges []*SchemaChanges `json:"allOf,omitempty" yaml:"allOf,omitempty"`
+ AnyOfChanges []*SchemaChanges `json:"anyOf,omitempty" yaml:"anyOf,omitempty"`
+ OneOfChanges []*SchemaChanges `json:"oneOf,omitempty" yaml:"oneOf,omitempty"`
+ NotChanges *SchemaChanges `json:"not,omitempty" yaml:"not,omitempty"`
+ ItemsChanges *SchemaChanges `json:"items,omitempty" yaml:"items,omitempty"`
+ SchemaPropertyChanges map[string]*SchemaChanges `json:"properties,omitempty" yaml:"properties,omitempty"`
+ ExternalDocChanges *ExternalDocChanges `json:"externalDoc,omitempty" yaml:"externalDoc,omitempty"`
+ XMLChanges *XMLChanges `json:"xml,omitempty" yaml:"xml,omitempty"`
+ ExtensionChanges *ExtensionChanges `json:"extensions,omitempty" yaml:"extensions,omitempty"`
+ AdditionalPropertiesChanges *SchemaChanges `json:"additionalProperties,omitempty" yaml:"additionalProperties,omitempty"`
// 3.1 specifics
IfChanges *SchemaChanges `json:"if,omitempty" yaml:"if,omitempty"`
@@ -103,6 +104,9 @@ func (s *SchemaChanges) GetAllChanges() []*Change {
if s.UnevaluatedPropertiesChanges != nil {
changes = append(changes, s.UnevaluatedPropertiesChanges.GetAllChanges()...)
}
+ if s.AdditionalPropertiesChanges != nil {
+ changes = append(changes, s.AdditionalPropertiesChanges.GetAllChanges()...)
+ }
if s.SchemaPropertyChanges != nil {
for n := range s.SchemaPropertyChanges {
if s.SchemaPropertyChanges[n] != nil {
@@ -186,6 +190,9 @@ func (s *SchemaChanges) TotalChanges() int {
if s.UnevaluatedPropertiesChanges != nil {
t += s.UnevaluatedPropertiesChanges.TotalChanges()
}
+ if s.AdditionalPropertiesChanges != nil {
+ t += s.AdditionalPropertiesChanges.TotalChanges()
+ }
if s.SchemaPropertyChanges != nil {
for n := range s.SchemaPropertyChanges {
if s.SchemaPropertyChanges[n] != nil {
@@ -268,6 +275,9 @@ func (s *SchemaChanges) TotalBreakingChanges() int {
if s.UnevaluatedPropertiesChanges != nil {
t += s.UnevaluatedPropertiesChanges.TotalBreakingChanges()
}
+ if s.AdditionalPropertiesChanges != nil {
+ t += s.AdditionalPropertiesChanges.TotalBreakingChanges()
+ }
if s.DependentSchemasChanges != nil {
for n := range s.DependentSchemasChanges {
t += s.DependentSchemasChanges[n].TotalBreakingChanges()
@@ -729,18 +739,36 @@ func checkSchemaPropertyChanges(
New: rSchema,
})
- // AdditionalProperties (only if not an object)
- if !utils.IsNodeMap(lSchema.AdditionalProperties.ValueNode) &&
- !utils.IsNodeMap(rSchema.AdditionalProperties.ValueNode) {
- props = append(props, &PropertyCheck{
- LeftNode: lSchema.AdditionalProperties.ValueNode,
- RightNode: rSchema.AdditionalProperties.ValueNode,
- Label: v3.AdditionalPropertiesLabel,
- Changes: changes,
- Breaking: false,
- Original: lSchema,
- New: rSchema,
- })
+ // AdditionalProperties
+ if lSchema.AdditionalProperties.Value != nil && rSchema.AdditionalProperties.Value != nil {
+ if lSchema.AdditionalProperties.Value.IsA() && rSchema.AdditionalProperties.Value.IsA() {
+ if !low.AreEqual(lSchema.AdditionalProperties.Value.A, rSchema.AdditionalProperties.Value.A) {
+ sc.AdditionalPropertiesChanges = CompareSchemas(lSchema.AdditionalProperties.Value.A, rSchema.AdditionalProperties.Value.A)
+ }
+ } else {
+ if lSchema.AdditionalProperties.Value.IsB() && rSchema.AdditionalProperties.Value.IsB() {
+ if lSchema.AdditionalProperties.Value.B != rSchema.AdditionalProperties.Value.B {
+ CreateChange(changes, Modified, v3.AdditionalPropertiesLabel,
+ lSchema.AdditionalProperties.ValueNode, rSchema.AdditionalProperties.ValueNode, true,
+ lSchema.AdditionalProperties.Value.B, rSchema.AdditionalProperties.Value.B)
+ }
+ } else {
+ CreateChange(changes, Modified, v3.AdditionalPropertiesLabel,
+ lSchema.AdditionalProperties.ValueNode, rSchema.AdditionalProperties.ValueNode, true,
+ lSchema.AdditionalProperties.Value.B, rSchema.AdditionalProperties.Value.B)
+ }
+ }
+ }
+
+ // AdditionalProperties added (nil on the left, set on the right)
+ if lSchema.AdditionalProperties.Value == nil && rSchema.AdditionalProperties.Value != nil {
+ CreateChange(changes, ObjectAdded, v3.AdditionalPropertiesLabel,
+ nil, rSchema.AdditionalProperties.ValueNode, true, nil, rSchema.AdditionalProperties.Value)
+ }
+ // AdditionalProperties removed (set on the left, nil on the right)
+ if lSchema.AdditionalProperties.Value != nil && rSchema.AdditionalProperties.Value == nil {
+ CreateChange(changes, ObjectRemoved, v3.AdditionalPropertiesLabel,
+ lSchema.AdditionalProperties.ValueNode, nil, true, lSchema.AdditionalProperties.Value, nil)
}
// Description
@@ -1109,20 +1137,6 @@ func checkSchemaPropertyChanges(
// check extensions
sc.ExtensionChanges = CompareExtensions(lSchema.Extensions, rSchema.Extensions)
- // if additional properties is an object, then hash it
- // AdditionalProperties (only if not an object)
- if utils.IsNodeMap(lSchema.AdditionalProperties.ValueNode) ||
- utils.IsNodeMap(rSchema.AdditionalProperties.ValueNode) {
-
- lHash := low.GenerateHashString(lSchema.AdditionalProperties.ValueNode)
- rHash := low.GenerateHashString(rSchema.AdditionalProperties.ValueNode)
- if lHash != rHash {
- CreateChange(changes, Modified, v3.AdditionalPropertiesLabel,
- lSchema.AdditionalProperties.ValueNode, rSchema.AdditionalProperties.ValueNode, false,
- lSchema.AdditionalProperties.Value, rSchema.AdditionalProperties.Value)
- }
- }
-
// check core properties
CheckProperties(props)
}
diff --git a/what-changed/model/schema_test.go b/what-changed/model/schema_test.go
index 10c3a54..578f825 100644
--- a/what-changed/model/schema_test.go
+++ b/what-changed/model/schema_test.go
@@ -5,13 +5,15 @@ package model
import (
"fmt"
+ "github.com/pb33f/libopenapi/utils"
+ "testing"
+
"github.com/pb33f/libopenapi/datamodel"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
- "testing"
)
// These tests require full documents to be tested properly. schemas are perhaps the most complex
@@ -158,28 +160,27 @@ components:
}
func test_BuildDoc(l, r string) (*v3.Document, *v3.Document) {
-
leftInfo, _ := datamodel.ExtractSpecInfo([]byte(l))
rightInfo, _ := datamodel.ExtractSpecInfo([]byte(r))
- leftDoc, _ := v3.CreateDocument(leftInfo)
- rightDoc, _ := v3.CreateDocument(rightInfo)
+ leftDoc, _ := v3.CreateDocumentFromConfig(leftInfo, datamodel.NewDocumentConfiguration())
+ rightDoc, _ := v3.CreateDocumentFromConfig(rightInfo, datamodel.NewDocumentConfiguration())
return leftDoc, rightDoc
}
func test_BuildDocv2(l, r string) (*v2.Swagger, *v2.Swagger) {
-
leftInfo, _ := datamodel.ExtractSpecInfo([]byte(l))
rightInfo, _ := datamodel.ExtractSpecInfo([]byte(r))
- var err []error
+ var err error
var leftDoc, rightDoc *v2.Swagger
- leftDoc, err = v2.CreateDocument(leftInfo)
- rightDoc, err = v2.CreateDocument(rightInfo)
+ leftDoc, err = v2.CreateDocumentFromConfig(leftInfo, datamodel.NewDocumentConfiguration())
+ rightDoc, err = v2.CreateDocumentFromConfig(rightInfo, datamodel.NewDocumentConfiguration())
- if len(err) > 0 {
- for i := range err {
- fmt.Printf("error: %v\n", err[i])
+ uErr := utils.UnwrapErrors(err)
+ if len(uErr) > 0 {
+ for i := range uErr {
+ fmt.Printf("error: %v\n", uErr[i])
}
panic("failed to create doc")
}
@@ -278,7 +279,6 @@ components:
assert.Equal(t, Modified, changes.Changes[0].ChangeType)
assert.Equal(t, v3.RefLabel, changes.Changes[0].Property)
assert.Equal(t, "#/components/schemas/Yo", changes.Changes[0].Original)
-
}
func TestCompareSchemas_InlineToRef(t *testing.T) {
@@ -311,7 +311,6 @@ components:
assert.Equal(t, Modified, changes.Changes[0].ChangeType)
assert.Equal(t, v3.RefLabel, changes.Changes[0].Property)
assert.Equal(t, "#/components/schemas/Yo", changes.Changes[0].New)
-
}
func TestCompareSchemas_Identical(t *testing.T) {
@@ -1222,6 +1221,93 @@ components:
assert.Equal(t, v3.UnevaluatedPropertiesLabel, changes.Changes[0].Property)
}
+func TestCompareSchemas_AdditionalProperties(t *testing.T) {
+ left := `openapi: 3.1
+components:
+ schemas:
+ OK:
+ additionalProperties:
+ type: string`
+
+ right := `openapi: 3.1
+components:
+ schemas:
+ OK:
+ additionalProperties:
+ type: int`
+
+ leftDoc, rightDoc := test_BuildDoc(left, right)
+
+ // extract the left and right 'OK' schemas.
+ lSchemaProxy := leftDoc.Components.Value.FindSchema("OK").Value
+ rSchemaProxy := rightDoc.Components.Value.FindSchema("OK").Value
+
+ changes := CompareSchemas(lSchemaProxy, rSchemaProxy)
+ assert.NotNil(t, changes)
+ assert.Equal(t, 1, changes.TotalChanges())
+ assert.Len(t, changes.GetAllChanges(), 1)
+ assert.Equal(t, 1, changes.TotalBreakingChanges())
+ assert.Equal(t, 1, changes.AdditionalPropertiesChanges.PropertyChanges.TotalChanges())
+}
+
+func TestCompareSchemas_AdditionalProperties_Added(t *testing.T) {
+ left := `openapi: 3.1
+components:
+ schemas:
+ OK:
+ type: string`
+
+ right := `openapi: 3.1
+components:
+ schemas:
+ OK:
+ type: string
+ additionalProperties:
+ type: int`
+
+ leftDoc, rightDoc := test_BuildDoc(left, right)
+
+ // extract the left and right 'OK' schemas.
+ lSchemaProxy := leftDoc.Components.Value.FindSchema("OK").Value
+ rSchemaProxy := rightDoc.Components.Value.FindSchema("OK").Value
+
+ changes := CompareSchemas(lSchemaProxy, rSchemaProxy)
+ assert.NotNil(t, changes)
+ assert.Equal(t, 1, changes.TotalChanges())
+ assert.Len(t, changes.GetAllChanges(), 1)
+ assert.Equal(t, 1, changes.TotalBreakingChanges())
+ assert.Equal(t, v3.AdditionalPropertiesLabel, changes.Changes[0].Property)
+}
+
+func TestCompareSchemas_AdditionalProperties_Removed(t *testing.T) {
+ left := `openapi: 3.1
+components:
+ schemas:
+ OK:
+ type: string`
+
+ right := `openapi: 3.1
+components:
+ schemas:
+ OK:
+ type: string
+ additionalProperties:
+ type: int`
+
+ leftDoc, rightDoc := test_BuildDoc(left, right)
+
+ // extract the left and right 'OK' schemas.
+ lSchemaProxy := leftDoc.Components.Value.FindSchema("OK").Value
+ rSchemaProxy := rightDoc.Components.Value.FindSchema("OK").Value
+
+ changes := CompareSchemas(rSchemaProxy, lSchemaProxy)
+ assert.NotNil(t, changes)
+ assert.Equal(t, 1, changes.TotalChanges())
+ assert.Len(t, changes.GetAllChanges(), 1)
+ assert.Equal(t, 1, changes.TotalBreakingChanges())
+ assert.Equal(t, v3.AdditionalPropertiesLabel, changes.Changes[0].Property)
+}
+
func TestCompareSchemas_UnevaluatedItems(t *testing.T) {
left := `openapi: 3.1
components:
@@ -1611,7 +1697,6 @@ components:
assert.Equal(t, Modified, changes.AnyOfChanges[0].Changes[0].ChangeType)
assert.Equal(t, "string", changes.AnyOfChanges[0].Changes[0].New)
assert.Equal(t, "bool", changes.AnyOfChanges[0].Changes[0].Original)
-
}
func TestCompareSchemas_OneOfModifyAndAddItem(t *testing.T) {
@@ -1848,7 +1933,6 @@ components:
assert.Equal(t, ObjectAdded, changes.Changes[0].ChangeType)
assert.Equal(t, "0e563831440581c713657dd857a0ec3af1bd7308a43bd3cae9184f61d61b288f",
low.HashToString(changes.Changes[0].NewObject.(*base.Discriminator).Hash()))
-
}
func TestCompareSchemas_DiscriminatorRemove(t *testing.T) {
@@ -1881,7 +1965,6 @@ components:
assert.Equal(t, ObjectRemoved, changes.Changes[0].ChangeType)
assert.Equal(t, "0e563831440581c713657dd857a0ec3af1bd7308a43bd3cae9184f61d61b288f",
low.HashToString(changes.Changes[0].OriginalObject.(*base.Discriminator).Hash()))
-
}
func TestCompareSchemas_ExternalDocsChange(t *testing.T) {
@@ -1948,7 +2031,6 @@ components:
assert.Equal(t, ObjectAdded, changes.Changes[0].ChangeType)
assert.Equal(t, "2b7adf30f2ea3a7617ccf429a099617a9c03e8b5f3a23a89dba4b90f760010d7",
low.HashToString(changes.Changes[0].NewObject.(*base.ExternalDoc).Hash()))
-
}
func TestCompareSchemas_ExternalDocsRemove(t *testing.T) {
@@ -1981,7 +2063,6 @@ components:
assert.Equal(t, ObjectRemoved, changes.Changes[0].ChangeType)
assert.Equal(t, "2b7adf30f2ea3a7617ccf429a099617a9c03e8b5f3a23a89dba4b90f760010d7",
low.HashToString(changes.Changes[0].OriginalObject.(*base.ExternalDoc).Hash()))
-
}
func TestCompareSchemas_AddExtension(t *testing.T) {
@@ -2402,7 +2483,6 @@ components:
assert.Equal(t, 1, changes.TotalChanges())
assert.Len(t, changes.GetAllChanges(), 1)
assert.Equal(t, 1, changes.TotalBreakingChanges())
-
}
func TestCompareSchemas_SchemaAdditionalPropertiesCheck(t *testing.T) {
@@ -2432,7 +2512,6 @@ components:
assert.Equal(t, 1, changes.TotalChanges())
assert.Len(t, changes.GetAllChanges(), 1)
assert.Equal(t, 0, changes.TotalBreakingChanges())
-
}
func TestCompareSchemas_Schema_DeletePoly(t *testing.T) {
@@ -2466,7 +2545,6 @@ components:
assert.Equal(t, 1, changes.TotalChanges())
assert.Len(t, changes.GetAllChanges(), 1)
assert.Equal(t, 1, changes.TotalBreakingChanges())
-
}
func TestCompareSchemas_Schema_AddExamplesArray_AllOf(t *testing.T) {
@@ -2499,7 +2577,6 @@ components:
assert.Equal(t, 1, changes.TotalChanges())
assert.Len(t, changes.GetAllChanges(), 1)
assert.Equal(t, 0, changes.TotalBreakingChanges())
-
}
func TestCompareSchemas_Schema_AddExampleMap_AllOf(t *testing.T) {
@@ -2566,7 +2643,6 @@ components:
assert.Equal(t, 1, changes.TotalChanges())
assert.Len(t, changes.GetAllChanges(), 1)
assert.Equal(t, 0, changes.TotalBreakingChanges())
-
}
func TestCompareSchemas_Schema_AddExamplesMap(t *testing.T) {
@@ -2601,7 +2677,6 @@ components:
assert.Equal(t, 1, changes.TotalChanges())
assert.Len(t, changes.GetAllChanges(), 1)
assert.Equal(t, 0, changes.TotalBreakingChanges())
-
}
func TestCompareSchemas_Schema_AddExamples(t *testing.T) {
@@ -2661,7 +2736,6 @@ components:
assert.Equal(t, 1, changes.TotalChanges())
assert.Len(t, changes.GetAllChanges(), 1)
assert.Equal(t, 0, changes.TotalBreakingChanges())
-
}
func TestCompareSchemas_CheckIssue_170(t *testing.T) {
diff --git a/what-changed/model/scopes_test.go b/what-changed/model/scopes_test.go
index acb7e62..c154d23 100644
--- a/what-changed/model/scopes_test.go
+++ b/what-changed/model/scopes_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/stretchr/testify/assert"
@@ -29,8 +30,8 @@ x-nugget: chicken`
var rDoc v2.Scopes
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareScopes(&lDoc, &rDoc)
@@ -55,8 +56,8 @@ x-nugget: chicken`
var rDoc v2.Scopes
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareScopes(&lDoc, &rDoc)
@@ -84,8 +85,8 @@ x-nugget: chicken`
var rDoc v2.Scopes
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareScopes(&lDoc, &rDoc)
@@ -114,8 +115,8 @@ x-nugget: soup`
var rDoc v2.Scopes
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareScopes(&rDoc, &lDoc)
diff --git a/what-changed/model/security_requirement_test.go b/what-changed/model/security_requirement_test.go
index 6c3cc50..b55963f 100644
--- a/what-changed/model/security_requirement_test.go
+++ b/what-changed/model/security_requirement_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/base"
"github.com/stretchr/testify/assert"
@@ -30,8 +31,8 @@ func TestCompareSecurityRequirement_V2(t *testing.T) {
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
@@ -63,8 +64,8 @@ biscuit:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
@@ -96,8 +97,8 @@ biscuit:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&rDoc, &lDoc)
@@ -129,8 +130,8 @@ milk:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
@@ -166,8 +167,8 @@ milk:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
@@ -201,8 +202,8 @@ biscuit:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
@@ -239,8 +240,8 @@ biscuit:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
@@ -273,8 +274,8 @@ biscuit:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
@@ -307,8 +308,8 @@ biscuit:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
@@ -339,8 +340,8 @@ biscuit:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&rDoc, &lDoc)
@@ -375,8 +376,8 @@ biscuit:
var rDoc base.SecurityRequirement
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecurityRequirement(&lDoc, &rDoc)
diff --git a/what-changed/model/security_scheme_test.go b/what-changed/model/security_scheme_test.go
index fd8fb75..989ae28 100644
--- a/what-changed/model/security_scheme_test.go
+++ b/what-changed/model/security_scheme_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
"github.com/pb33f/libopenapi/datamodel/low/v2"
"github.com/pb33f/libopenapi/datamodel/low/v3"
@@ -37,8 +38,8 @@ x-beer: tasty`
var rDoc v2.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&lDoc, &rDoc)
@@ -66,8 +67,8 @@ x-beer: very tasty`
var rDoc v2.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&lDoc, &rDoc)
@@ -98,8 +99,8 @@ scopes:
var rDoc v2.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&lDoc, &rDoc)
@@ -128,8 +129,8 @@ scopes:
var rDoc v2.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&rDoc, &lDoc)
@@ -158,8 +159,8 @@ func TestCompareSecuritySchemes_v2_ModifyScope(t *testing.T) {
var rDoc v2.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&lDoc, &rDoc)
@@ -193,8 +194,8 @@ description: a thing`
var rDoc v3.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&lDoc, &rDoc)
@@ -224,8 +225,8 @@ x-beer: cool`
var rDoc v3.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&lDoc, &rDoc)
@@ -257,8 +258,8 @@ flows:
var rDoc v3.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&lDoc, &rDoc)
@@ -286,8 +287,8 @@ flows:
var rDoc v3.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&rDoc, &lDoc)
@@ -318,8 +319,8 @@ flows:
var rDoc v3.SecurityScheme
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare
extChanges := CompareSecuritySchemes(&lDoc, &rDoc)
diff --git a/what-changed/model/server_test.go b/what-changed/model/server_test.go
index a4961fe..8e83be9 100644
--- a/what-changed/model/server_test.go
+++ b/what-changed/model/server_test.go
@@ -4,6 +4,7 @@
package model
import (
+ "context"
"github.com/pb33f/libopenapi/datamodel/low"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
@@ -40,8 +41,8 @@ variables:
var rDoc v3.Server
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareServers(&lDoc, &rDoc)
@@ -77,8 +78,8 @@ variables:
var rDoc v3.Server
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareServers(&lDoc, &rDoc)
@@ -115,8 +116,8 @@ variables:
var rDoc v3.Server
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareServers(&lDoc, &rDoc)
@@ -155,8 +156,8 @@ variables:
var rDoc v3.Server
_ = low.BuildModel(lNode.Content[0], &lDoc)
_ = low.BuildModel(rNode.Content[0], &rDoc)
- _ = lDoc.Build(nil, lNode.Content[0], nil)
- _ = rDoc.Build(nil, rNode.Content[0], nil)
+ _ = lDoc.Build(context.Background(), nil, lNode.Content[0], nil)
+ _ = rDoc.Build(context.Background(), nil, rNode.Content[0], nil)
// compare.
extChanges := CompareServers(&rDoc, &lDoc)
diff --git a/what-changed/reports/summary_test.go b/what-changed/reports/summary_test.go
index 0fc6c34..6ba45ea 100644
--- a/what-changed/reports/summary_test.go
+++ b/what-changed/reports/summary_test.go
@@ -4,17 +4,18 @@
package reports
import (
+ "os"
+ "testing"
+
"github.com/pb33f/libopenapi"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/pb33f/libopenapi/what-changed/model"
"github.com/stretchr/testify/assert"
- "io/ioutil"
- "testing"
)
func createDiff() *model.DocumentChanges {
- burgerShopOriginal, _ := ioutil.ReadFile("../../test_specs/burgershop.openapi.yaml")
- burgerShopUpdated, _ := ioutil.ReadFile("../../test_specs/burgershop.openapi-modified.yaml")
+ burgerShopOriginal, _ := os.ReadFile("../../test_specs/burgershop.openapi.yaml")
+ burgerShopUpdated, _ := os.ReadFile("../../test_specs/burgershop.openapi-modified.yaml")
originalDoc, _ := libopenapi.NewDocument(burgerShopOriginal)
updatedDoc, _ := libopenapi.NewDocument(burgerShopUpdated)
documentChanges, _ := libopenapi.CompareDocuments(originalDoc, updatedDoc)
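The what-changed tests that follow make two further migrations: ioutil.ReadFile is replaced by os.ReadFile, and the low-level documents are now built with CreateDocumentFromConfig plus a default DocumentConfiguration instead of CreateDocument. A minimal sketch of that combined pattern outside the test harness is shown here; the spec paths are placeholders, the import alias for the what-changed package is an assumption, and error handling is elided as in the tests.

```go
package main

import (
	"fmt"
	"os"

	"github.com/pb33f/libopenapi/datamodel"
	v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
	whatchanged "github.com/pb33f/libopenapi/what-changed"
)

func main() {
	// os.ReadFile replaces the deprecated ioutil.ReadFile (placeholder paths)
	original, _ := os.ReadFile("original.openapi.yaml")
	modified, _ := os.ReadFile("modified.openapi.yaml")

	infoOrig, _ := datamodel.ExtractSpecInfo(original)
	infoMod, _ := datamodel.ExtractSpecInfo(modified)

	// low-level documents are now created from a DocumentConfiguration;
	// NewDocumentConfiguration() supplies the defaults used by these tests.
	origDoc, _ := v3.CreateDocumentFromConfig(infoOrig, datamodel.NewDocumentConfiguration())
	modDoc, _ := v3.CreateDocumentFromConfig(infoMod, datamodel.NewDocumentConfiguration())

	// compare the two low-level documents, guarding against a nil result
	if changes := whatchanged.CompareOpenAPIDocuments(origDoc, modDoc); changes != nil {
		fmt.Println("total:", changes.TotalChanges(), "breaking:", changes.TotalBreakingChanges())
	}
}
```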
diff --git a/what-changed/what_changed_test.go b/what-changed/what_changed_test.go
index a23e9e8..a2c8fcb 100644
--- a/what-changed/what_changed_test.go
+++ b/what-changed/what_changed_test.go
@@ -5,59 +5,60 @@ package what_changed
import (
"fmt"
+ "os"
+ "testing"
+
"github.com/pb33f/libopenapi/datamodel"
v2 "github.com/pb33f/libopenapi/datamodel/low/v2"
v3 "github.com/pb33f/libopenapi/datamodel/low/v3"
"github.com/stretchr/testify/assert"
- "io/ioutil"
- "testing"
)
func TestCompareOpenAPIDocuments(t *testing.T) {
- original, _ := ioutil.ReadFile("../test_specs/burgershop.openapi.yaml")
- modified, _ := ioutil.ReadFile("../test_specs/burgershop.openapi-modified.yaml")
+ original, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
+ modified, _ := os.ReadFile("../test_specs/burgershop.openapi-modified.yaml")
infoOrig, _ := datamodel.ExtractSpecInfo(original)
infoMod, _ := datamodel.ExtractSpecInfo(modified)
- origDoc, _ := v3.CreateDocument(infoOrig)
- modDoc, _ := v3.CreateDocument(infoMod)
+ origDoc, _ := v3.CreateDocumentFromConfig(infoOrig, datamodel.NewDocumentConfiguration())
+ modDoc, _ := v3.CreateDocumentFromConfig(infoMod, datamodel.NewDocumentConfiguration())
changes := CompareOpenAPIDocuments(origDoc, modDoc)
assert.Equal(t, 75, changes.TotalChanges())
assert.Equal(t, 19, changes.TotalBreakingChanges())
//out, _ := json.MarshalIndent(changes, "", " ")
- //_ = ioutil.WriteFile("outputv3.json", out, 0776)
+ //_ = os.WriteFile("outputv3.json", out, 0776)
}
func TestCompareSwaggerDocuments(t *testing.T) {
- original, _ := ioutil.ReadFile("../test_specs/petstorev2-complete.yaml")
- modified, _ := ioutil.ReadFile("../test_specs/petstorev2-complete-modified.yaml")
+ original, _ := os.ReadFile("../test_specs/petstorev2-complete.yaml")
+ modified, _ := os.ReadFile("../test_specs/petstorev2-complete-modified.yaml")
infoOrig, _ := datamodel.ExtractSpecInfo(original)
infoMod, _ := datamodel.ExtractSpecInfo(modified)
- origDoc, _ := v2.CreateDocument(infoOrig)
- modDoc, _ := v2.CreateDocument(infoMod)
+ origDoc, _ := v2.CreateDocumentFromConfig(infoOrig, datamodel.NewDocumentConfiguration())
+ modDoc, _ := v2.CreateDocumentFromConfig(infoMod, datamodel.NewDocumentConfiguration())
changes := CompareSwaggerDocuments(origDoc, modDoc)
assert.Equal(t, 52, changes.TotalChanges())
assert.Equal(t, 27, changes.TotalBreakingChanges())
//out, _ := json.MarshalIndent(changes, "", " ")
- //_ = ioutil.WriteFile("output.json", out, 0776)
+ //_ = os.WriteFile("output.json", out, 0776)
}
func Benchmark_CompareOpenAPIDocuments(b *testing.B) {
- original, _ := ioutil.ReadFile("../test_specs/burgershop.openapi.yaml")
- modified, _ := ioutil.ReadFile("../test_specs/burgershop.openapi-modified.yaml")
+ original, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
+ modified, _ := os.ReadFile("../test_specs/burgershop.openapi-modified.yaml")
infoOrig, _ := datamodel.ExtractSpecInfo(original)
infoMod, _ := datamodel.ExtractSpecInfo(modified)
- origDoc, _ := v3.CreateDocument(infoOrig)
- modDoc, _ := v3.CreateDocument(infoMod)
+ origDoc, _ := v3.CreateDocumentFromConfig(infoOrig, datamodel.NewDocumentConfiguration())
+ modDoc, _ := v3.CreateDocumentFromConfig(infoMod, datamodel.NewDocumentConfiguration())
for i := 0; i < b.N; i++ {
CompareOpenAPIDocuments(origDoc, modDoc)
@@ -66,13 +67,13 @@ func Benchmark_CompareOpenAPIDocuments(b *testing.B) {
func Benchmark_CompareSwaggerDocuments(b *testing.B) {
- original, _ := ioutil.ReadFile("../test_specs/petstorev2-complete.yaml")
- modified, _ := ioutil.ReadFile("../test_specs/petstorev2-complete-modified.yaml")
+ original, _ := os.ReadFile("../test_specs/petstorev2-complete.yaml")
+ modified, _ := os.ReadFile("../test_specs/petstorev2-complete-modified.yaml")
infoOrig, _ := datamodel.ExtractSpecInfo(original)
infoMod, _ := datamodel.ExtractSpecInfo(modified)
- origDoc, _ := v2.CreateDocument(infoOrig)
- modDoc, _ := v2.CreateDocument(infoMod)
+ origDoc, _ := v2.CreateDocumentFromConfig(infoOrig, datamodel.NewDocumentConfiguration())
+ modDoc, _ := v2.CreateDocumentFromConfig(infoMod, datamodel.NewDocumentConfiguration())
for i := 0; i < b.N; i++ {
CompareSwaggerDocuments(origDoc, modDoc)
@@ -81,13 +82,13 @@ func Benchmark_CompareSwaggerDocuments(b *testing.B) {
func Benchmark_CompareOpenAPIDocuments_NoChange(b *testing.B) {
- original, _ := ioutil.ReadFile("../test_specs/burgershop.openapi.yaml")
- modified, _ := ioutil.ReadFile("../test_specs/burgershop.openapi.yaml")
+ original, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
+ modified, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
infoOrig, _ := datamodel.ExtractSpecInfo(original)
infoMod, _ := datamodel.ExtractSpecInfo(modified)
- origDoc, _ := v3.CreateDocument(infoOrig)
- modDoc, _ := v3.CreateDocument(infoMod)
+ origDoc, _ := v3.CreateDocumentFromConfig(infoOrig, datamodel.NewDocumentConfiguration())
+ modDoc, _ := v3.CreateDocumentFromConfig(infoMod, datamodel.NewDocumentConfiguration())
for i := 0; i < b.N; i++ {
CompareOpenAPIDocuments(origDoc, modDoc)
@@ -96,13 +97,13 @@ func Benchmark_CompareOpenAPIDocuments_NoChange(b *testing.B) {
func Benchmark_CompareK8s(b *testing.B) {
- original, _ := ioutil.ReadFile("../test_specs/k8s.json")
- modified, _ := ioutil.ReadFile("../test_specs/k8s.json")
+ original, _ := os.ReadFile("../test_specs/k8s.json")
+ modified, _ := os.ReadFile("../test_specs/k8s.json")
infoOrig, _ := datamodel.ExtractSpecInfo(original)
infoMod, _ := datamodel.ExtractSpecInfo(modified)
- origDoc, _ := v2.CreateDocument(infoOrig)
- modDoc, _ := v2.CreateDocument(infoMod)
+ origDoc, _ := v2.CreateDocumentFromConfig(infoOrig, datamodel.NewDocumentConfiguration())
+ modDoc, _ := v2.CreateDocumentFromConfig(infoMod, datamodel.NewDocumentConfiguration())
for i := 0; i < b.N; i++ {
CompareSwaggerDocuments(origDoc, modDoc)
@@ -111,13 +112,13 @@ func Benchmark_CompareK8s(b *testing.B) {
func Benchmark_CompareStripe(b *testing.B) {
- original, _ := ioutil.ReadFile("../test_specs/stripe.yaml")
- modified, _ := ioutil.ReadFile("../test_specs/stripe.yaml")
+ original, _ := os.ReadFile("../test_specs/stripe.yaml")
+ modified, _ := os.ReadFile("../test_specs/stripe.yaml")
infoOrig, _ := datamodel.ExtractSpecInfo(original)
infoMod, _ := datamodel.ExtractSpecInfo(modified)
- origDoc, _ := v3.CreateDocument(infoOrig)
- modDoc, _ := v3.CreateDocument(infoMod)
+ origDoc, _ := v3.CreateDocumentFromConfig(infoOrig, datamodel.NewDocumentConfiguration())
+ modDoc, _ := v3.CreateDocumentFromConfig(infoMod, datamodel.NewDocumentConfiguration())
for i := 0; i < b.N; i++ {
CompareOpenAPIDocuments(origDoc, modDoc)
@@ -127,18 +128,18 @@ func Benchmark_CompareStripe(b *testing.B) {
func ExampleCompareOpenAPIDocuments() {
// Read in a 'left' (original) OpenAPI specification
- original, _ := ioutil.ReadFile("../test_specs/burgershop.openapi.yaml")
+ original, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
// Read in a 'right' (modified) OpenAPI specification
- modified, _ := ioutil.ReadFile("../test_specs/burgershop.openapi-modified.yaml")
+ modified, _ := os.ReadFile("../test_specs/burgershop.openapi-modified.yaml")
// Extract SpecInfo from bytes
infoOriginal, _ := datamodel.ExtractSpecInfo(original)
infoModified, _ := datamodel.ExtractSpecInfo(modified)
// Build OpenAPI Documents from SpecInfo
- origDocument, _ := v3.CreateDocument(infoOriginal)
- modDocDocument, _ := v3.CreateDocument(infoModified)
+ origDocument, _ := v3.CreateDocumentFromConfig(infoOriginal, datamodel.NewDocumentConfiguration())
+ modDocDocument, _ := v3.CreateDocumentFromConfig(infoModified, datamodel.NewDocumentConfiguration())
// Compare OpenAPI Documents and extract to *DocumentChanges
changes := CompareOpenAPIDocuments(origDocument, modDocDocument)