Mirror of https://github.com/LukeHagar/libopenapi.git (synced 2025-12-07 20:47:45 +00:00)
Another massive surgical strike with the rolodex and index reshuffle.
Signed-off-by: quobix <dave@quobix.com>
@@ -186,25 +186,48 @@ func (index *SpecIndex) ExtractRefs(node, parent *yaml.Node, seenPath []string,
 
     segs := strings.Split(value, "/")
     name := segs[len(segs)-1]
-    _, p := utils.ConvertComponentIdIntoFriendlyPathSearch(value)
+    var p string
+    uri := strings.Split(value, "#/")
+    if strings.HasPrefix(value, "http") || filepath.IsAbs(value) {
+        if len(uri) == 2 {
+            _, p = utils.ConvertComponentIdIntoFriendlyPathSearch(fmt.Sprintf("#/%s", uri[1]))
+        } else {
+            _, p = utils.ConvertComponentIdIntoFriendlyPathSearch(uri[0])
+        }
+    } else {
+        if len(uri) == 2 {
+            _, p = utils.ConvertComponentIdIntoFriendlyPathSearch(fmt.Sprintf("#/%s", uri[1]))
+        } else {
+            _, p = utils.ConvertComponentIdIntoFriendlyPathSearch(value)
+        }
+    }
 
     // determine absolute path to this definition
     iroot := filepath.Dir(index.specAbsolutePath)
-    uri := strings.Split(value, "#/")
     var componentName string
     var fullDefinitionPath string
     if len(uri) == 2 {
         if uri[0] == "" {
             fullDefinitionPath = fmt.Sprintf("%s#/%s", index.specAbsolutePath, uri[1])
         } else {
+            if strings.HasPrefix(uri[0], "http") {
+                fullDefinitionPath = value
+            } else {
                 abs, _ := filepath.Abs(filepath.Join(iroot, uri[0]))
                 fullDefinitionPath = fmt.Sprintf("%s#/%s", abs, uri[1])
             }
+        }
         componentName = fmt.Sprintf("#/%s", uri[1])
+    } else {
+        if strings.HasPrefix(uri[0], "http") {
+            fullDefinitionPath = value
         } else {
             fullDefinitionPath = fmt.Sprintf("%s#/%s", iroot, uri[0])
             componentName = fmt.Sprintf("#/%s", uri[0])
         }
+    }
 
     ref := &Reference{
         FullDefinition: fullDefinitionPath,
@@ -470,6 +493,7 @@ func (index *SpecIndex) ExtractComponentsFromRefs(refs []*Reference) []*Referenc
     located := index.FindComponent(ref.FullDefinition, ref.Node)
     if located != nil {
         index.refLock.Lock()
+        // have we already mapped this?
         if index.allMappedRefs[ref.Definition] == nil {
             found = append(found, located)
             index.allMappedRefs[ref.Definition] = located
@@ -478,8 +502,22 @@ func (index *SpecIndex) ExtractComponentsFromRefs(refs []*Reference) []*Referenc
                 Definition:     ref.Definition,
                 FullDefinition: ref.FullDefinition,
             }
 
             sequence[refIndex] = rm
+        } else {
+            // it exists, but is it a component with the same ID?
+            d := index.allMappedRefs[ref.Definition]
+
+            // if the full definition matches, we're good and can skip this.
+            if d.FullDefinition != ref.FullDefinition {
+                found = append(found, located)
+                index.allMappedRefs[ref.FullDefinition] = located
+                rm := &ReferenceMapped{
+                    Reference:      located,
+                    Definition:     ref.Definition,
+                    FullDefinition: ref.FullDefinition,
+                }
+                sequence[refIndex] = rm
+            }
         }
         index.refLock.Unlock()
     } else {
@@ -45,10 +45,24 @@ func (index *SpecIndex) FindComponent(componentId string, parent *yaml.Node) *Re
 
     //witch DetermineReferenceResolveType(componentId) {
     //case LocalResolve: // ideally, every single ref in every single spec is local. however, this is not the case.
-    // return index.FindComponentInRoot(componentId)
+    //return index.FindComponentInRoot(componentId)
 
     //case HttpResolve, FileResolve:
-    return index.performExternalLookup(strings.Split(componentId, "#/"))
+    uri := strings.Split(componentId, "#/")
+    if len(uri) == 2 {
+        if uri[0] != "" {
+            if index.specAbsolutePath == uri[0] {
+                return index.FindComponentInRoot(fmt.Sprintf("#/%s", uri[1]))
+            } else {
+                return index.lookupRolodex(uri)
+            }
+        } else {
+            return index.FindComponentInRoot(fmt.Sprintf("#/%s", uri[1]))
+        }
+    } else {
+        return index.FindComponentInRoot(fmt.Sprintf("#/%s", uri[0]))
+    }
 
     //}
     //return nil
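The new routing above hinges on how the reference string splits on "#/"; here is a small standalone sketch of the three shapes it distinguishes (the example refs are invented):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// mirrors the split used by FindComponent: local refs have an empty first segment,
	// file and URL refs carry their location in uri[0], whole-document refs never split.
	for _, ref := range []string{
		"#/components/schemas/Burger",
		"other_spec.yaml#/components/schemas/Burger",
		"https://example.com/spec.yaml#/components/schemas/Burger",
		"other_spec.yaml",
	} {
		uri := strings.Split(ref, "#/")
		fmt.Printf("parts=%d location=%q\n", len(uri), uri[0])
	}
}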
@@ -326,23 +340,26 @@ func FindComponent(root *yaml.Node, componentId, absoluteFilePath string) *Refer
     return nil
 }
 
-//func (index *SpecIndex) FindComponentInRoot(componentId string) *Reference {
-// if index.root != nil {
-// return FindComponent(index.root, componentId, )
-// }
-// return nil
-//}
+func (index *SpecIndex) FindComponentInRoot(componentId string) *Reference {
+    if index.root != nil {
+        return FindComponent(index.root, componentId, index.specAbsolutePath)
+    }
+    return nil
+}
 
-func (index *SpecIndex) performExternalLookup(uri []string) *Reference {
+func (index *SpecIndex) lookupRolodex(uri []string) *Reference {
 
     if len(uri) > 0 {
 
         // split string to remove file reference
         file := strings.ReplaceAll(uri[0], "file:", "")
-        fileName := filepath.Base(file)
 
-        var absoluteFileLocation string
-        if filepath.IsAbs(file) {
+        var absoluteFileLocation, fileName string
+
+        // is this a local or a remote file?
+        fileName = filepath.Base(file)
+        if filepath.IsAbs(file) || strings.HasPrefix(file, "http") {
             absoluteFileLocation = file
         } else {
             absoluteFileLocation, _ = filepath.Abs(filepath.Join(filepath.Dir(index.specAbsolutePath), file))
@@ -363,6 +380,9 @@ func (index *SpecIndex) performExternalLookup(uri []string) *Reference {
         return nil
     }
 
+    if rFile == nil {
+        panic("FUCK")
+    }
     parsedDocument, err = rFile.GetContentAsYAMLNode()
     if err != nil {
         logger.Error("unable to parse rolodex file", "file", absoluteFileLocation, "error", err)
@@ -50,8 +50,6 @@ func TestSpecIndex_CheckCircularIndex(t *testing.T) {
     rolo.SetRootNode(&rootNode)
     cf.Rolodex = rolo
 
-    // TODO: pick up here.
-
     fsCfg := LocalFSConfig{
         BaseDirectory: cf.BasePath,
         FileFilters:   []string{"first.yaml", "second.yaml", "third.yaml", "fourth.yaml"},
@@ -96,8 +96,8 @@ type SpecIndexConfig struct {
     // exploits, but it's better to be safe than sorry.
     //
     // To read more about this, you can find a discussion here: https://github.com/pb33f/libopenapi/pull/64
-    //AllowRemoteLookup bool // Allow remote lookups for references. Defaults to false
-    //AllowFileLookup bool // Allow file lookups for references. Defaults to false
+    AllowRemoteLookup bool // Allow remote lookups for references. Defaults to false
+    AllowFileLookup   bool // Allow file lookups for references. Defaults to false
 
     // ParentIndex allows the index to be created with knowledge of a parent, before being parsed. This allows
     // a breakglass to be used to prevent loops, checking the tree before cursing down.
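A minimal sketch of switching the newly exposed lookups on; the field and constructor names come from this diff, the import path and everything else is assumed:

package main

import "github.com/pb33f/libopenapi/index"

func main() {
	cf := index.CreateOpenAPIIndexConfig()
	cf.AllowRemoteLookup = true // permit http(s) reference lookups (defaults to false)
	cf.AllowFileLookup = true   // permit local file reference lookups (defaults to false)
	_ = cf
}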
@@ -280,6 +280,8 @@ type SpecIndex struct {
     specAbsolutePath string
     resolver         *Resolver
 
+    built bool
+
     //parentIndex *SpecIndex
     uri []string
     //children []*SpecIndex
@@ -269,7 +269,7 @@ func (resolver *Resolver) VisitReference(ref *Reference, seen map[string]bool, j
             if j.Definition == r.Definition {
 
                 var foundDup *Reference
-                foundRefs := resolver.specIndex.SearchIndexForReference(r.Definition)
+                foundRefs := resolver.specIndex.SearchIndexForReferenceByReference(r)
                 if len(foundRefs) > 0 {
                     foundDup = foundRefs[0]
                 }
@@ -311,7 +311,7 @@ func (resolver *Resolver) VisitReference(ref *Reference, seen map[string]bool, j
 
             if !skip {
                 var original *Reference
-                foundRefs := resolver.specIndex.SearchIndexForReference(r.Definition)
+                foundRefs := resolver.specIndex.SearchIndexForReferenceByReference(r)
                 if len(foundRefs) > 0 {
                     original = foundRefs[0]
                 }
@@ -408,8 +408,27 @@ func (resolver *Resolver) extractRelatives(ref *Reference, node, parent *yaml.No
         }
 
         value := node.Content[i+1].Value
+        var locatedRef []*Reference
+        searchRef := &Reference{
+            Definition:     value,
+            FullDefinition: ref.FullDefinition,
+            RemoteLocation: ref.RemoteLocation,
+            IsRemote:       true,
+        }
 
-        locatedRef := resolver.specIndex.SearchIndexForReference(value)
+        // we're searching a remote document, we need to build a full path to the reference
+        if ref.IsRemote {
+            if ref.RemoteLocation != "" {
+                searchRef = &Reference{
+                    Definition:     value,
+                    FullDefinition: fmt.Sprintf("%s%s", ref.RemoteLocation, value),
+                    RemoteLocation: ref.RemoteLocation,
+                    IsRemote:       true,
+                }
+            }
+        }
+
+        locatedRef = resolver.specIndex.SearchIndexForReferenceByReference(searchRef)
+
         if locatedRef == nil {
             _, path := utils.ConvertComponentIdIntoFriendlyPathSearch(value)
@@ -407,7 +407,7 @@ func TestResolver_ResolveComponents_Stripe(t *testing.T) {
 
     resolveFile, _ := os.ReadFile(baseDir)
 
-    info, err := datamodel.ExtractSpecInfoWithDocumentCheck(resolveFile, true)
+    info, _ := datamodel.ExtractSpecInfoWithDocumentCheck(resolveFile, true)
 
     fileFS, err := NewLocalFS(baseDir, os.DirFS(filepath.Dir(baseDir)))
     if err != nil {
@@ -5,6 +5,7 @@ package index
 
 import (
     "errors"
+    "fmt"
     "github.com/pb33f/libopenapi/datamodel"
     "gopkg.in/yaml.v3"
     "io"
@@ -17,9 +18,12 @@ import (
     "time"
 )
 
+type HasIndex interface {
+    GetIndex() *SpecIndex
+}
+
 type CanBeIndexed interface {
     Index(config *SpecIndexConfig) (*SpecIndex, error)
-    GetIndex() *SpecIndex
 }
 
 type RolodexFile interface {
@@ -86,10 +90,10 @@ func (rf *rolodexFile) Name() string {
 
 func (rf *rolodexFile) GetIndex() *SpecIndex {
     if rf.localFile != nil {
-        return rf.localFile.index
+        return rf.localFile.GetIndex()
     }
     if rf.remoteFile != nil {
-        // TODO: remote file index
+        return rf.remoteFile.GetIndex()
     }
     return nil
 }
@@ -207,7 +211,6 @@ func (rf *rolodexFile) GetErrors() []error {
 }
 
 func NewRolodex(indexConfig *SpecIndexConfig) *Rolodex {
-
     r := &Rolodex{
         indexConfig: indexConfig,
         localFS:     make(map[string]fs.FS),
@@ -304,16 +307,23 @@ func (r *Rolodex) IndexTheRolodex() error {
                 indexChan <- idx
             }
 
-            if lfs, ok := fs.(*LocalFS); ok {
-                for _, f := range lfs.Files {
+            if lfs, ok := fs.(RolodexFS); ok {
+                wait := false
+                for _, f := range lfs.GetFiles() {
                     if idxFile, ko := f.(CanBeIndexed); ko {
                         wg.Add(1)
+                        wait = true
                         go indexFileFunc(idxFile, f.GetFullPath())
                     }
                 }
+                if wait {
                     wg.Wait()
+                }
                 doneChan <- true
                 return
+            } else {
+                errChan <- errors.New("rolodex file system is not a RolodexFS")
+                doneChan <- true
             }
         }
 
@@ -440,15 +450,12 @@ func (r *Rolodex) Open(location string) (RolodexFile, error) {
     var errorStack []error
 
     var localFile *LocalFile
-    //var remoteFile *RemoteFile
+    var remoteFile *RemoteFile
 
     if r == nil || r.localFS == nil && r.remoteFS == nil {
         panic("WHAT NO....")
     }
 
-    for k, v := range r.localFS {
-
-        // check if this is a URL or an abs/rel reference.
     fileLookup := location
     isUrl := false
     u, _ := url.Parse(location)
@@ -456,8 +463,12 @@ func (r *Rolodex) Open(location string) (RolodexFile, error) {
         isUrl = true
     }
 
-    // TODO handle URLs.
     if !isUrl {
+
+        for k, v := range r.localFS {
+
+            // check if this is a URL or an abs/rel reference.
             if !filepath.IsAbs(location) {
                 fileLookup, _ = filepath.Abs(filepath.Join(k, location))
             }
@@ -504,8 +515,52 @@ func (r *Rolodex) Open(location string) (RolodexFile, error) {
                     break
                 }
             }
+
+        }
+    } else {
+
+        if !r.indexConfig.AllowRemoteLookup {
+            return nil, fmt.Errorf("remote lookup for '%s' not allowed, please set the index configuration to "+
+                "AllowRemoteLookup to true", fileLookup)
+        }
+
+        for _, v := range r.remoteFS {
+            f, err := v.Open(fileLookup)
+            if err == nil {
+
+                if rf, ok := interface{}(f).(*RemoteFile); ok {
+                    remoteFile = rf
+                    break
+                } else {
+
+                    bytes, rErr := io.ReadAll(f)
+                    if rErr != nil {
+                        errorStack = append(errorStack, rErr)
+                        continue
+                    }
+                    s, sErr := f.Stat()
+                    if sErr != nil {
+                        errorStack = append(errorStack, sErr)
+                        continue
+                    }
+                    if len(bytes) > 0 {
+                        remoteFile = &RemoteFile{
+                            filename:     filepath.Base(fileLookup),
+                            name:         filepath.Base(fileLookup),
+                            extension:    ExtractFileType(fileLookup),
+                            data:         bytes,
+                            fullPath:     fileLookup,
+                            lastModified: s.ModTime(),
+                            index:        r.rootIndex,
+                        }
+                        break
                     }
                 }
+            }
+
+        }
+    }
 
     if localFile != nil {
         return &rolodexFile{
             rolodex: r,
@@ -514,5 +569,13 @@ func (r *Rolodex) Open(location string) (RolodexFile, error) {
         }, errors.Join(errorStack...)
     }
 
+    if remoteFile != nil {
+        return &rolodexFile{
+            rolodex:    r,
+            location:   remoteFile.fullPath,
+            remoteFile: remoteFile,
+        }, errors.Join(errorStack...)
+    }
+
     return nil, errors.Join(errorStack...)
 }
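A usage sketch stitched together from the rolodex calls visible in this commit (NewRolodex, NewLocalFSWithConfig, AddLocalFS, Open, GetContentAsYAMLNode); the paths are placeholders and nothing beyond those names is asserted:

package main

import (
	"os"

	"github.com/pb33f/libopenapi/index"
)

func main() {
	cf := index.CreateOpenAPIIndexConfig()
	cf.BasePath = "../test_specs"

	// register a local filesystem with the rolodex
	rolo := index.NewRolodex(cf)
	fileFS, _ := index.NewLocalFSWithConfig(&index.LocalFSConfig{
		BaseDirectory: cf.BasePath,
		DirFS:         os.DirFS(cf.BasePath),
	})
	rolo.AddLocalFS(cf.BasePath, fileFS)

	// Open resolves against the registered local (and, if allowed, remote) file systems
	// and hands back a file that can be parsed and indexed.
	if f, err := rolo.Open("burgershop.openapi.yaml"); err == nil {
		node, _ := f.GetContentAsYAMLNode()
		_ = node
	}
}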
@@ -20,10 +20,8 @@ type LocalFS struct {
     entryPointDirectory string
     baseDirectory       string
     Files               map[string]RolodexFile
-    parseTime           int64
     logger              *slog.Logger
     readingErrors       []error
-    filters             []string
 }
 
 func (l *LocalFS) GetFiles() map[string]RolodexFile {
@@ -180,26 +178,23 @@ func NewLocalFSWithConfig(config *LocalFSConfig) (*LocalFS, error) {
         switch extension {
         case YAML, JSON:
 
-            file, readErr := config.DirFS.Open(p)
+            dirFile, readErr := config.DirFS.Open(p)
             modTime := time.Now()
             if readErr != nil {
-                readingErrors = append(readingErrors, readErr)
                 allErrors = append(allErrors, readErr)
                 logger.Error("[rolodex] cannot open file: ", "file", abs, "error", readErr.Error())
                 return nil
             }
-            stat, statErr := file.Stat()
+            stat, statErr := dirFile.Stat()
             if statErr != nil {
-                readingErrors = append(readingErrors, statErr)
                 allErrors = append(allErrors, statErr)
                 logger.Error("[rolodex] cannot stat file: ", "file", abs, "error", statErr.Error())
             }
             if stat != nil {
                 modTime = stat.ModTime()
             }
-            fileData, readErr = io.ReadAll(file)
+            fileData, readErr = io.ReadAll(dirFile)
             if readErr != nil {
-                readingErrors = append(readingErrors, readErr)
                 allErrors = append(allErrors, readErr)
                 logger.Error("cannot read file data: ", "file", abs, "error", readErr.Error())
                 return nil
@@ -12,17 +12,6 @@ import (
 // var refRegex = regexp.MustCompile(`['"]?\$ref['"]?\s*:\s*['"]?([^'"]*?)['"]`)
 var refRegex = regexp.MustCompile(`('\$ref'|"\$ref"|\$ref)\s*:\s*('[^']*'|"[^"]*"|\S*)`)
 
-/*
-    r := regexp.MustCompile(`('\$ref'|"\$ref"|\$ref)\s*:\s*('[^']*'|"[^"]*"|\S*)`)
-    matches := r.FindAllStringSubmatch(text, -1)
-    for _, submatches := range matches {
-        if len(submatches) > 2 {
-            fmt.Println("Full match:", submatches[0])
-            fmt.Println("JSON Schema reference: ", submatches[2])
-        }
-    }
-*/
-
 type RefType int
 
 const (
@@ -108,12 +97,12 @@ func ExtractRefType(ref string) RefType {
 
 func ExtractRefs(content string) [][]string {
 
-    res := refRegex.FindAllStringSubmatch(content, -1)
+    return refRegex.FindAllStringSubmatch(content, -1)
 
-    var results []*ExtractedRef
-    for _, r := range res {
-        results = append(results, &ExtractedRef{Location: r[1], Type: ExtractRefType(r[1])})
-    }
-
-    return res
+    //var results []*ExtractedRef
+    //for _, r := range res {
+    //    results = append(results, &ExtractedRef{Location: r[1], Type: ExtractRefType(r[1])})
+    //}
 }
@@ -6,6 +6,7 @@ package index
 import (
     "errors"
     "fmt"
+    "github.com/pb33f/libopenapi/datamodel"
     "golang.org/x/exp/slog"
     "golang.org/x/sync/syncmap"
     "gopkg.in/yaml.v3"
@@ -19,6 +20,7 @@ import (
 )
 
 type RemoteFS struct {
+    indexConfig       *SpecIndexConfig
     rootURL           string
     rootURLParsed     *url.URL
     RemoteHandlerFunc RemoteURLHandler
@@ -41,6 +43,9 @@ type RemoteFile struct {
     URL           *url.URL
     lastModified  time.Time
     seekingErrors []error
+    index         *SpecIndex
+    parsed        *yaml.Node
+    offset        int64
 }
 
 func (f *RemoteFile) GetFileName() string {
@@ -52,7 +57,25 @@ func (f *RemoteFile) GetContent() string {
 }
 
 func (f *RemoteFile) GetContentAsYAMLNode() (*yaml.Node, error) {
-    return nil, errors.New("not implemented")
+    if f.parsed != nil {
+        return f.parsed, nil
+    }
+    if f.index != nil && f.index.root != nil {
+        return f.index.root, nil
+    }
+    if f.data == nil {
+        return nil, fmt.Errorf("no data to parse for file: %s", f.fullPath)
+    }
+    var root yaml.Node
+    err := yaml.Unmarshal(f.data, &root)
+    if err != nil {
+        return nil, err
+    }
+    if f.index != nil && f.index.root == nil {
+        f.index.root = &root
+    }
+    f.parsed = &root
+    return &root, nil
 }
 
 func (f *RemoteFile) GetFileExtension() FileExtension {
@@ -71,6 +94,8 @@ func (f *RemoteFile) GetFullPath() string {
     return f.fullPath
 }
 
+// fs.FileInfo interfaces
+
 func (f *RemoteFile) Name() string {
     return f.name
 }
@@ -91,40 +116,52 @@ func (f *RemoteFile) IsDir() bool {
     return false
 }
 
+// fs.File interfaces
+
 func (f *RemoteFile) Sys() interface{} {
     return nil
 }
 
-func (f *RemoteFile) Index(config *SpecIndexConfig) (*SpecIndex, error) {
-
-    // TODO
-    return nil, nil
-}
-func (f *RemoteFile) GetIndex() *SpecIndex {
-
-    // TODO
+func (f *RemoteFile) Close() error {
     return nil
 }
 
-type remoteRolodexFile struct {
-    f      *RemoteFile
-    offset int64
+func (f *RemoteFile) Stat() (fs.FileInfo, error) {
+    return f, nil
 }
 
-func (f *remoteRolodexFile) Close() error { return nil }
-func (f *remoteRolodexFile) Stat() (fs.FileInfo, error) { return f.f, nil }
-func (f *remoteRolodexFile) Read(b []byte) (int, error) {
-    if f.offset >= int64(len(f.f.data)) {
+func (f *RemoteFile) Read(b []byte) (int, error) {
+    if f.offset >= int64(len(f.data)) {
         return 0, io.EOF
     }
     if f.offset < 0 {
-        return 0, &fs.PathError{Op: "read", Path: f.f.name, Err: fs.ErrInvalid}
+        return 0, &fs.PathError{Op: "read", Path: f.name, Err: fs.ErrInvalid}
     }
-    n := copy(b, f.f.data[f.offset:])
+    n := copy(b, f.data[f.offset:])
     f.offset += int64(n)
     return n, nil
 }
 
+func (f *RemoteFile) Index(config *SpecIndexConfig) (*SpecIndex, error) {
+
+    if f.index != nil {
+        return f.index, nil
+    }
+    content := f.data
+
+    // first, we must parse the content of the file
+    info, err := datamodel.ExtractSpecInfoWithDocumentCheck(content, true)
+    if err != nil {
+        return nil, err
+    }
+
+    index := NewSpecIndexWithConfig(info.RootNode, config)
+    index.specAbsolutePath = f.fullPath
+    f.index = index
+    return index, nil
+}
+
+func (f *RemoteFile) GetIndex() *SpecIndex {
+    return f.index
+}
+
 type FileExtension int
 
 const (
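RemoteFile now satisfies fs.File and fs.FileInfo directly instead of going through the deleted remoteRolodexFile wrapper. A self-contained sketch of the same offset-and-copy Read pattern against the standard interfaces (a toy type, not the library's):

package main

import (
	"fmt"
	"io"
	"io/fs"
	"time"
)

// memFile mimics the RemoteFile read logic above: a byte slice plus an offset.
type memFile struct {
	name   string
	data   []byte
	offset int64
}

func (m *memFile) Name() string               { return m.name }
func (m *memFile) Size() int64                { return int64(len(m.data)) }
func (m *memFile) Mode() fs.FileMode          { return 0 }
func (m *memFile) ModTime() time.Time         { return time.Time{} }
func (m *memFile) IsDir() bool                { return false }
func (m *memFile) Sys() interface{}           { return nil }
func (m *memFile) Stat() (fs.FileInfo, error) { return m, nil }
func (m *memFile) Close() error               { return nil }

func (m *memFile) Read(b []byte) (int, error) {
	if m.offset >= int64(len(m.data)) {
		return 0, io.EOF
	}
	n := copy(b, m.data[m.offset:])
	m.offset += int64(n)
	return n, nil
}

func main() {
	var f fs.File = &memFile{name: "spec.yaml", data: []byte("openapi: 3.1.0\n")}
	b, _ := io.ReadAll(f)
	fmt.Printf("%q\n", b)
}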
@@ -133,19 +170,39 @@ const (
     UNSUPPORTED
 )
 
-func NewRemoteFS(rootURL string) (*RemoteFS, error) {
+func NewRemoteFSWithConfig(specIndexConfig *SpecIndexConfig) (*RemoteFS, error) {
+    remoteRootURL := specIndexConfig.BaseURL
+    rfs := &RemoteFS{
+        logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+            Level: slog.LevelDebug,
+        })),
+        rootURLParsed: remoteRootURL,
+        FetchChannel:  make(chan *RemoteFile),
+    }
+    if remoteRootURL != nil {
+        rfs.rootURL = remoteRootURL.String()
+    }
+    return rfs, nil
+}
+
+func NewRemoteFS() (*RemoteFS, error) {
+    config := CreateOpenAPIIndexConfig()
+    return NewRemoteFSWithConfig(config)
+}
+
+func NewRemoteFSWithRootURL(rootURL string) (*RemoteFS, error) {
     remoteRootURL, err := url.Parse(rootURL)
     if err != nil {
         return nil, err
     }
-    return &RemoteFS{
-        logger: slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
-            Level: slog.LevelDebug,
-        })),
-        rootURL:       rootURL,
-        rootURLParsed: remoteRootURL,
-        FetchChannel:  make(chan *RemoteFile),
-    }, nil
+    config := CreateOpenAPIIndexConfig()
+    config.BaseURL = remoteRootURL
+    return NewRemoteFSWithConfig(config)
+}
+
+func (i *RemoteFS) SetIndexConfig(config *SpecIndexConfig) {
+    i.indexConfig = config
 }
 
 func (i *RemoteFS) GetFiles() map[string]RolodexFile {
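How the reshaped constructors might be used, following the tests further down; the URL is a placeholder and only the names shown in this hunk are relied on:

package main

import (
	"net/http"

	"github.com/pb33f/libopenapi/index"
)

func main() {
	// root-URL flavour, as used by the remote FS tests
	remoteFS, _ := index.NewRemoteFSWithRootURL("https://example.com/specs")
	c := http.Client{}
	remoteFS.RemoteHandlerFunc = c.Get

	// opening a path relative to the root URL fetches, parses and indexes the file
	if f, err := remoteFS.Open("/root.yaml"); err == nil {
		_ = f
	}
}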
@@ -200,7 +257,7 @@ func (i *RemoteFS) seekRelatives(file *RemoteFile) {
             fmt.Printf("Found relative HTTP reference: %s\n", ref[1])
         }
     }
-    if i.remoteRunning == false {
+    if !i.remoteRunning {
         i.remoteRunning = true
         i.remoteWg.Wait()
         i.remoteRunning = false
@@ -215,15 +272,29 @@ func (i *RemoteFS) Open(remoteURL string) (fs.File, error) {
         return nil, err
     }
 
+    remoteParsedOrig, _ := url.Parse(remoteURL)
+
+    // try path first
+    if r, ok := i.Files.Load(remoteParsedURL.Path); ok {
+        return r.(*RemoteFile), nil
+    }
+
     fileExt := ExtractFileType(remoteParsedURL.Path)
 
     if fileExt == UNSUPPORTED {
         return nil, &fs.PathError{Op: "open", Path: remoteURL, Err: fs.ErrInvalid}
     }
 
-    i.logger.Debug("Loading remote file", "file", remoteParsedURL.Path)
+    // if the remote URL is absolute (http:// or https://), and we have a rootURL defined, we need to override
+    // the host being defined by this URL, and use the rootURL instead, but keep the path.
+    if i.rootURLParsed != nil && remoteParsedURL.Host != "" {
+        remoteParsedURL.Host = i.rootURLParsed.Host
+        remoteParsedURL.Scheme = i.rootURLParsed.Scheme
+    }
 
-    response, clientErr := i.RemoteHandlerFunc(i.rootURL + remoteURL)
+    i.logger.Debug("Loading remote file", "file", remoteURL, "remoteURL", remoteParsedURL.String())
+
+    response, clientErr := i.RemoteHandlerFunc(remoteParsedURL.String())
     if clientErr != nil {
         i.logger.Error("client error", "error", response.StatusCode)
 
@@ -238,7 +309,7 @@ func (i *RemoteFS) Open(remoteURL string) (fs.File, error) {
     if response.StatusCode >= 400 {
         i.logger.Error("Unable to fetch remote document %s",
             "file", remoteParsedURL.Path, "status", response.StatusCode, "resp", string(responseBytes))
-        return nil, errors.New(fmt.Sprintf("Unable to fetch remote document: %s", string(responseBytes)))
+        return nil, fmt.Errorf("unable to fetch remote document: %s", string(responseBytes))
     }
 
     absolutePath, pathErr := filepath.Abs(remoteParsedURL.Path)
@@ -253,10 +324,12 @@ func (i *RemoteFS) Open(remoteURL string) (fs.File, error) {
     lastModifiedTime, parseErr := time.Parse(time.RFC1123, lastModified)
 
     if parseErr != nil {
-        return nil, parseErr
+        // can't extract last modified, so use now
+        lastModifiedTime = time.Now()
     }
 
     filename := filepath.Base(remoteParsedURL.Path)
 
     remoteFile := &RemoteFile{
         filename: filename,
         name:     remoteParsedURL.Path,
@@ -266,14 +339,31 @@ func (i *RemoteFS) Open(remoteURL string) (fs.File, error) {
         URL:          remoteParsedURL,
         lastModified: lastModifiedTime,
     }
 
+    copiedCfg := *i.indexConfig
+
+    newBase := fmt.Sprintf("%s://%s%s", remoteParsedOrig.Scheme, remoteParsedOrig.Host,
+        filepath.Dir(remoteParsedOrig.Path))
+    newBaseURL, _ := url.Parse(newBase)
+
+    copiedCfg.BaseURL = newBaseURL
+    copiedCfg.SpecAbsolutePath = remoteURL
+    idx, _ := remoteFile.Index(&copiedCfg)
+
+    // for each index, we need a resolver
+    resolver := NewResolver(idx)
+    idx.resolver = resolver
+
     i.Files.Store(absolutePath, remoteFile)
 
     i.logger.Debug("successfully loaded file", "file", absolutePath)
     i.seekRelatives(remoteFile)
 
-    if i.remoteRunning == false {
-        return &remoteRolodexFile{remoteFile, 0}, errors.Join(i.remoteErrors...)
+    idx.BuildIndex()
+
+    if !i.remoteRunning {
+        return remoteFile, errors.Join(i.remoteErrors...)
     } else {
-        return &remoteRolodexFile{remoteFile, 0}, nil
+        return remoteFile, nil
     }
 }
@@ -86,7 +86,7 @@ func TestNewRemoteFS_BasicCheck(t *testing.T) {
     defer server.Close()
 
     //remoteFS := NewRemoteFS("https://raw.githubusercontent.com/digitalocean/openapi/main/specification/")
-    remoteFS, _ := NewRemoteFS(server.URL)
+    remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
     remoteFS.RemoteHandlerFunc = test_httpClient.Get
 
     file, err := remoteFS.Open("/file1.yaml")
@@ -112,7 +112,7 @@ func TestNewRemoteFS_BasicCheck_Relative(t *testing.T) {
     server := test_buildServer()
     defer server.Close()
 
-    remoteFS, _ := NewRemoteFS(server.URL)
+    remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
     remoteFS.RemoteHandlerFunc = test_httpClient.Get
 
     file, err := remoteFS.Open("/deeper/file2.yaml")
@@ -138,7 +138,7 @@ func TestNewRemoteFS_BasicCheck_Relative_Deeper(t *testing.T) {
     server := test_buildServer()
     defer server.Close()
 
-    remoteFS, _ := NewRemoteFS(server.URL)
+    remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
     remoteFS.RemoteHandlerFunc = test_httpClient.Get
 
     file, err := remoteFS.Open("/deeper/even_deeper/file3.yaml")
@@ -164,7 +164,7 @@ func TestNewRemoteFS_BasicCheck_SeekRelatives(t *testing.T) {
     server := test_buildServer()
     defer server.Close()
 
-    remoteFS, _ := NewRemoteFS(server.URL)
+    remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
     remoteFS.RemoteHandlerFunc = test_httpClient.Get
 
     file, err := remoteFS.Open("/bag/list.yaml")
@@ -9,10 +9,9 @@ import (
     "strings"
 )
 
-// SearchIndexForReference searches the index for a reference, first looking through the mapped references
-// and then externalSpecIndex for a match. If no match is found, it will recursively search the child indexes
-// extracted when parsing the OpenAPI Spec.
-func (index *SpecIndex) SearchIndexForReference(ref string) []*Reference {
+func (index *SpecIndex) SearchIndexForReferenceByReference(fullRef *Reference) []*Reference {
+
+    ref := fullRef.FullDefinition
 
     absPath := index.specAbsolutePath
     if absPath == "" {
@@ -22,8 +21,12 @@ func (index *SpecIndex) SearchIndexForReference(ref string) []*Reference {
     uri := strings.Split(ref, "#/")
     if len(uri) == 2 {
         if uri[0] != "" {
+            if strings.HasPrefix(uri[0], "http") {
+                roloLookup = fullRef.FullDefinition
+            } else {
                 roloLookup, _ = filepath.Abs(filepath.Join(absPath, uri[0]))
             }
+        }
         ref = fmt.Sprintf("#/%s", uri[1])
     } else {
         roloLookup, _ = filepath.Abs(filepath.Join(absPath, uri[0]))
@@ -38,7 +41,6 @@ func (index *SpecIndex) SearchIndexForReference(ref string) []*Reference {
         return []*Reference{r}
     }
 
-    // TODO: look in the rolodex.
     if roloLookup != "" {
         rFile, err := index.rolodex.Open(roloLookup)
         if err != nil {
@@ -70,33 +72,17 @@ func (index *SpecIndex) SearchIndexForReference(ref string) []*Reference {
         }
     }
 
-    panic("should not be here")
-    fmt.Println(roloLookup)
-    return nil
-
-    //if r, ok := index.allMappedRefs[ref]; ok {
-    // return []*Reference{r}jh
-    //}
-    //for c := range index.children {
-    // found := goFindMeSomething(index.children[c], ref)
-    // if found != nil {
-    //  return found
-    // }
-    //}
-    //return nil
-}
-
-func (index *SpecIndex) SearchAncestryForSeenURI(uri string) *SpecIndex {
-    //if index.parentIndex == nil {
-    // return nil
-    //}
-    //if index.uri[0] != uri {
-    // return index.parentIndex.SearchAncestryForSeenURI(uri)
-    //}
-    //return index
+    fmt.Printf("unable to locate reference: %s, within index: %s\n", ref, index.specAbsolutePath)
     return nil
 }
 
-func goFindMeSomething(i *SpecIndex, ref string) []*Reference {
-    return i.SearchIndexForReference(ref)
+// SearchIndexForReference searches the index for a reference, first looking through the mapped references
+// and then externalSpecIndex for a match. If no match is found, it will recursively search the child indexes
+// extracted when parsing the OpenAPI Spec.
+func (index *SpecIndex) SearchIndexForReference(ref string) []*Reference {
+    return index.SearchIndexForReferenceByReference(&Reference{FullDefinition: ref})
+}
+
+func (index *SpecIndex) SearchIndexForReferenceWithParent(ref string, reference *Reference) []*Reference {
+    return index.SearchIndexForReferenceByReference(&Reference{FullDefinition: ref})
 }
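The string-based lookup is now a thin wrapper over the reference-based one; a hedged sketch of the two equivalent entry points (the reference value is illustrative):

package main

import "github.com/pb33f/libopenapi/index"

// lookupBurger shows both entry points kept/added by this change.
func lookupBurger(idx *index.SpecIndex) []*index.Reference {
	if found := idx.SearchIndexForReference("burgers.yaml#/components/schemas/Burger"); found != nil {
		return found
	}
	return idx.SearchIndexForReferenceByReference(&index.Reference{
		FullDefinition: "burgers.yaml#/components/schemas/Burger",
	})
}

func main() {}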
@@ -103,6 +103,9 @@ func createNewIndex(rootNode *yaml.Node, index *SpecIndex, avoidBuildOut bool) *
 // useful for looking up things, the count operations are all run in parallel and then the final calculations are run
 // the index is ready.
 func (index *SpecIndex) BuildIndex() {
+    if index.built {
+        return
+    }
     countFuncs := []func() int{
         index.GetOperationCount,
         index.GetComponentSchemaCount,
@@ -132,6 +135,7 @@ func (index *SpecIndex) BuildIndex() {
     index.GetInlineDuplicateParamCount()
     index.GetAllDescriptionsCount()
     index.GetTotalTagsCount()
+    index.built = true
 }
 
 // GetRootNode returns document root node.
@@ -998,7 +1002,6 @@ func (index *SpecIndex) GetOperationCount() int {
                 }
             }
             if valid {
-                fmt.Sprint(p)
                 ref := &Reference{
                     Definition: m.Value,
                     Name:       m.Value,
@@ -6,6 +6,8 @@ package index
 import (
     "fmt"
     "log"
+    "net/http"
+    "net/http/httptest"
     "net/url"
     "os"
     "os/exec"
@@ -433,18 +435,81 @@ func TestSpecIndex_NoRoot(t *testing.T) {
     assert.Equal(t, -1, index.GetGlobalLinksCount())
 }
 
+func test_buildMixedRefServer() *httptest.Server {
+
+    bs, _ := os.ReadFile("../test_specs/burgershop.openapi.yaml")
+    return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
+        if req.URL.String() == "/daveshanley/vacuum/main/model/test_files/burgershop.openapi.yaml" {
+            rw.Header().Set("Last-Modified", "Wed, 21 Oct 2015 07:28:00 GMT")
+            _, _ = rw.Write(bs)
+            return
+        }
+
+        _, _ = rw.Write([]byte(`OK`))
+    }))
+}
+
 func TestSpecIndex_BurgerShopMixedRef(t *testing.T) {
-    spec, _ := os.ReadFile("../test_specs/mixedref-burgershop.openapi.yaml")
+
+    // create a test server.
+    server := test_buildMixedRefServer()
+    defer server.Close()
+
+    // create a new config that allows local and remote to be mixed up.
+    cf := CreateOpenAPIIndexConfig()
+    cf.AvoidBuildIndex = true
+    cf.AllowRemoteLookup = true
+    cf.AvoidCircularReferenceCheck = true
+    cf.BasePath = "../test_specs"
+
+    // setting this baseURL will override the base
+    //cf.BaseURL, _ = url.Parse(server.URL)
+
+    cFile := "../test_specs/mixedref-burgershop.openapi.yaml"
+    yml, _ := os.ReadFile(cFile)
     var rootNode yaml.Node
-    _ = yaml.Unmarshal(spec, &rootNode)
+    _ = yaml.Unmarshal([]byte(yml), &rootNode)
 
-    cwd, _ := os.Getwd()
+    // create a new rolodex
+    rolo := NewRolodex(cf)
 
-    index := NewSpecIndexWithConfig(&rootNode, &SpecIndexConfig{
-        //AllowRemoteLookup: true,
-        // AllowFileLookup: true,
-        BasePath: cwd,
-    })
+    // set the rolodex root node to the root node of the spec.
+    rolo.SetRootNode(&rootNode)
+
+    // create a new remote fs and set the config for indexing.
+    //remoteFS, _ := NewRemoteFSWithRootURL(server.URL)
+    remoteFS, _ := NewRemoteFS()
+    remoteFS.SetIndexConfig(cf)
+
+    // set our remote handler func
+    c := http.Client{}
+    remoteFS.RemoteHandlerFunc = c.Get
+
+    // configure the local filesystem.
+    fsCfg := LocalFSConfig{
+        BaseDirectory: cf.BasePath,
+        FileFilters:   []string{"burgershop.openapi.yaml"},
+        DirFS:         os.DirFS(cf.BasePath),
+    }
+
+    // create a new local filesystem.
+    fileFS, err := NewLocalFSWithConfig(&fsCfg)
+    assert.NoError(t, err)
+
+    // add file systems to the rolodex
+    rolo.AddLocalFS(cf.BasePath, fileFS)
+    rolo.AddRemoteFS(server.URL, remoteFS)
+
+    // index the rolodex.
+    indexedErr := rolo.IndexTheRolodex()
+    rolo.BuildIndexes()
+
+    assert.NoError(t, indexedErr)
+
+    index := rolo.GetRootIndex()
+    rolo.CheckForCircularReferences()
 
     assert.Len(t, index.allRefs, 5)
     assert.Len(t, index.allMappedRefs, 5)
@@ -459,6 +524,8 @@ func TestSpecIndex_BurgerShopMixedRef(t *testing.T) {
     assert.Equal(t, 2, index.GetOperationsParameterCount())
     assert.Equal(t, 1, index.GetInlineDuplicateParamCount())
     assert.Equal(t, 1, index.GetInlineUniqueParamCount())
+    assert.Len(t, index.refErrors, 0)
+    assert.Len(t, index.GetCircularReferences(), 0)
 }
 
 func TestSpecIndex_TestEmptyBrokenReferences(t *testing.T) {
@@ -630,7 +697,7 @@ func TestSpecIndex_TestPathsNodeAsArray(t *testing.T) {
     _ = yaml.Unmarshal([]byte(yml), &rootNode)
 
     index := NewSpecIndexWithConfig(&rootNode, CreateOpenAPIIndexConfig())
-    assert.Nil(t, index.performExternalLookup(nil))
+    assert.Nil(t, index.lookupRolodex(nil))
 }
 
 func TestSpecIndex_lookupRemoteReference_SeenSourceSimulation_Error(t *testing.T) {
@@ -441,9 +441,5 @@ func GenerateCleanSpecConfigBaseURL(baseURL *url.URL, dir string, includeFile bo
         }
 
     }
-    if strings.HasSuffix(p, "/") {
-        p = p[:len(p)-1]
-    }
-    return p
-
+    return strings.TrimSuffix(p, "/")
 }
@@ -234,7 +234,7 @@ paths:
           content:
             application/json:
              schema:
-                $ref: 'https://raw.githubusercontent.com/daveshanley/vacuum/main/model/test_files/burgershop.openapi.yaml'
+                $ref: 'https://raw.githubusercontent.com/daveshanley/vacuum/main/model/test_files/burgershop.openapi.yaml#/components/schemas/Error'
 components:
   schemas:
     Error:
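The GenerateCleanSpecConfigBaseURL hunk above folds the manual HasSuffix branch into strings.TrimSuffix; a quick standalone check of the equivalence (sample values invented):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// TrimSuffix strips the trailing slash only when it is present, otherwise returns the
	// input unchanged, which is exactly what the removed HasSuffix branch did.
	for _, p := range []string{"https://example.com/specs/", "https://example.com/specs"} {
		fmt.Println(strings.TrimSuffix(p, "/"))
	}
}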