Another round of cleaning.

Signed-off-by: quobix <dave@quobix.com>
quobix committed 2023-10-21 18:41:53 -04:00
parent 28047d08d2
commit 3bf830c2b3
8 changed files with 34 additions and 77 deletions

View File

@@ -145,18 +145,18 @@ func (index *SpecIndex) lookupRolodex(uri []string) *Reference {
rFile, rError := index.rolodex.Open(absoluteFileLocation)
if rError != nil {
logger.Error("unable to open rolodex file", "file", absoluteFileLocation, "error", rError)
index.logger.Error("unable to open rolodex file", "file", absoluteFileLocation, "error", rError)
return nil
}
if rFile == nil {
logger.Error("rolodex file is empty!", "file", absoluteFileLocation)
index.logger.Error("rolodex file is empty!", "file", absoluteFileLocation)
return nil
}
parsedDocument, err = rFile.GetContentAsYAMLNode()
if err != nil {
logger.Error("unable to parse rolodex file", "file", absoluteFileLocation, "error", err)
index.logger.Error("unable to parse rolodex file", "file", absoluteFileLocation, "error", err)
return nil
}
} else {
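
The log calls above use log/slog's structured style: a message string followed by alternating key/value pairs, now routed through the index's own logger instead of a package global. A minimal standalone sketch of that call shape; the path and error values are invented for illustration:

package main

import (
	"errors"
	"log/slog"
	"os"
)

func main() {
	// message first, then alternating key/value pairs
	logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelError}))
	rError := errors.New("file not found") // invented error value
	logger.Error("unable to open rolodex file", "file", "/tmp/openapi.yaml", "error", rError)
}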

View File

@@ -266,6 +266,7 @@ type SpecIndex struct {
cache syncmap.Map
built bool
uri []string
+ logger *slog.Logger
}
// GetResolver returns the resolver for this index.

View File

@@ -10,7 +10,6 @@ import (
"gopkg.in/yaml.v3"
"io"
"io/fs"
"log/slog"
"net/url"
"os"
"path/filepath"
@@ -41,15 +40,6 @@ type RolodexFile interface {
Mode() os.FileMode
}
- var logger *slog.Logger
- func init() {
- logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
- Level: slog.LevelDebug,
- }))
- }
type RolodexFS interface {
Open(name string) (fs.File, error)
GetFiles() map[string]RolodexFile

View File

@@ -11,6 +11,7 @@ import (
"io"
"io/fs"
"log/slog"
"os"
"path/filepath"
"strings"
"time"
@@ -132,6 +133,7 @@ func (l *LocalFile) GetErrors() []error {
type LocalFSConfig struct {
// the base directory to index
BaseDirectory string
+ Logger *slog.Logger
FileFilters []string
DirFS fs.FS
}
@@ -140,6 +142,13 @@ func NewLocalFSWithConfig(config *LocalFSConfig) (*LocalFS, error) {
localFiles := make(map[string]RolodexFile)
var allErrors []error
+ log := config.Logger
+ if log == nil {
+ log = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ }
// if the basedir is an absolute file, we're just going to index that file.
ext := filepath.Ext(config.BaseDirectory)
file := filepath.Base(config.BaseDirectory)
@@ -182,7 +191,7 @@ func NewLocalFSWithConfig(config *LocalFSConfig) (*LocalFS, error) {
abs, absErr := filepath.Abs(filepath.Join(config.BaseDirectory, p))
if absErr != nil {
readingErrors = append(readingErrors, absErr)
logger.Error("cannot create absolute path for file: ", "file", p, "error", absErr.Error())
log.Error("cannot create absolute path for file: ", "file", p, "error", absErr.Error())
}
var fileData []byte
@@ -194,13 +203,13 @@ func NewLocalFSWithConfig(config *LocalFSConfig) (*LocalFS, error) {
modTime := time.Now()
if readErr != nil {
allErrors = append(allErrors, readErr)
logger.Error("[rolodex] cannot open file: ", "file", abs, "error", readErr.Error())
log.Error("[rolodex] cannot open file: ", "file", abs, "error", readErr.Error())
return nil
}
stat, statErr := dirFile.Stat()
if statErr != nil {
allErrors = append(allErrors, statErr)
logger.Error("[rolodex] cannot stat file: ", "file", abs, "error", statErr.Error())
log.Error("[rolodex] cannot stat file: ", "file", abs, "error", statErr.Error())
}
if stat != nil {
modTime = stat.ModTime()
@@ -208,11 +217,11 @@ func NewLocalFSWithConfig(config *LocalFSConfig) (*LocalFS, error) {
fileData, readErr = io.ReadAll(dirFile)
if readErr != nil {
allErrors = append(allErrors, readErr)
logger.Error("cannot read file data: ", "file", abs, "error", readErr.Error())
log.Error("cannot read file data: ", "file", abs, "error", readErr.Error())
return nil
}
logger.Debug("collecting JSON/YAML file", "file", abs)
log.Debug("collecting JSON/YAML file", "file", abs)
localFiles[abs] = &LocalFile{
filename: p,
name: filepath.Base(p),
@@ -223,7 +232,7 @@ func NewLocalFSWithConfig(config *LocalFSConfig) (*LocalFS, error) {
readingErrors: readingErrors,
}
case UNSUPPORTED:
logger.Debug("skipping non JSON/YAML file", "file", abs)
log.Debug("skipping non JSON/YAML file", "file", abs)
}
return nil
})
@@ -234,7 +243,7 @@ func NewLocalFSWithConfig(config *LocalFSConfig) (*LocalFS, error) {
return &LocalFS{
Files: localFiles,
- logger: logger,
+ logger: log,
baseDirectory: absBaseDir,
entryPointDirectory: config.BaseDirectory,
readingErrors: allErrors,
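
For callers, the new LocalFSConfig.Logger field means a custom slog handler can be injected, while leaving it nil falls back to the JSON handler at Error level shown above. A sketch of that usage; the import path and the spec directory are assumptions for illustration, not taken from this commit:

package main

import (
	"log/slog"
	"os"

	"github.com/pb33f/libopenapi/index" // assumed module path
)

func main() {
	// a text handler at Debug level instead of the default JSON/Error fallback
	logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelDebug}))

	cfg := &index.LocalFSConfig{
		BaseDirectory: "./specs", // hypothetical directory of JSON/YAML files
		DirFS:         os.DirFS("./specs"),
		Logger:        logger,
	}

	localFS, err := index.NewLocalFSWithConfig(cfg)
	if err != nil {
		logger.Error("unable to build local filesystem", "error", err)
		return
	}
	_ = localFS // hand the filesystem to the rolodex/index as needed
}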

View File

@@ -5,13 +5,9 @@ package index
import (
"fmt"
"regexp"
"strings"
)
// var refRegex = regexp.MustCompile(`['"]?\$ref['"]?\s*:\s*['"]?([^'"]*?)['"]`)
var refRegex = regexp.MustCompile(`('\$ref'|"\$ref"|\$ref)\s*:\s*('[^']*'|"[^"]*"|\S*)`)
type RefType int
const (
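
For context, refRegex captures a quoted or bare $ref key (group 1) and its quoted or bare value (group 2). A small standalone sketch of what the expression matches; the sample document lines are invented:

package main

import (
	"fmt"
	"regexp"
)

// same expression as refRegex above
var refRegex = regexp.MustCompile(`('\$ref'|"\$ref"|\$ref)\s*:\s*('[^']*'|"[^"]*"|\S*)`)

func main() {
	doc := `
responses:
  $ref: '#/components/responses/NotFound'
schema:
  "$ref": "./shared.yaml#/components/schemas/Pet"
`
	for _, m := range refRegex.FindAllStringSubmatch(doc, -1) {
		// m[1] is the key form, m[2] the (possibly quoted) reference value
		fmt.Println(m[2])
	}
}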

View File

@@ -241,57 +241,6 @@ func (i *RemoteFS) GetErrors() []error {
return i.remoteErrors
}
- //func (i *RemoteFS) seekRelatives(file *RemoteFile) {
- //
- // extractedRefs := ExtractRefs(string(file.data))
- // if len(extractedRefs) == 0 {
- // return
- // }
- //
- // fetchChild := func(url string) {
- // _, err := i.Open(url)
- // if err != nil {
- // file.seekingErrors = append(file.seekingErrors, err)
- // i.remoteErrorLock.Lock()
- // i.remoteErrors = append(i.remoteErrors, err)
- // i.remoteErrorLock.Unlock()
- // }
- // defer i.remoteWg.Done()
- // }
- //
- // for _, ref := range extractedRefs {
- // refType := ExtractRefType(ref[1])
- // switch refType {
- // case File:
- // fileLocation, _ := ExtractRefValues(ref[1])
- // //parentDir, _ := filepath.Abs(filepath.Dir(file.fullPath))
- // var fullPath string
- // if filepath.IsAbs(fileLocation) {
- // fullPath = fileLocation
- // } else {
- // fullPath, _ = filepath.Abs(filepath.Join(filepath.Dir(file.fullPath), fileLocation))
- // }
- //
- // if f, ok := i.Files.Load(fullPath); ok {
- // i.logger.Debug("file already loaded, skipping", "file", f.(*RemoteFile).fullPath)
- // continue
- // } else {
- // i.remoteWg.Add(1)
- // go fetchChild(fullPath)
- // }
- //
- // case HTTP:
- // fmt.Printf("Found relative HTTP reference: %s\n", ref[1])
- // }
- // }
- // if !i.remoteRunning {
- // i.remoteRunning = true
- // i.remoteWg.Wait()
- // i.remoteRunning = false
- // }
- //
- //}
func (i *RemoteFS) Open(remoteURL string) (fs.File, error) {
if i.indexConfig != nil && !i.indexConfig.AllowRemoteLookup {

View File

@@ -15,6 +15,8 @@ package index
import (
"context"
"fmt"
"log/slog"
"os"
"sort"
"strings"
"sync"
@@ -42,6 +44,15 @@ func NewSpecIndexWithConfig(rootNode *yaml.Node, config *SpecIndexConfig) *SpecI
if rootNode == nil || len(rootNode.Content) <= 0 {
return index
}
+ if config.Logger != nil {
+ index.logger = config.Logger
+ } else {
+ index.logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelError,
+ }))
+ }
boostrapIndexCollections(rootNode, index)
return createNewIndex(rootNode, index, config.AvoidBuildIndex)
}
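
NewSpecIndexWithConfig now logs through an injected *slog.Logger and only falls back to a JSON handler on stdout at Error level when SpecIndexConfig.Logger is nil. A sketch of supplying a custom logger when building an index; the import path and the inline spec are assumptions for illustration:

package main

import (
	"log/slog"
	"os"

	"gopkg.in/yaml.v3"

	"github.com/pb33f/libopenapi/index" // assumed module path
)

func main() {
	spec := []byte(`openapi: 3.1.0
info:
  title: tiny
  version: 1.0.0
paths: {}`)

	var rootNode yaml.Node
	if err := yaml.Unmarshal(spec, &rootNode); err != nil {
		panic(err)
	}

	cfg := &index.SpecIndexConfig{
		Logger: slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelWarn})),
	}

	idx := index.NewSpecIndexWithConfig(&rootNode, cfg)
	_ = idx // use the index: look up references, gather statistics, etc.
}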

View File

@@ -103,7 +103,7 @@ func TestSpecIndex_DigitalOcean(t *testing.T) {
cf.AllowRemoteLookup = true
cf.AvoidCircularReferenceCheck = true
cf.Logger = slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
- Level: slog.LevelDebug,
+ Level: slog.LevelError,
}))
// setting this baseURL will override the base
@@ -188,6 +188,7 @@ func TestSpecIndex_DigitalOcean_FullCheckoutLocalResolve(t *testing.T) {
fsCfg := LocalFSConfig{
BaseDirectory: cf.BasePath,
DirFS: os.DirFS(cf.BasePath),
+ Logger: cf.Logger,
}
// create a new local filesystem.
@@ -682,7 +683,7 @@ func TestSpecIndex_TestEmptyBrokenReferences(t *testing.T) {
assert.Equal(t, 2, index.GetOperationsParameterCount())
assert.Equal(t, 1, index.GetInlineDuplicateParamCount())
assert.Equal(t, 1, index.GetInlineUniqueParamCount())
- assert.Len(t, index.refErrors, 5)
+ assert.Len(t, index.refErrors, 6)
}
func TestTagsNoDescription(t *testing.T) {