Mirror of https://github.com/LukeHagar/slinky.git (synced 2025-12-06 04:21:20 +00:00)
Introduce a new .slinkignore file format to allow users to specify paths and URLs to ignore during scanning. Update the CollectURLs and CollectURLsProgress functions to respect these ignore rules. Add tests to verify the functionality of the .slinkignore file, ensuring that specified paths and URLs are excluded from results. Update README.md to document the new feature and its usage.
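For illustration only (the actual .slinkignore syntax is documented in the repository's README.md, not in this file), such a file would plausibly list the paths and URLs to skip, one per line, for example:

    docs/archive/
    https://example.com/health-check

Entries like these would then be honored by CollectURLs and CollectURLsProgress per the change described above; the exact pattern rules are an assumption here.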
177 lines
4.6 KiB
Go
package report

import (
	"bytes"
	"fmt"
	"html"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"time"

	"slinky/internal/web"
)

// Summary captures high-level run details for the report.
type Summary struct {
	RootPath        string
	StartedAt       time.Time
	FinishedAt      time.Time
	Processed       int
	OK              int
	Fail            int
	AvgRPS          float64
	PeakRPS         float64
	LowRPS          float64
	FilesScanned    int
	JSONPath        string
	RepoBlobBaseURL string // e.g. https://github.com/owner/repo/blob/<sha>
}

// WriteMarkdown writes a GitHub-flavored Markdown report to path. If path is empty,
// it derives a safe filename from s.RootPath.
func WriteMarkdown(path string, results []web.Result, s Summary) (string, error) {
	if strings.TrimSpace(path) == "" {
		base := filepath.Base(s.RootPath)
		if strings.TrimSpace(base) == "" || base == "." || base == string(filepath.Separator) {
			base = "results"
		}
		// Sanitize the base name: keep [a-z0-9-_.] and replace everything else with '_'.
		var b strings.Builder
		for _, r := range strings.ToLower(base) {
			if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '-' || r == '_' || r == '.' {
				b.WriteRune(r)
			} else {
				b.WriteByte('_')
			}
		}
		path = fmt.Sprintf("%s.md", b.String())
	}

	var buf bytes.Buffer
	// Title
	buf.WriteString("## Slinky Test Report\n\n")

	// Last run (not in the bullet list). Render in US Central Time.
	dur := s.FinishedAt.Sub(s.StartedAt)
	if dur < 0 {
		dur = 0
	}
	loc, tzErr := time.LoadLocation("America/Chicago")
	if tzErr != nil || loc == nil {
		loc = time.FixedZone("CST", -6*60*60)
	}
	buf.WriteString(fmt.Sprintf("Last Run: %s (Duration: %s)\n\n", s.StartedAt.In(loc).Format("2006-01-02 15:04:05 MST"), dur.Truncate(time.Millisecond)))

	// Summary list: Pass, Fail, Total
	buf.WriteString(fmt.Sprintf("- **Pass**: %d\n", s.OK))
	buf.WriteString(fmt.Sprintf("- **Fail**: %d\n", s.Fail))
	buf.WriteString(fmt.Sprintf("- **Total**: %d\n", s.Processed))
	if s.FilesScanned > 0 {
		buf.WriteString(fmt.Sprintf("- **Files Scanned**: %d\n", s.FilesScanned))
	}

	// Optional root
	if strings.TrimSpace(s.RootPath) != "." && strings.TrimSpace(s.RootPath) != "" && s.RootPath != string(filepath.Separator) {
		buf.WriteString(fmt.Sprintf("- **Root**: %s\n", escapeMD(s.RootPath)))
	}

	// Rates only if non-zero
	if !(s.AvgRPS == 0 && s.PeakRPS == 0 && s.LowRPS == 0) {
		buf.WriteString(fmt.Sprintf("- **Rates**: avg %.1f/s • peak %.1f/s • low %.1f/s\n", s.AvgRPS, s.PeakRPS, s.LowRPS))
	}

	buf.WriteString("\n")

	// If no failures, show message and finish
	if len(results) == 0 {
		buf.WriteString("No issues found. ✅\n")
		f, err := os.Create(path)
		if err != nil {
			return "", err
		}
		defer f.Close()
		if _, err := f.Write(buf.Bytes()); err != nil {
			return "", err
		}
		return path, nil
	}

	// Failures by URL
	buf.WriteString("### Failures by URL\n\n")

	// Gather issues per URL with list of files
	type fileRef struct{ Path string }
	type urlIssue struct {
		Status int
		Method string
		ErrMsg string
		Files  []fileRef
	}
	byURL := make(map[string]*urlIssue)
	for _, r := range results {
		ui, ok := byURL[r.URL]
		if !ok {
			ui = &urlIssue{Status: r.Status, Method: r.Method, ErrMsg: r.ErrMsg}
			byURL[r.URL] = ui
		}
		for _, src := range r.Sources {
			ui.Files = append(ui.Files, fileRef{Path: src})
		}
	}

	// Sort URLs
	var urls []string
	for u := range byURL {
		urls = append(urls, u)
	}
	sort.Strings(urls)

	for _, u := range urls {
		ui := byURL[u]
		if ui.Status > 0 {
			buf.WriteString(fmt.Sprintf("- %d %s `%s` — %s\n", ui.Status, escapeMD(ui.Method), escapeMD(u), escapeMD(ui.ErrMsg)))
		} else {
			buf.WriteString(fmt.Sprintf("- %s `%s` — %s\n", escapeMD(ui.Method), escapeMD(u), escapeMD(ui.ErrMsg)))
		}
		// Deduplicate and sort the source files listed under this URL.
		seen := make(map[string]struct{})
		var files []string
		for _, fr := range ui.Files {
			if _, ok := seen[fr.Path]; ok {
				continue
			}
			seen[fr.Path] = struct{}{}
			files = append(files, fr.Path)
		}
		sort.Strings(files)
		for _, fn := range files {
			if strings.TrimSpace(s.RepoBlobBaseURL) != "" {
				buf.WriteString(fmt.Sprintf(" - [%s](%s/%s)\n", escapeMD(fn), strings.TrimRight(s.RepoBlobBaseURL, "/"), escapeLinkPath(fn)))
			} else {
				buf.WriteString(fmt.Sprintf(" - [%s](./%s)\n", escapeMD(fn), escapeLinkPath(fn)))
			}
		}
	}

	f, err := os.Create(path)
	if err != nil {
		return "", err
	}
	defer f.Close()
	if _, err := f.Write(buf.Bytes()); err != nil {
		return "", err
	}
	return path, nil
}
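
// Example usage (an illustrative sketch, not part of the original source): the
// web.Result fields shown here (URL, Status, Method, ErrMsg, Sources) are inferred
// from how WriteMarkdown reads them above; the concrete values are made up.
//
//	results := []web.Result{{
//		URL:     "https://example.com/broken",
//		Status:  404,
//		Method:  "GET",
//		ErrMsg:  "404 Not Found",
//		Sources: []string{"docs/guide.md"},
//	}}
//	sum := report.Summary{
//		RootPath:   "docs",
//		StartedAt:  time.Now().Add(-2 * time.Second),
//		FinishedAt: time.Now(),
//		Processed:  10,
//		OK:         9,
//		Fail:       1,
//	}
//	// An empty path derives the filename from RootPath, here "docs.md".
//	out, err := report.WriteMarkdown("", results, sum)
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println("wrote", out)
//
// For inputs like these, the generated Markdown would look roughly like
// (timestamp and duration are illustrative):
//
//	## Slinky Test Report
//
//	Last Run: 2025-01-02 15:04:05 CST (Duration: 2s)
//
//	- **Pass**: 9
//	- **Fail**: 1
//	- **Total**: 10
//	- **Root**: docs
//
//	### Failures by URL
//
//	- 404 GET `https://example.com/broken` — 404 Not Found
//	 - [docs/guide.md](./docs/guide.md)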

// escapeMD escapes text for safe embedding in the Markdown report (HTML entity escaping).
func escapeMD(s string) string {
	return html.EscapeString(s)
}

// escapeLinkPath percent-encodes characters that would otherwise break a Markdown link target.
func escapeLinkPath(p string) string {
	p = strings.ReplaceAll(p, " ", "%20")
	p = strings.ReplaceAll(p, "(", "%28")
	p = strings.ReplaceAll(p, ")", "%29")
	return p
}