Add watch mode functionality to Slinky for real-time link checking
This update introduces a watch mode that monitors file changes and automatically re-scans for broken links. During development this gives real-time feedback, so broken links are caught as soon as a file is modified. The README has been updated with usage instructions and details of the new watch mode.
README.md (+39)
@@ -36,6 +36,7 @@ jobs:
- **fail-on-failures**: Fail job on any broken links. Default: `true`
- **comment-pr**: Post Markdown as a PR comment when applicable. Default: `true`
- **step-summary**: Append report to the job summary. Default: `true`
- **watch**: Watch for file changes and automatically re-scan (CLI only). Default: `false`

### Output links in PRs

@@ -58,11 +59,49 @@ slinky check ./docs/**/* ./markdown/**/*

# TUI mode: same targets
slinky run **/*

# Watch mode: automatically re-scan on file changes
slinky run --watch **/*
```

Notes:
- Targets can be files, directories, or doublestar globs. Multiple targets are allowed.
- If no targets are provided, the default is `**/*` relative to the current working directory.
- Watch mode monitors file changes and automatically re-scans when files are modified.

### Watch Mode

Watch mode provides real-time link checking by monitoring file changes and automatically re-scanning when files are modified. This is particularly useful during development when you want to ensure links remain valid as you edit files.

**Features:**
- **Automatic Re-scanning**: Detects file changes and triggers new scans automatically
- **Sequential Processing**: Completes file scanning before starting URL checking for accurate counts
- **Real-time Updates**: Shows live progress as files are scanned and URLs are checked
- **Configuration Monitoring**: Watches `.slinkignore` files and re-scans when configuration changes
- **Clean State Management**: Each re-scan starts with a fresh state and accurate file counts

**Usage:**
```bash
# Watch all files in current directory
slinky run --watch

# Watch specific directories or files
slinky run --watch docs/ README.md

# Watch with glob patterns
slinky run --watch "**/*.md" "**/*.yaml"
```

**Controls:**
- `q` or `Ctrl+C`: Quit watch mode
- `f`: Toggle display of failed links only

**How it works:**
1. **Initial Scan**: Performs a complete scan of all target files
2. **File Monitoring**: Watches for changes to files matching the target patterns
3. **Configuration Monitoring**: Also watches `.slinkignore` files for configuration changes
4. **Automatic Re-scan**: When changes are detected, cancels the current scan and starts a fresh one
5. **Clean Restart**: Each re-scan resets counters and provides accurate file counts

### Notes

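The watching behavior described in the README above boils down to an fsnotify loop that recursively watches the scan root and triggers a re-scan on write events to matching files or to `.slinkignore`. The sketch below is illustrative only; the `watch` helper, `rescan` callback, and `main` wiring are assumptions for the example, not slinky code (the actual implementation is in the `startWatcher` hunk further down).

```go
package main

import (
	"log"
	"os"
	"path/filepath"

	"github.com/bmatcuk/doublestar/v4"
	"github.com/fsnotify/fsnotify"
)

// watch recursively watches root and calls rescan when a file matching one of
// the globs (or a .slinkignore file) is written to.
func watch(root string, globs []string, rescan func(changed string)) error {
	w, err := fsnotify.NewWatcher()
	if err != nil {
		return err
	}
	defer w.Close()

	// Watch the root and every subdirectory, mirroring the filepath.Walk in the diff.
	if err := filepath.Walk(root, func(path string, info os.FileInfo, walkErr error) error {
		if walkErr != nil {
			return walkErr
		}
		if info.IsDir() {
			return w.Add(path)
		}
		return nil
	}); err != nil {
		return err
	}

	for {
		select {
		case ev, ok := <-w.Events:
			if !ok {
				return nil
			}
			if ev.Op&fsnotify.Write != fsnotify.Write {
				continue // only react to file modifications
			}
			if filepath.Base(ev.Name) == ".slinkignore" {
				rescan(ev.Name) // configuration change: always re-scan
				continue
			}
			rel, err := filepath.Rel(root, ev.Name)
			if err != nil {
				continue
			}
			rel = filepath.ToSlash(rel)
			for _, pat := range globs {
				if matched, _ := doublestar.PathMatch(pat, rel); matched {
					rescan(ev.Name)
					break
				}
			}
		case err, ok := <-w.Errors:
			if !ok {
				return nil
			}
			log.Println("watch error:", err)
		}
	}
}

func main() {
	// Hypothetical wiring: log whenever a Markdown file under the current
	// directory is modified (a real caller would trigger a re-scan instead).
	if err := watch(".", []string{"**/*.md"}, func(p string) { log.Println("changed, would re-scan:", p) }); err != nil {
		log.Fatal(err)
	}
}
```
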
@@ -45,7 +45,7 @@ func init() {
}
}

return tui.Run(root, gl, cfg, jsonOut, mdOut)
return tui.Run(root, gl, cfg, jsonOut, mdOut, watchMode)
},
}

@@ -53,6 +53,7 @@ func init() {
runCmd.Flags().StringVar(&jsonOut, "json-out", "", "path to write full JSON results (array)")
runCmd.Flags().StringVar(&mdOut, "md-out", "", "path to write Markdown report for PR comment")
runCmd.Flags().StringVar(&repoBlobBase, "repo-blob-base", "", "override GitHub blob base URL (e.g. https://github.com/owner/repo/blob/<sha>)")
runCmd.Flags().BoolVar(&watchMode, "watch", false, "watch for file changes and automatically re-scan")
rootCmd.AddCommand(runCmd)
}

@@ -60,4 +61,5 @@ var (
maxConcurrency int
jsonOut string
mdOut string
watchMode bool
)

go.mod (+1)
@@ -21,6 +21,7 @@ require (
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect

go.sum (+2)
@@ -23,6 +23,8 @@ github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=

@@ -377,7 +377,13 @@ func CollectURLsProgressWithIgnoreConfig(rootPath string, globs []string, respec
if err != nil {
return nil
}
rel, rerr := filepath.Rel(cleanRoot, path)
// Compute relative path from current working directory, not from cleanRoot
// This ensures file paths in the report are relative to where the command was run
wd, wderr := os.Getwd()
if wderr != nil {
wd = "."
}
rel, rerr := filepath.Rel(wd, path)
if rerr != nil {
rel = path
}

@@ -934,7 +940,13 @@ func CollectURLsV2(rootPath string, globs []string, respectGitignore bool, ignor
return nil
}

rel, rerr := filepath.Rel(cleanRoot, path)
// Compute relative path from current working directory, not from cleanRoot
// This ensures file paths in the report are relative to where the command was run
wd, wderr := os.Getwd()
if wderr != nil {
wd = "."
}
rel, rerr := filepath.Rel(wd, path)
if rerr != nil {
rel = path
}

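To make the intent of the relativization change concrete, here is a small, self-contained illustration; the working directory and paths are hypothetical examples, not values from the repository.

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Hypothetical values: the tool was started in /repo and asked to scan ./docs.
	wd := "/repo"             // what os.Getwd() would return in this scenario
	cleanRoot := "/repo/docs" // the cleaned scan root
	path := "/repo/docs/guide/intro.md"

	oldRel, _ := filepath.Rel(cleanRoot, path) // "guide/intro.md" (root-relative, old behavior)
	newRel, _ := filepath.Rel(wd, path)        // "docs/guide/intro.md" (cwd-relative, new behavior)

	fmt.Println(oldRel, newRel)
}
```
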
@@ -162,7 +162,10 @@ func WriteMarkdown(path string, results []web.Result, s Summary) (string, error)
if strings.TrimSpace(s.RepoBlobBaseURL) != "" {
buf.WriteString(fmt.Sprintf(" - [%s](%s/%s)\n", escapeMD(display), strings.TrimRight(s.RepoBlobBaseURL, "/"), linkPath))
} else {
buf.WriteString(fmt.Sprintf(" - [%s](./%s)\n", escapeMD(display), linkPath))
// For local file links, the file paths in Sources are already relative to the working directory
// They are computed by the merge function in check.go which combines the target directory with the relative file path
// So we can use the linkPath directly
buf.WriteString(fmt.Sprintf(" - [%s](%s)\n", escapeMD(display), linkPath))
}
}
}

@@ -10,11 +10,13 @@ import (
"strings"
"time"

"github.com/bmatcuk/doublestar/v4"
"github.com/charmbracelet/bubbles/progress"
"github.com/charmbracelet/bubbles/spinner"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
"github.com/fsnotify/fsnotify"

"slinky/internal/report"
"slinky/internal/web"

@@ -26,13 +28,16 @@ type statsMsg struct{ s web.Stats }
type tickMsg struct{ t time.Time }

type fileScannedMsg struct{ rel string }
type fileChangedMsg struct{ path string }
type watchErrorMsg struct{ err error }

type model struct {
rootPath string
cfg web.Config
jsonOut string
mdOut string
globs []string
rootPath string
cfg web.Config
jsonOut string
mdOut string
globs []string
watchMode bool

results chan web.Result
stats chan web.Stats

@@ -64,11 +69,21 @@ type model struct {
mdPath string

showFail bool

// Context for canceling scans
scanCtx context.Context
scanCancel context.CancelFunc

// Flag to prevent file counting after scan is done
scanDone bool

// Channel to signal when scan is completely done
scanComplete chan struct{}
}

// Run scans files under rootPath matching globs, extracts URLs, and checks them.
func Run(rootPath string, globs []string, cfg web.Config, jsonOut string, mdOut string) error {
m := &model{rootPath: rootPath, cfg: cfg, jsonOut: jsonOut, mdOut: mdOut, globs: globs}
func Run(rootPath string, globs []string, cfg web.Config, jsonOut string, mdOut string, watchMode bool) error {
m := &model{rootPath: rootPath, cfg: cfg, jsonOut: jsonOut, mdOut: mdOut, globs: globs, watchMode: watchMode}
p := tea.NewProgram(m, tea.WithAltScreen())
return p.Start()
}

@@ -80,24 +95,18 @@ func (m *model) Init() tea.Cmd {
m.prog = progress.New(progress.WithDefaultGradient())
m.started = time.Now()
m.lowRPS = -1
m.scanDone = false
m.scanComplete = make(chan struct{})
m.results = make(chan web.Result, 256)
m.stats = make(chan web.Stats, 64)

ctx, cancel := context.WithCancel(context.Background())
go func() {
defer cancel()
urlsMap, _ := fsCollectProgress(m.rootPath, m.globs, func(rel string) {
m.filesScanned++
// Emit a short event line per file to show activity
m.lines = append(m.lines, fmt.Sprintf("📄 %s", rel))
m.refreshViewport()
})
var urls []string
for u := range urlsMap {
urls = append(urls, u)
}
web.CheckURLs(ctx, urls, urlsMap, m.results, m.stats, m.cfg)
}()
// Start initial scan
m.startScan()

// Start file watcher if in watch mode
if m.watchMode {
return tea.Batch(m.spin.Tick, m.waitForEvent(), tickCmd(), m.startWatcher())
}

return tea.Batch(m.spin.Tick, m.waitForEvent(), tickCmd())
}

@@ -106,6 +115,168 @@ func tickCmd() tea.Cmd {
return tea.Tick(time.Second, func(t time.Time) tea.Msg { return tickMsg{t: t} })
}

func (m *model) startScan() {
// Cancel previous scan if it exists
if m.scanCancel != nil {
m.scanCancel()
}

// Create new context for this scan
m.scanCtx, m.scanCancel = context.WithCancel(context.Background())
m.scanComplete = make(chan struct{})

go func() {
defer func() {
m.scanCancel()
// Signal that scan is completely done
if m.scanComplete != nil {
close(m.scanComplete)
}
}()

// Phase 1: Complete file scanning first
select {
case <-m.scanCtx.Done():
return
default:
}

m.lines = append(m.lines, "🔍 Scanning files...")
m.refreshViewport()

urlsMap, _ := fsCollectProgress(m.rootPath, m.globs, func(rel string) {
// Check context before processing each file
select {
case <-m.scanCtx.Done():
return
default:
}
m.filesScanned++
// Emit a short event line per file to show activity
m.lines = append(m.lines, fmt.Sprintf("📄 %s", rel))
m.refreshViewport()
})

// File scanning is complete - set the flag
m.scanDone = true
m.lines = append(m.lines, fmt.Sprintf("✅ File scanning complete: %d files scanned", m.filesScanned))
m.refreshViewport()

// Check context before starting URL checking
select {
case <-m.scanCtx.Done():
return
default:
}

// Phase 2: Now start URL checking
m.lines = append(m.lines, "🌐 Checking URLs...")
m.refreshViewport()

var urls []string
for u := range urlsMap {
urls = append(urls, u)
}
web.CheckURLs(m.scanCtx, urls, urlsMap, m.results, m.stats, m.cfg)
}()
}

func (m *model) findSlinkyConfig(root string) string {
cur := root
for {
cfg := filepath.Join(cur, ".slinkignore")
if st, err := os.Stat(cfg); err == nil && !st.IsDir() {
return cfg
}
parent := filepath.Dir(cur)
if parent == cur || strings.TrimSpace(parent) == "" {
break
}
cur = parent
}
return ""
}

func (m *model) startWatcher() tea.Cmd {
return func() tea.Msg {
watcher, err := fsnotify.NewWatcher()
if err != nil {
return watchErrorMsg{err: err}
}
defer watcher.Close()

// Add the root path and all subdirectories to the watcher
err = filepath.Walk(m.rootPath, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return watcher.Add(path)
}
return nil
})
if err != nil {
return watchErrorMsg{err: err}
}

// Also watch for .slinkignore files by searching upward from root
slinkignorePath := m.findSlinkyConfig(m.rootPath)
if slinkignorePath != "" {
// Watch the directory containing the .slinkignore file
slinkignoreDir := filepath.Dir(slinkignorePath)
if err := watcher.Add(slinkignoreDir); err != nil {
// If we can't watch the .slinkignore directory, continue without it
// This is not a critical error
}
}

for {
select {
case event, ok := <-watcher.Events:
if !ok {
return nil
}
// Only watch for write events (file modifications)
if event.Op&fsnotify.Write == fsnotify.Write {
// Check if it's a .slinkignore file change
if filepath.Base(event.Name) == ".slinkignore" {
return fileChangedMsg{path: event.Name}
}

// Check if the file matches our glob patterns
rel, err := filepath.Rel(m.rootPath, event.Name)
if err != nil {
continue
}
rel = filepath.ToSlash(rel)

// Check if the file matches any of our glob patterns
matches := false
if len(m.globs) == 0 {
matches = true
} else {
for _, pattern := range m.globs {
if matched, _ := doublestar.PathMatch(pattern, rel); matched {
matches = true
break
}
}
}

if matches {
return fileChangedMsg{path: event.Name}
}
}
case err, ok := <-watcher.Errors:
if !ok {
return nil
}
return watchErrorMsg{err: err}
}
}
}
}

func (m *model) waitForEvent() tea.Cmd {
return func() tea.Msg {
if m.results == nil {

@@ -128,11 +299,60 @@ func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
case tea.KeyMsg:
switch msg.String() {
case "q", "ctrl+c":
// Clean up resources before quitting
if m.scanCancel != nil {
m.scanCancel()
}
return m, tea.Quit
case "f":
m.showFail = !m.showFail
m.refreshViewport()
return m, nil
case "r":
// Manual rescan - only available when scan is done
if m.done {
m.lines = append(m.lines, "🔄 Manual rescan triggered")
m.refreshViewport()

// Cancel previous scan and wait for cleanup
if m.scanCancel != nil {
m.scanCancel()
// Wait for the previous scan to completely finish
if m.scanComplete != nil {
select {
case <-m.scanComplete:
// Previous scan is done
case <-time.After(2 * time.Second):
// Timeout after 2 seconds
}
}
}

// Reset counters and start new scan
m.total = 0
m.ok = 0
m.fail = 0
m.processed = 0
m.lastProcessed = 0
m.filesScanned = 0
m.allResults = nil
m.started = time.Now()
m.finishedAt = time.Time{}
m.done = false
m.scanDone = false
m.results = make(chan web.Result, 256)
m.stats = make(chan web.Stats, 64)

// Clear the lines to start fresh (but keep the rescan notification)
rescanLine := m.lines[len(m.lines)-1] // Keep the last line (the rescan notification)
m.lines = []string{rescanLine}
// Reset viewport to prevent slice bounds error
m.vp.SetContent("")
m.vp.GotoTop()
m.startScan()
return m, m.waitForEvent()
}
return m, nil
}
case tea.WindowSizeMsg:
// Reserve space for header (1), stats (1), progress (1), spacer (1), footer (1)

@@ -188,7 +408,62 @@ func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
m.results = nil
m.writeJSON()
m.writeMarkdown()
if m.watchMode {
// In watch mode, don't quit, just wait for more changes
return m, m.startWatcher()
}
return m, tea.Quit
case fileChangedMsg:
// File changed, restart the scan
fileName := filepath.Base(msg.path)
if fileName == ".slinkignore" {
m.lines = append(m.lines, fmt.Sprintf("⚙️ .slinkignore changed: %s", msg.path))
} else {
m.lines = append(m.lines, fmt.Sprintf("🔄 File changed: %s", msg.path))
}
m.refreshViewport()

// Cancel previous scan and wait for cleanup
if m.scanCancel != nil {
m.scanCancel()
// Wait for the previous scan to completely finish
if m.scanComplete != nil {
select {
case <-m.scanComplete:
// Previous scan is done
case <-time.After(2 * time.Second):
// Timeout after 2 seconds
}
}
}

// Reset counters and start new scan
m.total = 0
m.ok = 0
m.fail = 0
m.processed = 0
m.lastProcessed = 0
m.filesScanned = 0
m.allResults = nil
m.started = time.Now()
m.finishedAt = time.Time{}
m.done = false
m.scanDone = false
m.results = make(chan web.Result, 256)
m.stats = make(chan web.Stats, 64)

// Clear the lines to start fresh (but keep the change notification)
changeLine := m.lines[len(m.lines)-1] // Keep the last line (the change notification)
m.lines = []string{changeLine}
// Reset viewport to prevent slice bounds error
m.vp.SetContent("")
m.vp.GotoTop()
m.startScan()
return m, m.waitForEvent()
case watchErrorMsg:
m.lines = append(m.lines, fmt.Sprintf("❌ Watch error: %v", msg.err))
m.refreshViewport()
return m, nil
}

var cmd tea.Cmd

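The rescan paths above share a cancel-then-wait handoff: cancel the in-flight scan's context, then block on `scanComplete` (with a timeout) before resetting counters and starting a new scan. The following is a stripped-down, standalone sketch of that pattern with placeholder work standing in for the real scan; it is not code from the repository.

```go
package main

import (
	"context"
	"fmt"
	"time"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	scanComplete := make(chan struct{})

	// Stand-in for the scan goroutine: loop until canceled, then signal completion.
	go func() {
		defer close(scanComplete)
		for {
			select {
			case <-ctx.Done():
				return
			case <-time.After(100 * time.Millisecond):
				fmt.Println("scanning...")
			}
		}
	}()

	// A file change arrives: cancel the old scan and wait for it to wind down.
	time.Sleep(250 * time.Millisecond)
	cancel()
	select {
	case <-scanComplete:
		fmt.Println("previous scan finished; safe to reset state and restart")
	case <-time.After(2 * time.Second):
		fmt.Println("timed out waiting for the previous scan")
	}
}
```
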
@@ -274,7 +549,11 @@ func (m *model) writeMarkdown() {
}

func (m *model) View() string {
header := lipgloss.NewStyle().Bold(true).Render(fmt.Sprintf(" Scanning %s ", m.rootPath))
headerText := fmt.Sprintf(" Scanning %s ", m.rootPath)
if m.watchMode {
headerText = fmt.Sprintf(" Scanning %s (WATCH MODE) ", m.rootPath)
}
header := lipgloss.NewStyle().Bold(true).Render(headerText)
if m.done {
dur := time.Since(m.started)
if !m.finishedAt.IsZero() {

@@ -296,7 +575,8 @@ func (m *model) View() string {
if m.mdPath != "" {
summary = append(summary, fmt.Sprintf("Markdown: %s", m.mdPath))
}
footer := lipgloss.NewStyle().Faint(true).Render("Controls: [q] quit [f] toggle fails")
footerText := "Controls: [q] quit [f] toggle fails [r] rescan"
footer := lipgloss.NewStyle().Faint(true).Render(footerText)
container := lipgloss.NewStyle().Padding(1)
return container.Render(strings.Join(append([]string{header}, append(summary, footer)...), "\n"))
}

@@ -308,7 +588,8 @@ func (m *model) View() string {
progressLine := m.prog.ViewAs(percent)
stats := fmt.Sprintf("%s total:%d ok:%d fail:%d pending:%d processed:%d rps:%.1f/s files:%d", m.spin.View(), m.total, m.ok, m.fail, m.pending, m.processed, m.rps, m.filesScanned)
body := m.vp.View()
footer := lipgloss.NewStyle().Faint(true).Render("Controls: [q] quit [f] toggle fails")
footerText := "Controls: [q] quit [f] toggle fails"
footer := lipgloss.NewStyle().Faint(true).Render(footerText)
container := lipgloss.NewStyle().Padding(1)
return container.Render(strings.Join([]string{header, stats, progressLine, "", body, footer}, "\n"))
}

@@ -68,11 +68,25 @@ func CheckURLs(ctx context.Context, urls []string, sources map[string][]string,
ok = true
err = nil
}
// Check context before sending result
select {
case <-ctx.Done():
return
default:
}

var srcs []string
if sources != nil {
srcs = sources[j.url]
}
out <- Result{URL: j.url, OK: ok, Status: status, Err: err, ErrMsg: errString(err), Method: http.MethodGet, Sources: cloneAndSort(srcs)}

// Send result with context check
select {
case out <- Result{URL: j.url, OK: ok, Status: status, Err: err, ErrMsg: errString(err), Method: http.MethodGet, Sources: cloneAndSort(srcs)}:
case <-ctx.Done():
return
}

processed++
pending--
if stats != nil {
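The change above replaces a plain channel send with a select against `ctx.Done()`, so a worker that is mid-send does not block forever once the scan has been canceled and the receiver is gone. Below is a minimal, self-contained illustration of that send pattern; the `Result` type, `produce` function, and URLs are placeholders, not the package's types.

```go
package main

import (
	"context"
	"fmt"
	"time"
)

type Result struct{ URL string }

func produce(ctx context.Context, out chan<- Result) {
	for i := 0; ; i++ {
		r := Result{URL: fmt.Sprintf("https://example.com/%d", i)}
		// Sending inside a select means a canceled run never blocks forever
		// on a receiver that has stopped reading.
		select {
		case out <- r:
		case <-ctx.Done():
			fmt.Println("producer: canceled, exiting")
			return
		}
	}
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	out := make(chan Result)
	go produce(ctx, out)

	fmt.Println("received:", (<-out).URL)
	cancel()
	time.Sleep(50 * time.Millisecond) // give the producer time to observe cancellation
}
```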