Merged
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.1.4] - 2026-01-15

### Changed
- Optimized Dumpster Fire signal by implementing time-based scanning for improved efficiency


## [1.1.3] - 2026-01-03

### Changed
2 changes: 1 addition & 1 deletion README.md
@@ -47,7 +47,7 @@ $ dashlights --details

### Security Checks

Dashlights performs **38 concurrent security checks** across five categories: Identity & Access Management, Operational Security, Repository Hygiene, System Health, and Infrastructure Security.
Dashlights performs **37 concurrent security checks** across five categories: Identity & Access Management, Operational Security, Repository Hygiene, System Health, and Infrastructure Security.

👉 **[View the complete list of security signals →](SIGNALS.md)**
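The README describes these checks as concurrent. As a rough illustration of what fanning out independent checks can look like, here is a minimal sketch — not Dashlights' actual runner — assuming only the shapes visible in this PR: a `Signal` with `Check(ctx context.Context) bool` and `Remediation() string`, and a registry that returns `[]Signal`.

```go
// Minimal sketch of fanning out independent checks, assuming only the
// shapes visible in this PR; NOT the actual Dashlights runner.
package main

import (
	"context"
	"fmt"
	"sync"
	"time"
)

// Signal is a hypothetical stand-in for the interface implied by registry.go.
type Signal interface {
	Check(ctx context.Context) bool
	Remediation() string
}

// runChecks runs every check concurrently under one overall deadline and
// returns the signals that fired.
func runChecks(ctx context.Context, signals []Signal) []Signal {
	ctx, cancel := context.WithTimeout(ctx, 100*time.Millisecond) // illustrative budget
	defer cancel()

	var (
		mu  sync.Mutex
		lit []Signal
		wg  sync.WaitGroup
	)
	for _, s := range signals {
		wg.Add(1)
		go func(s Signal) {
			defer wg.Done()
			if s.Check(ctx) {
				mu.Lock()
				lit = append(lit, s)
				mu.Unlock()
			}
		}(s)
	}
	wg.Wait()
	return lit
}

func main() {
	fmt.Println(len(runChecks(context.Background(), nil))) // 0 with an empty registry
}
```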

1 change: 0 additions & 1 deletion SIGNALS.md
@@ -56,4 +56,3 @@ Dashlights performs over 35 concurrent security checks, organized into six categ
## Data Sprawl

37. 🗑️ **[Dumpster Fire](docs/signals/dumpster_fire.md)** - Detects sensitive files (dumps, logs, keys) in hot zones (Downloads, Desktop, $PWD, /tmp) [[code](src/signals/dumpster_fire.go)]
38. 🦴 **[Rotting Secrets](docs/signals/rotting_secrets.md)** - Detects old (>7 days) sensitive files that may have been forgotten [[code](src/signals/rotting_secrets.go)]
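The Dumpster Fire entry above relies on name-only pattern matching for speed. The following sketch illustrates that idea in isolation; the glob list is invented for illustration, and the real patterns come from `filestat.DefaultSensitivePatterns()`, which this diff does not show.

```go
// Illustrative only: name-based matching against a hypothetical pattern list.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// sensitiveGlobs is an assumed example set, not the project's real pattern list.
var sensitiveGlobs = []string{"*.sql", "*.dump", "*.pem", "*.key", "*.log"}

func looksSensitive(name string) bool {
	for _, g := range sensitiveGlobs {
		if ok, _ := filepath.Match(g, name); ok {
			return true
		}
	}
	return false
}

func main() {
	entries, _ := os.ReadDir(os.TempDir()) // one "hot zone"; errors ignored for brevity
	for _, e := range entries {
		if !e.IsDir() && looksSensitive(e.Name()) {
			fmt.Println("possible data sprawl:", e.Name())
		}
	}
}
```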
144 changes: 0 additions & 144 deletions docs/signals/rotting_secrets.md

This file was deleted.

84 changes: 59 additions & 25 deletions src/signals/dumpster_fire.go
@@ -5,10 +5,15 @@ import (
"fmt"
"os"
"strings"
"time"

"github.com/erichs/dashlights/src/signals/internal/filestat"
)

// dumpsterFireBudget is the total time budget for scanning all hot zones.
// With parallel scanning, this is wall-clock time, not cumulative.
const dumpsterFireBudget = 8 * time.Millisecond

// DumpsterFireSignal detects sensitive-looking files in user "hot zones"
// where data sprawl commonly accumulates: Downloads, Desktop, $PWD, and /tmp.
// This is a coarse-grained check using name-only pattern matching for performance.
@@ -48,7 +53,16 @@ func (s *DumpsterFireSignal) Remediation() string {
return "Review and remove/secure database dumps, logs, and key files from these locations"
}

// dirScanResult holds results from scanning a single directory.
type dirScanResult struct {
dir string
result filestat.ScanResult
err error
}

// Check scans hot-zone directories for sensitive-looking files.
// Directories are scanned in parallel with a global 8ms time budget.
// This is adaptive: fast systems scan more entries, slow systems scan fewer.
func (s *DumpsterFireSignal) Check(ctx context.Context) bool {
// Check if this signal is disabled via environment variable
if os.Getenv("DASHLIGHTS_DISABLE_DUMPSTER_FIRE") != "" {
@@ -59,41 +73,61 @@ func (s *DumpsterFireSignal) Check(ctx context.Context) bool {
s.dirCounts = make(map[string]int)
s.foundPaths = nil

// Create a time-budgeted context for all parallel scans
scanCtx, cancel := context.WithTimeout(ctx, dumpsterFireBudget)
defer cancel()

patterns := filestat.DefaultSensitivePatterns()
dirs := filestat.GetHotZoneDirectories()
config := filestat.DefaultScanConfig()

// Track unique files to avoid double-counting when $PWD overlaps with other dirs
seenPaths := make(map[string]bool)
// Use time-based config: no entry limits, just the context deadline
config := filestat.ScanConfig{
MaxMatches: 10, // Still cap matches per directory (we've proven the point)
MaxEntries: 0, // No entry limit - use time budget instead
Timeout: 0, // No per-dir timeout - use global budget via context
}

// Launch parallel scans for all directories
resultCh := make(chan dirScanResult, len(dirs))

for _, dir := range dirs {
// Check context cancellation
select {
case <-ctx.Done():
return false
default:
}
go func(d string) {
// Skip directories that don't exist
if _, err := os.Stat(d); os.IsNotExist(err) {
resultCh <- dirScanResult{d, filestat.ScanResult{}, err}
return
}

// Skip directories that don't exist
if _, err := os.Stat(dir); os.IsNotExist(err) {
continue
}
result, err := patterns.ScanDirectory(scanCtx, d, config)
resultCh <- dirScanResult{d, result, err}
}(dir)
}

result, err := patterns.ScanDirectory(ctx, dir, config)
if err != nil {
continue // Skip directories we can't read
}
// Track unique files to avoid double-counting when $PWD overlaps with other dirs
seenPaths := make(map[string]bool)

for _, match := range result.Matches {
// Deduplicate paths (in case $PWD is ~/Downloads, etc.)
if seenPaths[match.Path] {
continue
// Collect results from all goroutines (with context timeout)
for i := 0; i < len(dirs); i++ {
select {
case r := <-resultCh:
if r.err != nil {
continue // Skip directories we can't read
}
seenPaths[match.Path] = true

s.dirCounts[dir]++
s.totalCount++
s.foundPaths = append(s.foundPaths, match.Path)
for _, match := range r.result.Matches {
// Deduplicate paths (in case $PWD is ~/Downloads, etc.)
if seenPaths[match.Path] {
continue
}
seenPaths[match.Path] = true

s.dirCounts[r.dir]++
s.totalCount++
s.foundPaths = append(s.foundPaths, match.Path)
}
case <-scanCtx.Done():
// Time budget exhausted - return what we have so far
return s.totalCount > 0
}
}

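The new `Check` above fans the directory scans out in parallel under a single wall-clock budget, then collects results until either every scan reports back or the budget expires. Distilled to its essentials, the pattern looks roughly like the sketch below; `scanDir` stands in for `patterns.ScanDirectory`, and the counts stand in for the real match bookkeeping.

```go
// Distilled sketch of the fan-out/collect pattern: one shared time-budgeted
// context, one goroutine per directory, and a collector that stops as soon
// as the wall-clock budget is exhausted.
package main

import (
	"context"
	"fmt"
	"time"
)

// scanDir is a placeholder; a real scan would honor ctx's deadline while reading entries.
func scanDir(ctx context.Context, dir string) (int, error) {
	return 0, nil
}

func scanHotZones(ctx context.Context, dirs []string, budget time.Duration) int {
	ctx, cancel := context.WithTimeout(ctx, budget)
	defer cancel()

	type result struct {
		n   int
		err error
	}
	// Buffered so late goroutines never block after the collector gives up.
	ch := make(chan result, len(dirs))

	for _, d := range dirs {
		go func(d string) {
			n, err := scanDir(ctx, d)
			ch <- result{n, err}
		}(d)
	}

	total := 0
	for range dirs {
		select {
		case r := <-ch:
			if r.err == nil {
				total += r.n
			}
		case <-ctx.Done():
			return total // budget exhausted; report what we have
		}
	}
	return total
}

func main() {
	fmt.Println(scanHotZones(context.Background(), []string{"/tmp"}, 8*time.Millisecond))
}
```

Because the budget is shared rather than split per directory, a fast filesystem lets every goroutine finish well inside the deadline, while a slow one simply truncates the scan — the adaptive behavior the comments in the diff describe.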
24 changes: 17 additions & 7 deletions src/signals/internal/filestat/filestat.go
@@ -11,19 +11,20 @@ import (
)

// Performance limits for sensitive file scanning.
// These cap worst-case behavior when scanning large directories.
// These provide defaults for callers that don't specify their own config.
// Callers can use time-based gating (via context deadline) instead of entry limits.
const (
// maxMatchesPerDir limits how many files to stat per directory.
// After this many matches, we've proven the directory has issues.
maxMatchesPerDir = 10

// maxEntriesPerDir limits directory entries to process before giving up.
// Handles pathological cases like /tmp with thousands of files.
maxEntriesPerDir = 500
// maxEntriesPerDir is the default entry limit for backwards compatibility.
// Set to 0 in ScanConfig to disable and use time-based gating instead.
maxEntriesPerDir = 0

// perDirTimeout is the maximum time budget for scanning a single directory.
// With 4 hot zones, allows ~8ms total leaving 2ms buffer for 10ms budget.
perDirTimeout = 2 * time.Millisecond
// perDirTimeout is the default per-directory timeout.
// Set to 0 in ScanConfig to use caller's context deadline instead.
perDirTimeout = 0
)

// ScanConfig contains configuration for directory scanning.
@@ -231,6 +232,15 @@ func (p *SensitiveFilePatterns) ScanDirectory(ctx context.Context, dirPath strin
continue // Skip files we can't stat
}

// Check context again after expensive syscall for responsive timeout
select {
case <-scanCtx.Done():
result.Truncated = true
result.Reason = "timeout"
return result, nil
default:
}

// Skip non-regular files (symlinks, devices, etc.)
if !info.Mode().IsRegular() {
continue
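The constants above now default to zero, meaning "no limit, defer to the caller's context deadline". A hedged sketch of how those zero values might be interpreted follows; the struct mirrors the `MaxMatches`, `MaxEntries`, and `Timeout` fields used in `dumpster_fire.go`, but the interpretation logic is an assumption based on the comments, not the actual `filestat` implementation.

```go
// Sketch of interpreting zero-valued ScanConfig fields as "use the caller's
// context deadline"; the struct mirrors fields seen in this PR, the logic is assumed.
package main

import (
	"context"
	"fmt"
	"time"
)

type ScanConfig struct {
	MaxMatches int           // stop after this many matches in a directory
	MaxEntries int           // 0 = no entry limit; rely on the context deadline
	Timeout    time.Duration // 0 = no per-directory timeout; rely on the context deadline
}

// scanContext derives the context a single directory scan would run under.
func scanContext(ctx context.Context, cfg ScanConfig) (context.Context, context.CancelFunc) {
	if cfg.Timeout > 0 {
		return context.WithTimeout(ctx, cfg.Timeout) // legacy per-directory budget
	}
	return context.WithCancel(ctx) // time-based gating: the caller's deadline governs
}

func main() {
	legacy := ScanConfig{MaxMatches: 10, MaxEntries: 500, Timeout: 2 * time.Millisecond}
	timeBased := ScanConfig{MaxMatches: 10} // zero MaxEntries/Timeout → global budget

	for _, cfg := range []ScanConfig{legacy, timeBased} {
		ctx, cancel := scanContext(context.Background(), cfg)
		_, hasDeadline := ctx.Deadline()
		fmt.Printf("per-dir deadline=%v entryLimit=%d\n", hasDeadline, cfg.MaxEntries)
		cancel()
	}
}
```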
3 changes: 1 addition & 2 deletions src/signals/registry.go
@@ -57,7 +57,6 @@ func GetAllSignals() []Signal {
NewDangerousTFVarSignal(), // Env var check

// Data sprawl signals
NewDumpsterFireSignal(), // Directory scan for sensitive files
NewRottingSecretsSignal(), // Old sensitive files detection
NewDumpsterFireSignal(), // Directory scan for sensitive files
}
}