use std::path::Path;
use crate::rules::Issue;
/// One suppressed ("known") issue loaded from a `--baseline` JSON file.
///
/// An issue is considered known when its rule, line and column match an
/// entry exactly and its file path matches `file` by component-suffix
/// comparison (see `paths_match`), so absolute and relative spellings of
/// the same file still agree.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug)]
pub struct BaselineEntry {
    /// Rule identifier, compared verbatim against the issue's rule.
    pub rule: String,
    /// File path as recorded in the baseline; may be relative or absolute.
    pub file: String,
    /// Line of the known issue; must equal the issue's line exactly.
    pub line: u32,
    /// Column of the known issue; must equal the issue's column exactly.
    pub column: u32,
}
/// Loads baseline entries from the JSON file at `path`.
///
/// Failures are non-fatal: an unreadable or unparseable file is reported on
/// stderr and treated as an empty baseline, so the analysis run continues
/// without suppression. On success the number of loaded entries is echoed
/// to stderr.
pub fn load_baseline(path: &Path) -> Vec<BaselineEntry> {
    // Collapse both failure modes (read error, parse error) into one
    // pre-formatted message so a single error arm can report either.
    let loaded: Result<Vec<BaselineEntry>, String> = std::fs::read_to_string(path)
        .map_err(|e| format!("⚠ --baseline: could not read '{}': {e}", path.display()))
        .and_then(|text| {
            serde_json::from_str(&text).map_err(|e| {
                format!(
                    "⚠ --baseline: could not parse '{}' as JSON: {e}. Ignoring baseline.",
                    path.display()
                )
            })
        });
    match loaded {
        Ok(entries) => {
            eprintln!(
                " 📋 Baseline loaded: {} known issue(s) suppressed",
                entries.len()
            );
            entries
        }
        Err(message) => {
            eprintln!("{message}");
            Vec::new()
        }
    }
}
/// Compares an issue's path against a baseline entry's path by component
/// suffix, so an absolute path recorded on one machine still matches the
/// relative path produced on another (e.g. `/home/u/proj/src/App.tsx`
/// matches `src/App.tsx`).
///
/// Normalisation drops platform prefixes (`C:`), the root directory, and
/// `.` components — `./src/x` and `src/x` denote the same file, and keeping
/// the `.` would make equal-length comparisons fail spuriously. An empty
/// normalised path only matches another empty path; otherwise a blank
/// baseline entry would act as a wildcard and suppress every issue.
fn paths_match(issue_path: &Path, entry_path: &str) -> bool {
    use std::path::Component;
    let normalise = |p: &Path| -> Vec<String> {
        p.components()
            // Prefix/RootDir differ between absolute and relative spellings
            // of the same file; CurDir (`.`) is a no-op segment.
            .filter(|c| {
                !matches!(
                    c,
                    Component::Prefix(_) | Component::RootDir | Component::CurDir
                )
            })
            .map(|c| c.as_os_str().to_string_lossy().into_owned())
            .collect()
    };
    let a = normalise(issue_path);
    let b = normalise(Path::new(entry_path));
    let (longer, shorter) = if a.len() >= b.len() { (&a, &b) } else { (&b, &a) };
    if shorter.is_empty() {
        // Never let an empty path suffix-match a non-empty one.
        return longer.is_empty();
    }
    longer[longer.len() - shorter.len()..] == shorter[..]
}
/// Returns true when `issue` corresponds exactly to the recorded baseline
/// `entry`: same rule, line and column, and a path-suffix match on the file.
fn entry_matches(issue: &Issue, entry: &BaselineEntry) -> bool {
    // Cheap integer comparisons first; the path check walks components.
    if issue.line != entry.line || issue.column != entry.column {
        return false;
    }
    issue.rule == entry.rule && paths_match(&issue.file, &entry.file)
}
/// Drops every issue that is already recorded in the baseline, preserving
/// the order of the survivors, and reports on stderr how many were
/// suppressed. With an empty baseline the input is returned untouched.
pub fn filter_baseline(issues: Vec<Issue>, entries: &[BaselineEntry]) -> Vec<Issue> {
    if entries.is_empty() {
        return issues;
    }
    let mut remaining = issues;
    let before = remaining.len();
    // retain is in-place and order-preserving: keep only unknown issues.
    remaining.retain(|issue| !entries.iter().any(|entry| entry_matches(issue, entry)));
    let suppressed = before - remaining.len();
    if suppressed > 0 {
        eprintln!(" 🔕 Baseline suppressed {suppressed} known issue(s).");
    }
    remaining
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::rules::{IssueCategory, IssueSource, Severity};
    use std::path::PathBuf;

    /// Builds a throwaway medium-severity performance issue for tests.
    fn make_issue(rule: &str, file: &str, line: u32, col: u32) -> Issue {
        Issue {
            rule: rule.into(),
            message: "test".into(),
            file: PathBuf::from(file),
            line,
            column: col,
            severity: Severity::Medium,
            source: IssueSource::ReactPerfAnalyzer,
            category: IssueCategory::Performance,
        }
    }

    /// Builds a baseline record for tests.
    fn make_entry(rule: &str, file: &str, line: u32, col: u32) -> BaselineEntry {
        BaselineEntry {
            rule: rule.into(),
            file: file.into(),
            line,
            column: col,
        }
    }

    #[test]
    fn known_issue_is_suppressed() {
        let baseline = vec![make_entry("no_inline_jsx_fn", "src/App.tsx", 10, 5)];
        let issues = vec![make_issue("no_inline_jsx_fn", "src/App.tsx", 10, 5)];
        let result = filter_baseline(issues, &baseline);
        assert!(result.is_empty(), "known issue should be suppressed");
    }

    #[test]
    fn new_issue_survives() {
        let baseline = vec![make_entry("no_inline_jsx_fn", "src/App.tsx", 10, 5)];
        let issues = vec![
            make_issue("no_inline_jsx_fn", "src/App.tsx", 10, 5),
            make_issue("no_inline_jsx_fn", "src/App.tsx", 99, 1),
        ];
        let survivors = filter_baseline(issues, &baseline);
        assert_eq!(survivors.len(), 1);
        assert_eq!(survivors[0].line, 99);
    }

    #[test]
    fn absolute_vs_relative_path_match() {
        let baseline = vec![make_entry("large_component", "src/Dashboard.tsx", 1, 1)];
        let issues = vec![make_issue(
            "large_component",
            "/home/user/project/src/Dashboard.tsx",
            1,
            1,
        )];
        assert!(
            filter_baseline(issues, &baseline).is_empty(),
            "absolute and relative paths should match via suffix"
        );
    }

    #[test]
    fn empty_baseline_returns_all() {
        let survivors = filter_baseline(vec![make_issue("some_rule", "x.tsx", 1, 1)], &[]);
        assert_eq!(survivors.len(), 1);
    }
}