use crate::analyzer::{Classification, FunctionAnalysis};
use crate::report::AnalysisResult;
/// One normalized finding, flattened from any analyzer pass so that all
/// findings can be sorted and printed through a single code path.
#[derive(Debug, Clone)]
pub struct FindingEntry {
    /// Source file path; empty for location-less findings (e.g. coupling).
    pub file: String,
    /// Line number within `file`; 0 when no location applies.
    pub line: usize,
    /// Short uppercase category tag, e.g. "VIOLATION" or "MAGIC_NUMBER".
    pub category: &'static str,
    /// Free-form detail text; may be empty.
    pub detail: String,
    /// Function/struct/module the finding is attached to; may be empty.
    pub function_name: String,
}

impl FindingEntry {
    /// Builds an entry, copying `file` and taking ownership of the
    /// already-owned `detail` and `context` strings.
    fn new(
        file: &str,
        line: usize,
        category: &'static str,
        detail: String,
        context: String,
    ) -> Self {
        FindingEntry {
            file: file.to_owned(),
            line,
            category,
            detail,
            function_name: context,
        }
    }
}
pub fn collect_all_findings(analysis: &AnalysisResult) -> Vec<FindingEntry> {
let mut entries = Vec::new();
collect_function_findings(&analysis.results, &mut entries);
collect_dry_findings(analysis, &mut entries);
collect_srp_findings(analysis, &mut entries);
collect_coupling_findings(analysis, &mut entries);
collect_tq_findings(analysis, &mut entries);
collect_structural_findings(analysis, &mut entries);
entries.sort_by(|a, b| a.file.cmp(&b.file).then(a.line.cmp(&b.line)));
entries
}
/// Prints the findings list to stdout under a bold heading; prints nothing
/// at all when there are no findings.
pub fn print_findings(entries: &[FindingEntry]) {
    if entries.is_empty() {
        return;
    }
    let count = entries.len();
    let plural = if count == 1 { "" } else { "s" };
    let heading = format!("═══ {} Finding{} ═══", count, plural);
    println!("\n{}", colored::Colorize::bold(heading.as_str()));
    for entry in entries {
        // Combine detail and function context, omitting whichever is empty.
        let detail = match (entry.function_name.is_empty(), entry.detail.is_empty()) {
            (true, _) => entry.detail.clone(),
            (false, true) => format!("in {}", entry.function_name),
            (false, false) => format!("{} in {}", entry.detail, entry.function_name),
        };
        if entry.file.is_empty() {
            // Location-less findings (coupling, cycles) skip the file:line prefix.
            println!(" {} {}", entry.category, detail);
        } else {
            println!(" {}:{} {} {}", entry.file, entry.line, entry.category, detail);
        }
    }
}
/// Collects per-function findings (violations, complexity warnings, magic
/// numbers) from the analyzer results, skipping suppressed functions.
fn collect_function_findings(results: &[FunctionAnalysis], entries: &mut Vec<FindingEntry>) {
    for func in results {
        if func.suppressed {
            continue;
        }
        // Builder for findings anchored at the function's declaration line.
        let mk = |cat: &'static str, detail: String| {
            FindingEntry::new(&func.file, func.line, cat, detail, func.qualified_name.clone())
        };
        if matches!(func.classification, Classification::Violation { .. }) {
            entries.push(mk("VIOLATION", "logic + calls".to_string()));
        }
        if func.cognitive_warning {
            let cx = func.complexity.as_ref().map(|m| m.cognitive_complexity).unwrap_or(0);
            entries.push(mk("COGNITIVE", format!("complexity {cx}")));
        }
        if func.cyclomatic_warning {
            let cx = func.complexity.as_ref().map(|m| m.cyclomatic_complexity).unwrap_or(0);
            entries.push(mk("CYCLOMATIC", format!("complexity {cx}")));
        }
        // Magic numbers are anchored at each occurrence line, not the
        // function's declaration line, and are emitted unconditionally
        // whenever complexity metrics are present.
        if let Some(metrics) = func.complexity.as_ref() {
            for occurrence in &metrics.magic_numbers {
                entries.push(FindingEntry::new(
                    &func.file,
                    occurrence.line,
                    "MAGIC_NUMBER",
                    occurrence.value.clone(),
                    func.qualified_name.clone(),
                ));
            }
        }
        if func.nesting_depth_warning {
            let depth = func.complexity.as_ref().map(|m| m.max_nesting).unwrap_or(0);
            entries.push(mk("NESTING", format!("depth {depth}")));
        }
        if func.function_length_warning {
            let lines = func.complexity.as_ref().map(|m| m.function_lines).unwrap_or(0);
            entries.push(mk("LONG_FN", format!("{lines} lines")));
        }
        if func.unsafe_warning {
            let blocks = func.complexity.as_ref().map(|m| m.unsafe_blocks).unwrap_or(0);
            entries.push(mk("UNSAFE", format!("{blocks} blocks")));
        }
        if func.error_handling_warning {
            entries.push(mk("ERROR_HANDLING", "unwrap/panic/todo".to_string()));
        }
    }
}
fn collect_dry_findings(analysis: &AnalysisResult, entries: &mut Vec<FindingEntry>) {
analysis
.duplicates
.iter()
.filter(|g| !g.suppressed)
.for_each(|group| {
let kind = match &group.kind {
crate::dry::DuplicateKind::Exact => "exact".to_string(),
crate::dry::DuplicateKind::NearDuplicate { similarity } => {
format!("{:.0}% similar", similarity * 100.0)
}
};
group.entries.iter().for_each(|e| {
entries.push(FindingEntry::new(
&e.file,
e.line,
"DUPLICATE",
kind.clone(),
e.qualified_name.clone(),
));
});
});
analysis.dead_code.iter().for_each(|w| {
let detail = format!("{:?}", w.kind).to_lowercase();
entries.push(FindingEntry::new(
&w.file,
w.line,
"DEAD_CODE",
detail,
w.qualified_name.clone(),
));
});
analysis
.fragments
.iter()
.filter(|g| !g.suppressed)
.for_each(|group| {
group.entries.iter().for_each(|e| {
let detail = format!("{} stmts", group.statement_count);
entries.push(FindingEntry::new(
&e.file,
e.start_line,
"FRAGMENT",
detail,
e.function_name.clone(),
));
});
});
analysis
.boilerplate
.iter()
.filter(|b| !b.suppressed)
.for_each(|b| {
let name = b.struct_name.clone().unwrap_or_default();
entries.push(FindingEntry::new(
&b.file,
b.line,
"BOILERPLATE",
b.pattern_id.clone(),
name,
));
});
analysis
.wildcard_warnings
.iter()
.filter(|w| !w.suppressed)
.for_each(|w| {
entries.push(FindingEntry::new(
&w.file,
w.line,
"WILDCARD",
w.module_path.clone(),
String::new(),
));
});
analysis
.repeated_matches
.iter()
.filter(|g| !g.suppressed)
.for_each(|group| {
group.entries.iter().for_each(|e| {
entries.push(FindingEntry::new(
&e.file,
e.line,
"REPEATED_MATCH",
group.enum_name.clone(),
e.function_name.clone(),
));
});
});
}
/// Collects single-responsibility findings (struct cohesion, module size,
/// parameter count) when SRP analysis was run; no-op otherwise.
fn collect_srp_findings(analysis: &AnalysisResult, entries: &mut Vec<FindingEntry>) {
    let Some(srp) = &analysis.srp else { return };
    for warning in srp.struct_warnings.iter().filter(|w| !w.suppressed) {
        entries.push(FindingEntry::new(
            &warning.file,
            warning.line,
            "SRP_STRUCT",
            format!("LCOM4={}", warning.lcom4),
            warning.struct_name.clone(),
        ));
    }
    for warning in srp.module_warnings.iter().filter(|w| !w.suppressed) {
        // Module-level warnings are anchored at line 1 of the file.
        entries.push(FindingEntry::new(
            &warning.file,
            1,
            "SRP_MODULE",
            format!("{} lines", warning.production_lines),
            warning.module.clone(),
        ));
    }
    for warning in srp.param_warnings.iter().filter(|w| !w.suppressed) {
        entries.push(FindingEntry::new(
            &warning.file,
            warning.line,
            "SRP_PARAMS",
            format!("{} params", warning.parameter_count),
            warning.function_name.clone(),
        ));
    }
}
/// Collects module-coupling findings (instability warnings, dependency
/// cycles, SDP violations). These carry no file location: file is "" and
/// line is 0, which `print_findings` renders without a file:line prefix.
fn collect_coupling_findings(analysis: &AnalysisResult, entries: &mut Vec<FindingEntry>) {
    let Some(coupling) = &analysis.coupling else { return };
    for metric in &coupling.metrics {
        if !metric.warning {
            continue;
        }
        entries.push(FindingEntry::new(
            "",
            0,
            "COUPLING",
            format!("I={:.2} Ca={} Ce={}", metric.instability, metric.afferent, metric.efferent),
            metric.module_name.clone(),
        ));
    }
    for cycle in &coupling.cycles {
        entries.push(FindingEntry::new("", 0, "CYCLE", cycle.modules.join(" > "), String::new()));
    }
    for violation in coupling.sdp_violations.iter().filter(|v| !v.suppressed) {
        entries.push(FindingEntry::new(
            "",
            0,
            "SDP",
            format!("{} -> {}", violation.from_module, violation.to_module),
            violation.from_module.clone(),
        ));
    }
}
fn collect_tq_findings(analysis: &AnalysisResult, entries: &mut Vec<FindingEntry>) {
let Some(tq) = &analysis.tq else { return };
tq.warnings.iter().filter(|w| !w.suppressed).for_each(|w| {
let cat = match &w.kind {
crate::tq::TqWarningKind::NoAssertion => "TQ_NO_ASSERT",
crate::tq::TqWarningKind::NoSut => "TQ_NO_SUT",
crate::tq::TqWarningKind::Untested => "TQ_UNTESTED",
crate::tq::TqWarningKind::Uncovered => "TQ_UNCOVERED",
crate::tq::TqWarningKind::UntestedLogic { .. } => "TQ_UNTESTED_LOGIC",
};
entries.push(FindingEntry::new(
&w.file,
w.line,
cat,
String::new(),
w.function_name.clone(),
));
});
}
/// Collects structural findings when structural analysis was run, using the
/// warning kind's code as the detail text; no-op otherwise.
fn collect_structural_findings(analysis: &AnalysisResult, entries: &mut Vec<FindingEntry>) {
    let Some(structural) = &analysis.structural else { return };
    for warning in structural.warnings.iter().filter(|w| !w.suppressed) {
        entries.push(FindingEntry::new(
            &warning.file,
            warning.line,
            "STRUCTURAL",
            warning.kind.code().to_string(),
            warning.name.clone(),
        ));
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::analyzer::{
Classification, ComplexityMetrics, FunctionAnalysis, MagicNumberOccurrence,
};
use crate::report::{AnalysisResult, Summary};
// Builds a minimal, warning-free FunctionAnalysis fixture; individual tests
// flip flags on the returned value to trigger specific finding categories.
fn make_fa(name: &str, file: &str, line: usize) -> FunctionAnalysis {
FunctionAnalysis {
name: name.to_string(),
file: file.to_string(),
line,
classification: Classification::Operation,
parent_type: None,
suppressed: false,
complexity: None,
qualified_name: name.to_string(),
severity: None,
cognitive_warning: false,
cyclomatic_warning: false,
nesting_depth_warning: false,
function_length_warning: false,
unsafe_warning: false,
error_handling_warning: false,
complexity_suppressed: false,
own_calls: vec![],
parameter_count: 0,
is_trait_impl: false,
is_test: false,
effort_score: None,
}
}
// An AnalysisResult with every collection empty and every optional analysis
// absent — the baseline that must yield zero findings.
fn empty_analysis() -> AnalysisResult {
AnalysisResult {
results: vec![],
summary: Summary::default(),
coupling: None,
duplicates: vec![],
dead_code: vec![],
fragments: vec![],
boilerplate: vec![],
wildcard_warnings: vec![],
repeated_matches: vec![],
srp: None,
tq: None,
structural: None,
}
}
// An empty analysis must produce no findings at all.
#[test]
fn test_collect_empty_analysis() {
let analysis = empty_analysis();
let findings = collect_all_findings(&analysis);
assert!(findings.is_empty());
}
// Each magic-number occurrence becomes its own MAGIC_NUMBER finding,
// ordered by occurrence line within the same file.
#[test]
fn test_collect_magic_numbers() {
let mut analysis = empty_analysis();
let mut fa = make_fa("test_fn", "src/lib.rs", 10);
fa.complexity = Some(ComplexityMetrics {
magic_numbers: vec![
MagicNumberOccurrence {
line: 12,
value: "42".to_string(),
},
MagicNumberOccurrence {
line: 15,
value: "99".to_string(),
},
],
..Default::default()
});
analysis.results = vec![fa];
let findings = collect_all_findings(&analysis);
assert_eq!(findings.len(), 2);
assert_eq!(findings[0].category, "MAGIC_NUMBER");
assert_eq!(findings[0].detail, "42");
assert_eq!(findings[1].detail, "99");
}
// A Violation classification maps to exactly one VIOLATION finding.
#[test]
fn test_collect_violation() {
let mut analysis = empty_analysis();
let mut fa = make_fa("bad_fn", "src/lib.rs", 5);
fa.classification = Classification::Violation {
has_logic: true,
has_own_calls: true,
logic_locations: vec![],
call_locations: vec![],
};
analysis.results = vec![fa];
let findings = collect_all_findings(&analysis);
assert_eq!(findings.len(), 1);
assert_eq!(findings[0].category, "VIOLATION");
}
// Findings are returned sorted by file path then line, regardless of the
// order the analyzer produced them in.
#[test]
fn test_sorted_by_file_and_line() {
let mut analysis = empty_analysis();
let mut fa1 = make_fa("fn_b", "src/b.rs", 20);
fa1.error_handling_warning = true;
fa1.complexity = Some(ComplexityMetrics::default());
let mut fa2 = make_fa("fn_a", "src/a.rs", 10);
fa2.error_handling_warning = true;
fa2.complexity = Some(ComplexityMetrics::default());
analysis.results = vec![fa1, fa2];
let findings = collect_all_findings(&analysis);
assert_eq!(findings[0].file, "src/a.rs");
assert_eq!(findings[1].file, "src/b.rs");
}
// A suppressed function contributes nothing, even when it would otherwise
// be a violation.
#[test]
fn test_suppressed_not_collected() {
let mut analysis = empty_analysis();
let mut fa = make_fa("suppressed_fn", "src/lib.rs", 5);
fa.suppressed = true;
fa.classification = Classification::Violation {
has_logic: true,
has_own_calls: true,
logic_locations: vec![],
call_locations: vec![],
};
analysis.results = vec![fa];
let findings = collect_all_findings(&analysis);
assert!(findings.is_empty());
}
// Consistency contract: Summary::total_findings() must agree with the
// length of collect_all_findings() when only magic numbers are counted.
#[test]
fn test_total_findings_consistent_magic_numbers() {
let mut analysis = empty_analysis();
let mut fa = make_fa("fn1", "src/lib.rs", 10);
fa.complexity = Some(ComplexityMetrics {
magic_numbers: vec![
MagicNumberOccurrence {
line: 12,
value: "42".to_string(),
},
MagicNumberOccurrence {
line: 15,
value: "99".to_string(),
},
],
..Default::default()
});
analysis.results = vec![fa];
analysis.summary.magic_number_warnings = 2;
let findings = collect_all_findings(&analysis);
assert_eq!(
analysis.summary.total_findings(),
findings.len(),
"total_findings() must equal collect_all_findings().len()"
);
}
// Same consistency contract for duplicate groups: the summary counts
// entries (2 here), and the findings list emits one entry per member.
#[test]
fn test_total_findings_consistent_duplicates() {
use crate::dry::functions::{DuplicateEntry, DuplicateGroup, DuplicateKind};
let mut analysis = empty_analysis();
analysis.duplicates = vec![DuplicateGroup {
entries: vec![
DuplicateEntry {
name: "fn_a".to_string(),
qualified_name: "mod::fn_a".to_string(),
file: "src/a.rs".to_string(),
line: 10,
},
DuplicateEntry {
name: "fn_b".to_string(),
qualified_name: "mod::fn_b".to_string(),
file: "src/b.rs".to_string(),
line: 20,
},
],
kind: DuplicateKind::Exact,
suppressed: false,
}];
analysis.summary.duplicate_groups = 2;
let findings = collect_all_findings(&analysis);
assert_eq!(
analysis.summary.total_findings(),
findings.len(),
"total_findings() must equal collect_all_findings().len()"
);
}
// Same consistency contract for fragment groups (3 member entries).
#[test]
fn test_total_findings_consistent_fragments() {
use crate::dry::fragments::{FragmentEntry, FragmentGroup};
let mut analysis = empty_analysis();
analysis.fragments = vec![FragmentGroup {
entries: vec![
FragmentEntry {
function_name: "fn_a".to_string(),
qualified_name: "mod::fn_a".to_string(),
file: "src/a.rs".to_string(),
start_line: 10,
end_line: 15,
},
FragmentEntry {
function_name: "fn_b".to_string(),
qualified_name: "mod::fn_b".to_string(),
file: "src/b.rs".to_string(),
start_line: 20,
end_line: 25,
},
FragmentEntry {
function_name: "fn_c".to_string(),
qualified_name: "mod::fn_c".to_string(),
file: "src/c.rs".to_string(),
start_line: 30,
end_line: 35,
},
],
statement_count: 3,
suppressed: false,
}];
analysis.summary.fragment_groups = 3;
let findings = collect_all_findings(&analysis);
assert_eq!(
analysis.summary.total_findings(),
findings.len(),
"total_findings() must equal collect_all_findings().len()"
);
}
// Regression test: mixed sources (2 magic numbers + 2 duplicate entries)
// must total 4 findings and still match the summary count.
#[test]
fn test_total_findings_consistent_mixed() {
use crate::dry::functions::{DuplicateEntry, DuplicateGroup, DuplicateKind};
let mut analysis = empty_analysis();
let mut fa = make_fa("fn1", "src/lib.rs", 10);
fa.complexity = Some(ComplexityMetrics {
magic_numbers: vec![
MagicNumberOccurrence {
line: 12,
value: "400".to_string(),
},
MagicNumberOccurrence {
line: 13,
value: "800".to_string(),
},
],
..Default::default()
});
analysis.results = vec![fa];
analysis.duplicates = vec![DuplicateGroup {
entries: vec![
DuplicateEntry {
name: "fn_a".to_string(),
qualified_name: "mod::fn_a".to_string(),
file: "src/a.rs".to_string(),
line: 100,
},
DuplicateEntry {
name: "fn_b".to_string(),
qualified_name: "mod::fn_b".to_string(),
file: "src/b.rs".to_string(),
line: 200,
},
],
kind: DuplicateKind::Exact,
suppressed: false,
}];
analysis.summary.magic_number_warnings = 2;
analysis.summary.duplicate_groups = 2;
let findings = collect_all_findings(&analysis);
assert_eq!(findings.len(), 4);
assert_eq!(
analysis.summary.total_findings(),
findings.len(),
"total_findings() must equal collect_all_findings().len() — was the bug from issue report"
);
}
// Coupling warnings and cycles (file-less findings) must appear in the
// list and keep the summary/findings counts in agreement.
#[test]
fn test_total_findings_consistent_coupling() {
let mut analysis = empty_analysis();
analysis.coupling = Some(crate::coupling::CouplingAnalysis {
metrics: vec![crate::coupling::CouplingMetrics {
module_name: "db".to_string(),
afferent: 2,
efferent: 5,
instability: 0.71,
incoming: vec![],
outgoing: vec![],
suppressed: false,
warning: true,
}],
cycles: vec![crate::coupling::CycleReport {
modules: vec!["a".to_string(), "b".to_string()],
}],
sdp_violations: vec![],
});
analysis.summary.coupling_warnings = 1;
analysis.summary.coupling_cycles = 1;
let findings = collect_all_findings(&analysis);
assert_eq!(
analysis.summary.total_findings(),
findings.len(),
"coupling warnings and cycles must appear in findings list"
);
assert!(
findings.iter().any(|f| f.category == "COUPLING"
&& f.function_name == "db"
&& f.detail.contains("I=0.71")),
"expected a COUPLING finding for db with instability detail"
);
assert!(
findings
.iter()
.any(|f| f.category == "CYCLE" && f.detail.contains("a > b")),
"expected a CYCLE finding describing the a > b cycle"
);
}
}