use serde::{Deserialize, Serialize};
use std::collections::HashSet;
use crate::analyzer::barrels::{BarrelAnalysis, analyze_barrel_chaos};
use crate::analyzer::cycles::{ClassifiedCycle, find_cycles_with_lazy};
use crate::analyzer::dead_parrots::{
DeadExport, DeadFilterConfig, ShadowExport, find_dead_exports,
};
use crate::analyzer::dist::DistResult;
use crate::analyzer::health_score::{HealthMetrics, calculate_health_score};
use crate::analyzer::memory_lint::{MemoryLintIssue, MemoryLintSummary, lint_memory_file};
use crate::analyzer::react_lint::{ReactLintIssue, ReactLintSummary, analyze_react_file};
use crate::analyzer::report::RankedDup;
use crate::analyzer::root_scan::ScanResults;
use crate::analyzer::ts_lint::{TsLintIssue, TsLintSummary, lint_ts_file};
use crate::analyzer::twins::{
TwinCategory, categorize_twin, detect_exact_twins, find_dead_parrots,
};
use crate::snapshot::{EntrypointDriftSummary, Snapshot};
use crate::types::FileAnalysis;
/// Cap on the number of quick wins surfaced in a findings report.
const MAX_FINDINGS_QUICK_WINS: usize = 10;
/// Top-level `findings.json` artifact: every issue detected by a scan.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Findings {
    /// loctree version that generated this report.
    pub loctree: String,
    /// ISO-8601 generation timestamp, or "unknown" if formatting failed.
    pub generated_at: String,
    /// Git commit the snapshot was taken at, when available.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub git_ref: Option<String>,
    /// Aggregated counts and the health score for the whole scan.
    pub summary: FindingsSummary,
    /// Exports that appear unused ("dead parrots").
    pub dead_parrots: Vec<DeadExport>,
    /// Shadowed exports; `produce` currently always leaves this empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub shadow_exports: Vec<ShadowExport>,
    /// Import cycles labeled breaking/structural/diamond/lazy.
    pub cycles: Vec<CycleEntry>,
    /// Symbols exported from multiple files.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub duplicates: Vec<DuplicateGroup>,
    /// Barrel-file problems (missing barrels, deep chains, inconsistent paths).
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub barrel_chaos: Vec<BarrelChaosEntry>,
    /// React-specific lint issues.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub react_lint: Vec<ReactLintIssue>,
    /// TypeScript lint issues.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub ts_lint: Vec<TsLintIssue>,
    /// Memory-leak lint issues.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub memory_lint: Vec<MemoryLintIssue>,
    /// Up to `MAX_FINDINGS_QUICK_WINS` suggested low-effort fixes.
    pub quick_wins: Vec<QuickWin>,
    /// Entrypoint drift summary copied from the snapshot metadata.
    #[serde(default, skip_serializing_if = "EntrypointDriftSummary::is_empty")]
    pub entrypoint_drift: EntrypointDriftSummary,
    /// Bundle distribution analysis, when dist analysis ran.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dist: Option<DistResult>,
}
/// Aggregated counts and overall health score for a scan.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FindingsSummary {
    /// Number of files analyzed.
    pub files: usize,
    /// Total lines of code across analyzed files.
    pub loc: usize,
    /// Overall health score produced by `calculate_health_score`.
    pub health_score: u8,
    /// Count of dead exports.
    pub dead_parrots: usize,
    /// Count of shadowed exports.
    pub shadow_exports: usize,
    /// Number of duplicate-symbol groups.
    pub duplicate_groups: usize,
    /// Cycle counts broken down by classification.
    pub cycles: CycleCounts,
    /// Number of barrel-chaos findings.
    pub barrel_chaos: usize,
    /// React lint rollup; omitted when no issues were found.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub react_lint: Option<ReactLintSummary>,
    /// TypeScript lint rollup; omitted when no issues were found.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ts_lint: Option<TsLintSummary>,
    /// Memory lint rollup; omitted when no issues were found.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub memory_lint: Option<MemoryLintSummary>,
    /// Dist rollup; omitted when dist analysis did not run.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dist: Option<FindingsDistSummary>,
}
/// Rollup of bundle distribution analysis (serialized in camelCase).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FindingsDistSummary {
    /// Number of source maps found.
    #[serde(rename = "sourceMaps")]
    pub source_maps: usize,
    /// Count of exports removed by tree shaking.
    #[serde(rename = "treeShakenExports")]
    pub tree_shaken_exports: usize,
    /// Coverage percentage.
    #[serde(rename = "coveragePct")]
    pub coverage_pct: usize,
    /// Number of impacted source files.
    #[serde(rename = "impactedFiles")]
    pub impacted_files: usize,
    /// Analysis depth label (from `DistResult::analysis_level`).
    #[serde(rename = "analysisLevel")]
    pub analysis_level: String,
}
/// Number of detected cycles per classification label.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct CycleCounts {
    /// Hard bidirectional cycles ("breaking").
    pub breaking: usize,
    /// Structural cycles (self-reference, trait/cfg/wildcard patterns).
    pub structural: usize,
    /// Fan-pattern ("diamond") cycles.
    pub diamond: usize,
    /// Cycles found only via lazy edges (see `find_cycles_with_lazy`).
    pub lazy: usize,
}
/// A single reported import cycle.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CycleEntry {
    /// Classification label: "breaking", "structural", "diamond", or "lazy".
    #[serde(rename = "type")]
    pub cycle_type: String,
    /// Files participating in the cycle, in cycle order.
    pub files: Vec<String>,
    /// Suggested edge to cut; only populated for "breaking" cycles.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub suggestion: Option<String>,
}
/// A symbol exported from multiple files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DuplicateGroup {
    /// The duplicated symbol name.
    pub symbol: String,
    /// Every location where the symbol is defined.
    pub files: Vec<DuplicateFile>,
    /// Suggested canonical location for the symbol.
    pub canonical: String,
    /// Coarse label "low"/"medium"/"high" (mapped in `collect_duplicates`).
    pub severity: String,
    /// Optional explanation; omitted when the source reason string is empty.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
}
/// One location of a duplicated symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DuplicateFile {
    /// File path containing the duplicate definition.
    pub file: String,
    /// Line number of the definition, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub line: Option<usize>,
    /// Importer count; not populated by `collect_duplicates`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub import_count: Option<usize>,
}
/// One barrel-file problem.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BarrelChaosEntry {
    /// Kind: "missing_barrel", "deep_chain", or "inconsistent_path".
    #[serde(rename = "type")]
    pub chaos_type: String,
    /// Paths involved (directory, chain links, or import paths).
    pub paths: Vec<String>,
    /// Human-readable description of the problem.
    pub description: String,
}
/// A suggested low-effort fix surfaced to the user.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuickWin {
    /// Action keyword, e.g. "delete", "break_cycle", "consolidate".
    pub action: String,
    /// File to act on.
    pub file: String,
    /// Why this action is suggested.
    pub reason: String,
    /// Estimated LOC saved, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub saves_loc: Option<usize>,
}
/// Options controlling findings generation.
#[derive(Debug, Clone, Default)]
pub struct FindingsConfig {
    /// Forwarded to `find_dead_exports`; presumably restricts reporting to
    /// high-confidence dead exports — confirm against that function's docs.
    pub high_confidence: bool,
    /// Library-mode dead-export filtering.
    pub library_mode: bool,
    /// Python-library-specific dead-export filtering.
    pub python_library: bool,
    /// Globs identifying example code for the dead-export filter.
    pub example_globs: Vec<String>,
}
impl Findings {
    /// Build the full findings report from scan results and a snapshot.
    ///
    /// Runs dead-export detection, cycle classification, duplicate
    /// collection, barrel analysis and the react/ts/memory linters, then
    /// rolls everything up into a `FindingsSummary`.
    pub fn produce(
        scan_results: &ScanResults,
        snapshot: &Snapshot,
        config: FindingsConfig,
        dist: Option<DistResult>,
    ) -> Self {
        let version = env!("CARGO_PKG_VERSION").to_string();
        let generated_at = time::OffsetDateTime::now_utc()
            .format(&time::format_description::well_known::Iso8601::DEFAULT)
            .unwrap_or_else(|_| "unknown".to_string());
        let git_ref = snapshot.metadata.git_commit.clone();
        let analyses: Vec<&FileAnalysis> = scan_results.global_analyses.iter().collect();
        // Materialize the owned analyses exactly once; previously this clone
        // happened twice (for dead-export detection and again for twins).
        let analyses_owned: Vec<FileAnalysis> =
            analyses.iter().map(|a| (*a).clone()).collect();
        // Collect `dead-ok` ignore globs from every scanned root, deduplicated.
        let mut dead_ok_globs: Vec<String> = snapshot
            .metadata
            .roots
            .iter()
            .flat_map(|root| {
                crate::fs_utils::load_loctignore_dead_ok_globs(std::path::Path::new(root))
            })
            .collect();
        dead_ok_globs.sort();
        dead_ok_globs.dedup();
        let dead_filter = DeadFilterConfig {
            include_tests: false,
            include_helpers: false,
            library_mode: config.library_mode,
            example_globs: config.example_globs.clone(),
            python_library_mode: config.python_library,
            include_ambient: false,
            include_dynamic: false,
            dead_ok_globs,
        };
        let dead_parrots = find_dead_exports(
            &analyses_owned,
            config.high_confidence,
            None,
            dead_filter,
        );
        // Strict cycles get individually classified; lazy cycles are reported as-is.
        let all_edges: Vec<_> = scan_results
            .contexts
            .iter()
            .flat_map(|ctx| ctx.graph_edges.clone())
            .collect();
        let (strict_cycles, lazy_cycles) = find_cycles_with_lazy(&all_edges);
        let classified_strict: Vec<ClassifiedCycle> = strict_cycles
            .into_iter()
            .map(|nodes| ClassifiedCycle::new(nodes, &all_edges))
            .collect();
        let cycles = classify_cycles(&classified_strict, &lazy_cycles);
        let duplicates = collect_duplicates(scan_results);
        let barrel_analysis = analyze_barrel_chaos(snapshot);
        let barrel_chaos = convert_barrel_chaos(&barrel_analysis);
        // Shadow-export detection is not wired up here; kept for schema stability.
        let shadow_exports: Vec<ShadowExport> = Vec::new();
        // Lint passes need file contents; paths resolve against the first root.
        let root_path = snapshot
            .metadata
            .roots
            .first()
            .map(std::path::PathBuf::from);
        let mut react_lint: Vec<ReactLintIssue> = Vec::new();
        let mut ts_lint: Vec<TsLintIssue> = Vec::new();
        let mut memory_lint: Vec<MemoryLintIssue> = Vec::new();
        if let Some(root) = &root_path {
            for f in &snapshot.files {
                let ext = std::path::Path::new(&f.path)
                    .extension()
                    .and_then(std::ffi::OsStr::to_str);
                // Only JS/TS sources are linted at all.
                if !matches!(ext, Some("ts") | Some("tsx") | Some("js") | Some("jsx")) {
                    continue;
                }
                // Read each file once and feed every applicable linter; the
                // previous implementation re-read every file per lint pass.
                let full_path = root.join(&f.path);
                let Ok(content) = std::fs::read_to_string(&full_path) else {
                    continue;
                };
                react_lint.extend(analyze_react_file(&content, &full_path, f.path.clone()));
                if matches!(ext, Some("ts") | Some("tsx")) {
                    ts_lint.extend(lint_ts_file(&full_path, &content));
                }
                memory_lint.extend(lint_memory_file(&full_path, &content));
            }
        }
        let quick_wins = generate_quick_wins(
            &dead_parrots,
            &cycles,
            &duplicates,
            &barrel_chaos,
            &react_lint,
            &ts_lint,
            &memory_lint,
        );
        // Twin detection feeds only the summary/health score, not the report body.
        let exact_twins = detect_exact_twins(&analyses_owned, false);
        let (twins_same_language, _twins_cross_language): (Vec<_>, Vec<_>) = exact_twins
            .iter()
            .partition(|twin| matches!(categorize_twin(twin), TwinCategory::SameLanguage(_)));
        let twins_result = find_dead_parrots(&analyses_owned, false, false);
        let twins_dead_parrots = twins_result.dead_parrots.len();
        let twins_same_lang_count = twins_same_language.len();
        let cascade_imports: usize = scan_results
            .contexts
            .iter()
            .map(|ctx| ctx.cascades.len())
            .sum();
        let summary = calculate_summary(
            &analyses,
            &dead_parrots,
            &shadow_exports,
            &duplicates,
            &cycles,
            &barrel_chaos,
            &react_lint,
            &ts_lint,
            &memory_lint,
            twins_dead_parrots,
            twins_same_lang_count,
            cascade_imports,
            dist.as_ref(),
        );
        Findings {
            loctree: version,
            generated_at,
            git_ref,
            summary,
            dead_parrots,
            shadow_exports,
            cycles,
            duplicates,
            barrel_chaos,
            react_lint,
            ts_lint,
            memory_lint,
            quick_wins,
            entrypoint_drift: snapshot.metadata.entrypoint_drift.clone(),
            dist,
        }
    }

    /// Serialize the findings to pretty-printed JSON.
    pub fn to_json(&self) -> Result<String, serde_json::Error> {
        serde_json::to_string_pretty(self)
    }

    /// Return a clone of just the summary section.
    pub fn summary_only(&self) -> FindingsSummary {
        self.summary.clone()
    }
}
/// Convert classified strict cycles plus raw lazy cycles into report entries.
///
/// Strict cycles map to "breaking"/"diamond"/"structural" labels; lazy
/// cycles are appended afterwards with the "lazy" label and no suggestion.
fn classify_cycles(strict: &[ClassifiedCycle], lazy: &[Vec<String>]) -> Vec<CycleEntry> {
    use crate::analyzer::cycles::CycleClassification;
    let strict_entries = strict.iter().map(|cycle| {
        let cycle_type = match cycle.classification {
            CycleClassification::HardBidirectional => "breaking",
            CycleClassification::FanPattern => "diamond",
            CycleClassification::ModuleSelfReference
            | CycleClassification::TraitBased
            | CycleClassification::CfgGated
            | CycleClassification::WildcardImport
            | CycleClassification::Unknown => "structural",
        };
        CycleEntry {
            cycle_type: cycle_type.to_string(),
            files: cycle.nodes.clone(),
            // Only breaking cycles get a concrete break suggestion.
            suggestion: (cycle_type == "breaking")
                .then(|| suggest_cycle_break(&cycle.nodes))
                .flatten(),
        }
    });
    let lazy_entries = lazy.iter().map(|nodes| CycleEntry {
        cycle_type: "lazy".to_string(),
        files: nodes.clone(),
        suggestion: None,
    });
    strict_entries.chain(lazy_entries).collect()
}
/// Suggest an edge to cut in a cycle: the edge leaving the middle node.
///
/// Returns `None` for cycles of fewer than two modules, where there is
/// no meaningful edge to break.
fn suggest_cycle_break(nodes: &[String]) -> Option<String> {
    let len = nodes.len();
    (len >= 2).then(|| {
        let mid = len / 2;
        // Wrap around so the last node points back at the first.
        let succ = (mid + 1) % len;
        format!("Break at: {} -> {}", nodes[mid], nodes[succ])
    })
}
/// Flatten ranked duplicate symbols from every scan context into
/// report-ready `DuplicateGroup`s, mapping the severity enum to a
/// coarse "low"/"medium"/"high" label.
fn collect_duplicates(scan_results: &ScanResults) -> Vec<DuplicateGroup> {
    use crate::analyzer::report::DupSeverity;
    // Gather duplicates from all contexts, preserving context order.
    let ranked: Vec<&RankedDup> = scan_results
        .contexts
        .iter()
        .flat_map(|ctx| ctx.filtered_ranked.iter())
        .collect();
    let mut groups = Vec::with_capacity(ranked.len());
    for dup in ranked {
        let files: Vec<DuplicateFile> = dup
            .locations
            .iter()
            .map(|loc| DuplicateFile {
                file: loc.file.clone(),
                line: loc.line,
                import_count: None,
            })
            .collect();
        let severity = match dup.severity {
            DupSeverity::CrossCrate => "high",
            DupSeverity::SamePackage | DupSeverity::CrossModule => "medium",
            DupSeverity::CrossLangExpected | DupSeverity::ReExportOrGeneric => "low",
        };
        // Empty reason strings collapse to None so serde omits the field.
        let reason = (!dup.reason.is_empty()).then(|| dup.reason.clone());
        groups.push(DuplicateGroup {
            symbol: dup.name.clone(),
            files,
            canonical: dup.canonical.clone(),
            severity: severity.to_string(),
            reason,
        });
    }
    groups
}
/// Convert barrel analysis results into report entries: missing barrels
/// first, then deep re-export chains, then inconsistent import paths.
fn convert_barrel_chaos(analysis: &BarrelAnalysis) -> Vec<BarrelChaosEntry> {
    let missing = analysis.missing_barrels.iter().map(|m| BarrelChaosEntry {
        chaos_type: "missing_barrel".to_string(),
        paths: vec![m.directory.clone()],
        description: format!(
            "Directory with {} files has {} external imports but no barrel file",
            m.file_count, m.external_import_count
        ),
    });
    let chains = analysis.deep_chains.iter().map(|c| BarrelChaosEntry {
        chaos_type: "deep_chain".to_string(),
        paths: c.chain.clone(),
        description: format!(
            "Re-export chain for '{}' is {} levels deep",
            c.symbol, c.depth
        ),
    });
    let inconsistent = analysis.inconsistent_paths.iter().map(|inc| {
        // Canonical path first, then every alternative import path.
        let paths: Vec<String> = std::iter::once(inc.canonical_path.clone())
            .chain(inc.alternative_paths.iter().map(|(p, _)| p.clone()))
            .collect();
        BarrelChaosEntry {
            chaos_type: "inconsistent_path".to_string(),
            paths,
            description: format!(
                "Symbol '{}' is imported via {} different paths",
                inc.symbol,
                1 + inc.alternative_paths.len()
            ),
        }
    });
    missing.chain(chains).chain(inconsistent).collect()
}
/// Assemble up to `MAX_FINDINGS_QUICK_WINS` low-effort fixes, in priority
/// order: dead exports, breaking cycles, high-severity duplicates, missing
/// barrels, then high-severity react/ts/memory lint issues (one per file
/// per category).
fn generate_quick_wins(
    dead_parrots: &[DeadExport],
    cycles: &[CycleEntry],
    duplicates: &[DuplicateGroup],
    barrel_chaos: &[BarrelChaosEntry],
    react_lint: &[ReactLintIssue],
    ts_lint: &[TsLintIssue],
    memory_lint: &[MemoryLintIssue],
) -> Vec<QuickWin> {
    let mut wins: Vec<QuickWin> = Vec::new();

    // High-confidence dead exports: at most one "delete" win per file.
    let mut dead_seen: HashSet<String> = HashSet::new();
    for dead in dead_parrots {
        if dead.confidence == "high" && dead_seen.insert(dead.file.clone()) {
            wins.push(QuickWin {
                action: "delete".to_string(),
                file: dead.file.clone(),
                reason: dead.reason.clone(),
                saves_loc: None,
            });
        }
    }

    // Breaking cycles that carry a concrete break suggestion.
    for cycle in cycles.iter().filter(|c| c.cycle_type == "breaking") {
        if let Some(suggestion) = &cycle.suggestion {
            wins.push(QuickWin {
                action: "break_cycle".to_string(),
                file: cycle.files.first().cloned().unwrap_or_default(),
                reason: suggestion.clone(),
                saves_loc: None,
            });
        }
    }

    // High-severity duplicates: consolidate into the canonical file.
    for dup in duplicates.iter().filter(|d| d.severity == "high") {
        wins.push(QuickWin {
            action: "consolidate".to_string(),
            file: dup.canonical.clone(),
            reason: format!(
                "Consolidate '{}' from {} files",
                dup.symbol,
                dup.files.len()
            ),
            saves_loc: None,
        });
    }

    // Missing barrels: suggest creating an index.ts in the directory.
    for chaos in barrel_chaos.iter().filter(|c| c.chaos_type == "missing_barrel") {
        if let Some(dir) = chaos.paths.first() {
            wins.push(QuickWin {
                action: "create_barrel".to_string(),
                file: format!("{}/index.ts", dir),
                reason: chaos.description.clone(),
                saves_loc: None,
            });
        }
    }

    // High-severity lint findings: first issue per file, per category.
    let mut react_seen: HashSet<String> = HashSet::new();
    for issue in react_lint {
        if issue.severity == "high" && react_seen.insert(issue.file.clone()) {
            wins.push(QuickWin {
                action: "fix_race_condition".to_string(),
                file: issue.file.clone(),
                reason: format!("{} (line {})", issue.message, issue.line),
                saves_loc: None,
            });
        }
    }
    let mut ts_seen: HashSet<String> = HashSet::new();
    for issue in ts_lint {
        if issue.severity == "high" && ts_seen.insert(issue.file.clone()) {
            wins.push(QuickWin {
                action: "fix_type_safety".to_string(),
                file: issue.file.clone(),
                reason: format!("{} (line {})", issue.message, issue.line),
                saves_loc: None,
            });
        }
    }
    let mut mem_seen: HashSet<String> = HashSet::new();
    for issue in memory_lint {
        if issue.severity == "high" && mem_seen.insert(issue.file.clone()) {
            wins.push(QuickWin {
                action: "fix_memory_leak".to_string(),
                file: issue.file.clone(),
                reason: format!("{} (line {})", issue.message, issue.line),
                saves_loc: None,
            });
        }
    }

    wins.truncate(MAX_FINDINGS_QUICK_WINS);
    wins
}
/// Roll every findings category up into a `FindingsSummary`, including
/// the computed health score.
#[allow(clippy::too_many_arguments)]
fn calculate_summary(
    analyses: &[&FileAnalysis],
    dead_parrots: &[DeadExport],
    shadow_exports: &[ShadowExport],
    duplicates: &[DuplicateGroup],
    cycles: &[CycleEntry],
    barrel_chaos: &[BarrelChaosEntry],
    react_lint: &[ReactLintIssue],
    ts_lint: &[TsLintIssue],
    memory_lint: &[MemoryLintIssue],
    twins_dead_parrots: usize,
    twins_same_language: usize,
    cascade_imports: usize,
    dist: Option<&DistResult>,
) -> FindingsSummary {
    let files = analyses.len();
    let loc = analyses.iter().map(|a| a.loc).sum::<usize>();

    // Tally cycles by the string label assigned in `classify_cycles`.
    let cycle_counts = cycles
        .iter()
        .fold(CycleCounts::default(), |mut acc, cycle| {
            match cycle.cycle_type.as_str() {
                "breaking" => acc.breaking += 1,
                "structural" => acc.structural += 1,
                "diamond" => acc.diamond += 1,
                "lazy" => acc.lazy += 1,
                _ => {}
            }
            acc
        });

    let metrics = HealthMetrics {
        breaking_cycles: cycle_counts.breaking,
        dead_exports: dead_parrots.len(),
        twins_dead_parrots,
        barrel_chaos_count: barrel_chaos.len(),
        structural_cycles: cycle_counts.structural,
        duplicate_exports: duplicates.len(),
        twins_same_language,
        cascade_imports,
        files,
        loc,
        ..Default::default()
    };
    let health_score = calculate_health_score(&metrics).health;

    // Lint rollups are omitted entirely when a category has no issues.
    let react_lint_summary =
        (!react_lint.is_empty()).then(|| ReactLintSummary::from_issues(react_lint));
    let ts_lint_summary = (!ts_lint.is_empty()).then(|| TsLintSummary::from_issues(ts_lint));
    let memory_lint_summary = (!memory_lint.is_empty())
        .then(|| crate::analyzer::memory_lint::calculate_summary(memory_lint));

    let dist_summary = dist.map(|d| FindingsDistSummary {
        source_maps: d.source_maps,
        tree_shaken_exports: d.tree_shaken_exports,
        coverage_pct: d.coverage_pct,
        impacted_files: d.impacted_files.len(),
        analysis_level: d.analysis_level.as_str().to_string(),
    });

    FindingsSummary {
        files,
        loc,
        health_score,
        dead_parrots: dead_parrots.len(),
        shadow_exports: shadow_exports.len(),
        duplicate_groups: duplicates.len(),
        cycles: cycle_counts,
        barrel_chaos: barrel_chaos.len(),
        react_lint: react_lint_summary,
        ts_lint: ts_lint_summary,
        memory_lint: memory_lint_summary,
        dist: dist_summary,
    }
}
/// The `manifest.json` artifact: an index of all generated outputs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Manifest {
    /// loctree version that produced the manifest.
    pub loctree: String,
    /// ISO-8601 generation timestamp, or "unknown".
    pub generated_at: String,
    /// Basic project identity and size figures.
    pub project: ManifestProject,
    /// Descriptions of the sibling artifacts.
    pub artifacts: ManifestArtifacts,
    /// Canonical CLI commands.
    pub commands: ManifestCommands,
    /// Example task/command pairs.
    pub examples: Vec<ManifestExample>,
    /// Dist analysis info, present only when dist analysis ran.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dist: Option<ManifestDist>,
}
/// Project identity and size figures for the manifest.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestProject {
    /// Project name (taken from the git repo metadata), when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Languages detected in the scan.
    pub languages: Vec<String>,
    /// Total files scanned.
    pub files: usize,
    /// Total lines of code.
    pub loc: usize,
}
/// Per-artifact info, keyed in JSON by the artifact's file name.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestArtifacts {
    /// The snapshot artifact.
    #[serde(rename = "snapshot.json")]
    pub snapshot: ArtifactInfo,
    /// The findings artifact.
    #[serde(rename = "findings.json")]
    pub findings: ArtifactInfo,
    /// The AI-context artifact.
    #[serde(rename = "agent.json")]
    pub agent: ArtifactInfo,
}
/// Size, purpose, and query hints for one generated artifact.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ArtifactInfo {
    /// Approximate size in kilobytes.
    pub size_kb: usize,
    /// One-line description of what the artifact contains.
    pub purpose: String,
    /// Suggested commands for querying this artifact.
    pub query_with: Vec<String>,
}
/// Canonical CLI invocations advertised in the manifest.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestCommands {
    /// Full-scan command.
    pub scan: String,
    /// Per-file context command.
    pub slice: String,
    /// Symbol search command.
    pub find: String,
    /// Raw jq-style query command.
    pub jq: String,
}
/// A task description paired with the command that accomplishes it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestExample {
    /// What the user wants to do.
    pub task: String,
    /// The command to run.
    pub cmd: String,
}
/// Dist analysis summary for the manifest (serialized in camelCase).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestDist {
    /// Source directory the dist bundle was matched against.
    #[serde(rename = "srcDir")]
    pub src_dir: String,
    /// Analysis depth label (from `DistResult::analysis_level`).
    #[serde(rename = "analysisLevel")]
    pub analysis_level: String,
    /// Count of exports removed by tree shaking.
    #[serde(rename = "treeShakenExports")]
    pub tree_shaken_exports: usize,
    /// Coverage percentage.
    #[serde(rename = "coveragePct")]
    pub coverage_pct: usize,
}
impl Manifest {
    /// Build the `manifest.json` artifact describing the other outputs.
    ///
    /// `findings_size_kb` and `agent_size_kb` are the on-disk sizes of the
    /// sibling artifacts; `dist` enriches the manifest when dist analysis ran.
    pub fn produce(
        snapshot: &Snapshot,
        findings_size_kb: usize,
        agent_size_kb: usize,
        dist: Option<&DistResult>,
    ) -> Self {
        let version = env!("CARGO_PKG_VERSION").to_string();
        let generated_at = time::OffsetDateTime::now_utc()
            .format(&time::format_description::well_known::Iso8601::DEFAULT)
            .unwrap_or_else(|_| "unknown".to_string());
        let project = ManifestProject {
            name: snapshot.metadata.git_repo.clone(),
            languages: snapshot.metadata.languages.iter().cloned().collect(),
            files: snapshot.metadata.file_count,
            loc: snapshot.metadata.total_loc,
        };
        // Rough size estimate: ~2 KB of snapshot JSON per scanned file.
        let snapshot_size_kb = snapshot.files.len() * 2;
        let artifacts = ManifestArtifacts {
            snapshot: ArtifactInfo {
                size_kb: snapshot_size_kb,
                purpose: "Complete analysis graph - imports, exports, LOC per file".to_string(),
                query_with: vec![
                    "loct slice".to_string(),
                    "loct find".to_string(),
                    "loct '<jq>'".to_string(),
                ],
            },
            findings: ArtifactInfo {
                size_kb: findings_size_kb,
                purpose: if dist.is_some() {
                    "All detected issues plus bundle distribution insights".to_string()
                } else {
                    "All detected issues - dead code, cycles, duplicates".to_string()
                },
                query_with: {
                    let mut queries = vec![
                        "loct findings".to_string(),
                        "loct '.dead_parrots'".to_string(),
                    ];
                    if dist.is_some() {
                        queries.push("loct '.dist'".to_string());
                    }
                    queries
                },
            },
            agent: ArtifactInfo {
                size_kb: agent_size_kb,
                purpose: if dist.is_some() {
                    "AI-optimized context bundle with bundle distribution".to_string()
                } else {
                    "AI-optimized context bundle".to_string()
                },
                query_with: {
                    let mut queries = vec!["loct --for-ai".to_string()];
                    if dist.is_some() {
                        queries.push("loct '.bundle.dist'".to_string());
                    }
                    queries
                },
            },
        };
        let commands = ManifestCommands {
            scan: "loct".to_string(),
            slice: "loct slice <file>".to_string(),
            find: "loct find <pattern>".to_string(),
            jq: "loct '<jq-query>'".to_string(),
        };
        // Declared `mut` up front so dist examples can be appended directly.
        let mut examples = vec![
            ManifestExample {
                task: "Get health score".to_string(),
                cmd: "loct '.summary.health_score'".to_string(),
            },
            ManifestExample {
                task: "List dead exports".to_string(),
                cmd: "loct '.dead_parrots'".to_string(),
            },
            ManifestExample {
                task: "Context for file".to_string(),
                cmd: "loct slice src/App.tsx --json".to_string(),
            },
            ManifestExample {
                task: "Find symbol".to_string(),
                cmd: "loct find UserPreferences".to_string(),
            },
            ManifestExample {
                task: "Count cycles".to_string(),
                cmd: "loct '.cycles | length'".to_string(),
            },
        ];
        if dist.is_some() {
            examples.push(ManifestExample {
                task: "Show bundle coverage".to_string(),
                cmd: "loct '.dist.coveragePct'".to_string(),
            });
            examples.push(ManifestExample {
                task: "List tree-shaken exports".to_string(),
                cmd: "loct '.dist.deadExports'".to_string(),
            });
        }
        let dist_summary = dist.map(|dist| ManifestDist {
            src_dir: dist.src_dir.clone(),
            analysis_level: dist.analysis_level.as_str().to_string(),
            tree_shaken_exports: dist.tree_shaken_exports,
            coverage_pct: dist.coverage_pct,
        });
        Manifest {
            loctree: version,
            generated_at,
            project,
            artifacts,
            commands,
            examples,
            dist: dist_summary,
        }
    }

    /// Serialize the manifest to pretty-printed JSON.
    pub fn to_json(&self) -> Result<String, serde_json::Error> {
        serde_json::to_string_pretty(self)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Default-constructed counts start at zero across the board.
    #[test]
    fn test_cycle_counts_default() {
        let counts = CycleCounts::default();
        assert_eq!(
            (counts.breaking, counts.structural, counts.diamond, counts.lazy),
            (0, 0, 0, 0)
        );
    }

    /// A quick win round-trips through serde with its key fields visible.
    #[test]
    fn test_quick_win_serialization() {
        let win = QuickWin {
            action: String::from("delete"),
            file: String::from("src/dead.ts"),
            reason: String::from("Unused export"),
            saves_loc: Some(100),
        };
        let json = serde_json::to_string(&win).expect("serialize quick win");
        for needle in ["delete", "src/dead.ts", "100"] {
            assert!(json.contains(needle), "missing {needle} in {json}");
        }
    }

    /// Pretty-printed summary JSON contains the expected scalar fields.
    #[test]
    fn test_findings_summary_serialization() {
        let summary = FindingsSummary {
            files: 100,
            loc: 10_000,
            health_score: 85,
            dead_parrots: 5,
            shadow_exports: 2,
            duplicate_groups: 10,
            cycles: CycleCounts {
                breaking: 0,
                structural: 2,
                diamond: 1,
                lazy: 3,
            },
            barrel_chaos: 3,
            react_lint: None,
            ts_lint: None,
            memory_lint: None,
            dist: None,
        };
        let json = serde_json::to_string_pretty(&summary).expect("serialize summary");
        assert!(json.contains("\"health_score\": 85"));
        assert!(json.contains("\"breaking\": 0"));
    }

    /// A three-node cycle yields a break suggestion.
    #[test]
    fn test_suggest_cycle_break() {
        let nodes: Vec<String> = ["a.ts", "b.ts", "c.ts"]
            .iter()
            .map(|s| s.to_string())
            .collect();
        let suggestion =
            suggest_cycle_break(&nodes).expect("non-empty cycle should suggest a break");
        assert!(suggestion.contains("Break at:"));
    }

    /// An empty node list yields no suggestion.
    #[test]
    fn test_suggest_cycle_break_empty() {
        assert!(suggest_cycle_break(&[]).is_none());
    }
}