use serde::Serialize;
use std::collections::HashMap;
use crate::types::{FileAnalysis, OutputMode};
/// One exported symbol discovered during analysis, plus how often it is
/// imported elsewhere (filled in by `build_symbol_registry`).
#[derive(Debug, Clone, Serialize)]
pub struct SymbolEntry {
    /// Symbol name as exported.
    pub name: String,
    /// Export kind recorded by the analyzer (e.g. "function", "class", "type").
    pub kind: String,
    /// Path of the file that exports this symbol.
    pub file_path: String,
    /// Line of the export; 0 when the analyzer had no line information.
    pub line: usize,
    /// Number of resolved imports of this symbol across all analyzed files.
    pub import_count: usize,
}
/// Result of a dead-export ("dead parrot") scan.
#[derive(Debug, Clone, Serialize)]
pub struct TwinsResult {
    /// Exports with no detected importers or other rescuing uses.
    pub dead_parrots: Vec<SymbolEntry>,
    /// Total number of exports that were registered for the scan.
    pub total_symbols: usize,
    /// Number of `FileAnalysis` entries that went into the scan.
    pub total_files: usize,
}
/// Builds a map from `(file path, export name)` to a [`SymbolEntry`], then
/// tallies how many times each registered export is imported.
///
/// When `include_tests` is false, exports from test files and from
/// report-excluded paths are skipped; imports are always counted from every
/// file. Unresolved imports fall back to matching on the raw import source.
pub fn build_symbol_registry(
    analyses: &[FileAnalysis],
    include_tests: bool,
) -> HashMap<(String, String), SymbolEntry> {
    use crate::analyzer::classify::should_exclude_from_reports;
    let mut registry: HashMap<(String, String), SymbolEntry> = HashMap::new();
    // Pass 1: register every export we care about.
    for analysis in analyses {
        let skip = !include_tests
            && (analysis.is_test || should_exclude_from_reports(&analysis.path));
        if skip {
            continue;
        }
        for export in &analysis.exports {
            let entry = SymbolEntry {
                name: export.name.clone(),
                kind: export.kind.clone(),
                file_path: analysis.path.clone(),
                line: export.line.unwrap_or(0),
                import_count: 0,
            };
            registry.insert((analysis.path.clone(), export.name.clone()), entry);
        }
    }
    // Pass 2: count imports against the registered exports. Default imports
    // are looked up under the name "default".
    for analysis in analyses {
        for import in &analysis.imports {
            let target = import.resolved_path.as_ref().unwrap_or(&import.source);
            for symbol in &import.symbols {
                let imported_name = if symbol.is_default {
                    String::from("default")
                } else {
                    symbol.name.clone()
                };
                if let Some(entry) = registry.get_mut(&(target.clone(), imported_name)) {
                    entry.import_count += 1;
                }
            }
        }
    }
    registry
}
/// Returns true for files that act as application/module entry points and so
/// should never have their exports reported as dead.
fn is_entry_point(path: &str) -> bool {
    // Bare crate roots match by exact name.
    if path == "lib.rs" || path == "main.rs" {
        return true;
    }
    // Everything else matches by path suffix.
    const ENTRY_SUFFIXES: &[&str] = &[
        "/lib.rs",
        "/main.rs",
        "/index.ts",
        "/index.tsx",
        "/index.js",
        "/index.jsx",
        "/index.mjs",
        "/App.tsx",
        "/App.jsx",
        "/App.ts",
        "/App.js",
        "/app.tsx",
        "/app.jsx",
        "/main.ts",
        "/main.tsx",
        "/main.js",
        "/main.jsx",
        "/_app.tsx",
        "/_app.jsx",
        "/_document.tsx",
        "/_document.jsx",
        "/layout.tsx",
        "/layout.jsx",
        "/page.tsx",
        "/page.jsx",
        "/__init__.py",
    ];
    if ENTRY_SUFFIXES.iter().any(|suffix| path.ends_with(*suffix)) {
        return true;
    }
    // Go binaries conventionally live under a cmd/ directory.
    if path.ends_with(".go") && path.contains("/cmd/") {
        return true;
    }
    is_application_entry_pattern(path)
}
/// Heuristic for application-bootstrap files that frameworks load implicitly:
/// matches the lowercased file name against known entry prefixes, or any
/// path inside an app-shell directory.
fn is_application_entry_pattern(path: &str) -> bool {
    let lowered = path.rsplit('/').next().unwrap_or(path).to_lowercase();
    let named_like_entry = ["application.", "bootstrap.", "entry.", "appshell.", "appinit."]
        .iter()
        .any(|pattern| lowered.contains(pattern));
    named_like_entry || path.contains("/app-shell/") || path.contains("/app_shell/")
}
/// True for a Rust module root: a bare `mod.rs` or any `.../mod.rs`.
fn is_mod_rs(path: &str) -> bool {
    path.ends_with("/mod.rs") || path == "mod.rs"
}
/// Detects names that frameworks reference implicitly, so a lack of imports
/// does not mean the symbol is dead.
fn is_framework_magic(name: &str, kind: &str) -> bool {
    // Is the character at `idx` (if any) uppercase?
    let upper_at = |s: &str, idx: usize| s.chars().nth(idx).map(|c| c.is_uppercase()).unwrap_or(false);
    // Python dunder names (__init__, __all__, ...).
    if name.starts_with("__") && name.ends_with("__") {
        return true;
    }
    // Capitalized default exports look like framework components.
    if kind == "default" && upper_at(name, 0) {
        return true;
    }
    // React-style hooks: "use" followed by an uppercase letter.
    if name.starts_with("use") && name.len() > 3 && upper_at(name, 3) {
        return true;
    }
    // Mixin classes and Rust impl/trait items are consumed without imports.
    (kind == "class" && name.ends_with("Mixin")) || kind == "impl" || kind == "trait"
}
/// True for barrel-file re-exports; both spellings are produced upstream.
/// The symbol name is currently unused but kept for signature stability.
fn is_barrel_reexport(_name: &str, kind: &str) -> bool {
    matches!(kind, "re-export" | "reexport")
}
/// Scans all analyses for "dead parrots": exported symbols with zero counted
/// imports that are not rescued by any of the filters below (Tauri handlers,
/// local uses, same-name imports, entry points, mod.rs files, framework magic,
/// barrel re-exports, dynamic-import targets).
///
/// `_dead_only` is currently unused; `include_tests` controls whether test
/// files contribute exports to the registry.
pub fn find_dead_parrots(
    analyses: &[FileAnalysis],
    _dead_only: bool,
    include_tests: bool,
) -> TwinsResult {
    let registry = build_symbol_registry(analyses, include_tests);
    // Commands registered with Tauri are invoked by the runtime, not imported.
    let tauri_handlers: std::collections::HashSet<String> = analyses
        .iter()
        .flat_map(|a| a.tauri_registered_handlers.iter().cloned())
        .collect();
    // Names used locally in any file. Matching is name-based (not per-file),
    // so this can over-rescue; that bias avoids false positives.
    let all_local_uses: std::collections::HashSet<String> = analyses
        .iter()
        .flat_map(|a| a.local_uses.iter().cloned())
        .collect();
    // Every imported name, even when the import never resolved to a registered
    // file; default imports collapse to the name "default".
    let all_imported_names: std::collections::HashSet<String> = analyses
        .iter()
        .flat_map(|a| a.imports.iter())
        .flat_map(|imp| imp.symbols.iter())
        .map(|sym| {
            if sym.is_default {
                "default".to_string()
            } else {
                sym.name.clone()
            }
        })
        .collect();
    use super::root_scan::normalize_module_id;
    use crate::types::ImportKind;
    // Normalized module keys reachable through dynamic (lazy) imports;
    // exports from those modules are never reported dead.
    let mut dynamic_import_targets: std::collections::HashSet<String> =
        std::collections::HashSet::new();
    for analysis in analyses {
        for imp in &analysis.imports {
            if matches!(imp.kind, ImportKind::Dynamic) {
                // Record both the resolved path (when known) and the raw specifier.
                if let Some(resolved) = &imp.resolved_path {
                    dynamic_import_targets.insert(normalize_module_id(resolved).as_key());
                }
                dynamic_import_targets.insert(normalize_module_id(&imp.source).as_key());
            }
        }
    }
    for analysis in analyses {
        for dyn_imp in &analysis.dynamic_imports {
            let dyn_norm = normalize_module_id(dyn_imp);
            dynamic_import_targets.insert(dyn_norm.as_key());
            // Dynamic specifiers are often relative ("./x") or alias-prefixed
            // ("@..."); strip those prefixes and also treat any analyzed file
            // whose normalized path ends with the remainder as a target.
            let dyn_alias = dyn_norm
                .path
                .trim_start_matches("./")
                .trim_start_matches('@')
                .to_string();
            for a in analyses {
                let a_norm = normalize_module_id(&a.path);
                if a_norm.path.ends_with(&dyn_alias) {
                    dynamic_import_targets.insert(a_norm.as_key());
                }
            }
        }
    }
    // An entry is dead only if it survives every rescue filter below.
    let mut dead_parrots: Vec<SymbolEntry> = registry
        .values()
        .filter(|entry| {
            if entry.import_count > 0 {
                return false;
            }
            if tauri_handlers.contains(&entry.name) {
                return false;
            }
            if all_local_uses.contains(&entry.name) {
                return false;
            }
            if all_imported_names.contains(&entry.name) {
                return false;
            }
            if is_entry_point(&entry.file_path) {
                return false;
            }
            if is_mod_rs(&entry.file_path) {
                return false;
            }
            if is_framework_magic(&entry.name, &entry.kind) {
                return false;
            }
            if is_barrel_reexport(&entry.name, &entry.kind) {
                return false;
            }
            let file_norm = normalize_module_id(&entry.file_path).as_key();
            if dynamic_import_targets.contains(&file_norm) {
                return false;
            }
            true
        })
        .cloned()
        .collect();
    // Deterministic output: by file path, then symbol name.
    dead_parrots.sort_by(|a, b| {
        a.file_path
            .cmp(&b.file_path)
            .then_with(|| a.name.cmp(&b.name))
    });
    TwinsResult {
        dead_parrots,
        total_symbols: registry.len(),
        total_files: analyses.len(),
    }
}
/// Prints a human-readable dead-parrot report, grouped by file.
pub fn print_twins_human(result: &TwinsResult) {
    if result.dead_parrots.is_empty() {
        println!("No dead parrots found - all exports are imported!");
        return;
    }
    println!("DEAD PARROTS ({} found)", result.dead_parrots.len());
    println!();
    // Group findings per file; entries keep their pre-sorted name order.
    let mut grouped: HashMap<&str, Vec<&SymbolEntry>> = HashMap::new();
    for parrot in &result.dead_parrots {
        grouped.entry(parrot.file_path.as_str()).or_default().push(parrot);
    }
    let mut file_names: Vec<&str> = grouped.keys().copied().collect();
    file_names.sort_unstable();
    for file_name in file_names {
        println!(" {}", file_name);
        for parrot in &grouped[file_name] {
            println!(
                " ├─ {} ({}:{}) - {} imports",
                parrot.name, parrot.kind, parrot.line, parrot.import_count
            );
        }
        println!();
    }
    println!("Summary:");
    println!(" Total symbols: {}", result.total_symbols);
    println!(" Dead parrots: {}", result.dead_parrots.len());
    println!(" Files analyzed: {}", result.total_files);
}
/// Prints the dead-parrot report as pretty-printed JSON on stdout.
pub fn print_twins_json(result: &TwinsResult) {
    // Flatten each finding into a plain JSON object.
    let parrots: Vec<serde_json::Value> = result
        .dead_parrots
        .iter()
        .map(|entry| {
            serde_json::json!({
                "name": entry.name,
                "file": entry.file_path,
                "line": entry.line,
                "kind": entry.kind,
                "import_count": entry.import_count,
            })
        })
        .collect();
    let payload = serde_json::json!({
        "dead_parrots": parrots,
        "summary": {
            "symbols": result.total_symbols,
            "files": result.total_files,
            "dead_parrots": result.dead_parrots.len(),
        }
    });
    println!("{}", serde_json::to_string_pretty(&payload).unwrap());
}
/// Dispatches the dead-parrot report to the requested output format.
pub fn print_twins_result(result: &TwinsResult, output: OutputMode) {
    match output {
        OutputMode::Human => print_twins_human(result),
        // JSONL currently shares the JSON printer.
        OutputMode::Json | OutputMode::Jsonl => print_twins_json(result),
    }
}
/// One place a duplicated symbol is defined.
#[derive(Clone, Debug, Serialize)]
pub struct TwinLocation {
    /// File containing this definition.
    pub file_path: String,
    /// Line of the definition; 0 when unknown.
    pub line: usize,
    /// Analyzer kind (e.g. "function", "class", "type").
    pub kind: String,
    /// How many times this particular definition is imported.
    pub import_count: usize,
    /// True for the location chosen as the consolidation target.
    pub is_canonical: bool,
    /// "params|returns" type fingerprint, when signature info was available.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub signature_fingerprint: Option<String>,
}
/// A group of definitions sharing the exact same symbol name.
#[derive(Clone, Debug, Serialize)]
pub struct ExactTwin {
    /// The shared symbol name.
    pub name: String,
    /// Every definition site of that name.
    pub locations: Vec<TwinLocation>,
    /// Average pairwise signature similarity (0.0..=1.0) across locations
    /// with fingerprints; `None` when fewer than two fingerprints exist.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub signature_similarity: Option<f32>,
}
/// Source language inferred from a file extension (see [`detect_language`]).
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum Language {
    TypeScript,
    JavaScript,
    Rust,
    Python,
    Go,
    /// Any extension not recognized above.
    Other,
}
/// Whether a twin group spans one language or several.
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub enum TwinCategory {
    /// Every location is in the given language — likely a real duplicate.
    SameLanguage(Language),
    /// Locations span multiple languages — often intentional FE/BE mirrors.
    CrossLanguage,
}
/// Maps a file path to a [`Language`] by extension; unrecognized extensions
/// fall through to [`Language::Other`].
pub fn detect_language(path: &str) -> Language {
    let has_ext = |exts: &[&str]| exts.iter().any(|ext| path.ends_with(*ext));
    if has_ext(&[".ts", ".tsx", ".mts"]) {
        Language::TypeScript
    } else if has_ext(&[".js", ".jsx", ".mjs", ".cjs"]) {
        Language::JavaScript
    } else if has_ext(&[".rs"]) {
        Language::Rust
    } else if has_ext(&[".py", ".pyi"]) {
        Language::Python
    } else if has_ext(&[".go"]) {
        Language::Go
    } else {
        Language::Other
    }
}
/// Classifies a twin group: same-language when every location resolves to one
/// language, cross-language otherwise (including the empty-locations case).
pub fn categorize_twin(twin: &ExactTwin) -> TwinCategory {
    use std::collections::HashSet;
    let mut languages: HashSet<Language> = HashSet::new();
    for location in &twin.locations {
        languages.insert(detect_language(&location.file_path));
    }
    match languages.len() {
        1 => TwinCategory::SameLanguage(languages.into_iter().next().unwrap()),
        _ => TwinCategory::CrossLanguage,
    }
}
/// Function/method names so common across languages and codebases that a
/// cross-file name collision carries no duplication signal; twin detection
/// skips symbols with these names (checked via `is_generic_method`).
pub const GENERIC_METHOD_NAMES: &[&str] = &[
    // Construction / conversion
    "new", "default", "from", "into", "clone", "drop", "deref", "as_ref",
    "as_mut", "try_from", "try_into", "with_config", "main", "app", "App",
    // Lifecycle
    "init", "setup", "teardown", "cleanup", "dispose", "destroy", "mount",
    "unmount",
    // CRUD / persistence
    "create", "read", "update", "delete", "get", "set", "load", "save",
    "fetch", "store",
    // Execution / control / I/O
    "run", "start", "stop", "execute", "process", "handle", "dispatch",
    "open", "close", "write", "send", "receive", "connect", "disconnect",
    // (De)serialization and accessors
    "parse", "format", "serialize", "deserialize", "encode", "decode",
    "to_string", "from_str", "to_json", "from_json", "to_bytes", "from_bytes",
    "as_str", "as_bytes", "into_inner", "inner", "get_inner",
    // Option/Result-style accessors
    "unwrap", "unwrap_or", "ok", "err",
    // State / rendering
    "reset", "clear", "flush", "refresh", "render", "draw", "display",
    "show", "hide", "validate", "configure", "build",
    // Testing
    "test", "fixture", "mock", "stub", "spy",
    // Python dunders
    "__init__", "__new__", "__str__", "__repr__",
    // Invocation / eventing
    "apply", "call", "invoke", "notify", "emit", "on", "off",
    // Collections
    "add", "remove", "insert", "push", "pop", "len", "size", "count",
    "index", "find", "search", "filter", "map", "reduce", "sort", "compare",
    "equals", "hash", "copy", "merge", "split", "join", "concat", "append",
    "extend", "contains", "exists", "is_empty", "is_valid",
    // Assertions / predicates
    "check", "verify", "assert", "expect", "should", "must", "can", "will",
    "with",
    // Generic identifiers
    "label", "name", "id", "key", "value", "data", "info",
    // Logging
    "error", "warn", "debug", "log", "print", "trace",
];
/// True when `name` is in `GENERIC_METHOD_NAMES`; a linear scan is fine
/// because the list is static and short.
fn is_generic_method(name: &str) -> bool {
    GENERIC_METHOD_NAMES.iter().any(|&generic| generic == name)
}
fn compute_signature_fingerprint(
analyses: &[FileAnalysis],
file_path: &str,
function_name: &str,
) -> Option<String> {
let analysis = analyses.iter().find(|a| a.path == file_path)?;
let mut param_types: Vec<String> = Vec::new();
let mut return_types: Vec<String> = Vec::new();
for sig_use in &analysis.signature_uses {
if sig_use.function == function_name {
match sig_use.usage {
crate::types::SignatureUseKind::Parameter => {
param_types.push(sig_use.type_name.clone());
}
crate::types::SignatureUseKind::Return => {
return_types.push(sig_use.type_name.clone());
}
}
}
}
if param_types.is_empty() && return_types.is_empty() {
return None;
}
param_types.sort();
return_types.sort();
let params_str = param_types.join(",");
let returns_str = return_types.join(",");
Some(format!("{}|{}", params_str, returns_str))
}
/// Similarity of two signature fingerprints in [0.0, 1.0].
///
/// Identical strings score 1.0; otherwise this is the Jaccard similarity of
/// the flattened type-name sets (the '|' params/returns separator is treated
/// like ','). Two empty fingerprints also score 1.0.
fn fingerprint_similarity(fp1: &str, fp2: &str) -> f32 {
    if fp1 == fp2 {
        return 1.0;
    }
    let set_a: std::collections::HashSet<&str> =
        fp1.split([',', '|']).filter(|t| !t.is_empty()).collect();
    let set_b: std::collections::HashSet<&str> =
        fp2.split([',', '|']).filter(|t| !t.is_empty()).collect();
    if set_a.is_empty() && set_b.is_empty() {
        return 1.0;
    }
    let shared = set_a.intersection(&set_b).count();
    let combined = set_a.union(&set_b).count();
    if combined == 0 {
        0.0
    } else {
        shared as f32 / combined as f32
    }
}
/// Average pairwise similarity across every fingerprint that is present.
/// Returns `None` when fewer than two fingerprints are available.
fn compute_group_similarity(fingerprints: &[Option<String>]) -> Option<f32> {
    let known: Vec<&String> = fingerprints.iter().flatten().collect();
    if known.len() < 2 {
        return None;
    }
    let mut sum = 0.0f32;
    let mut pairs = 0usize;
    for (i, left) in known.iter().enumerate() {
        for right in &known[i + 1..] {
            sum += fingerprint_similarity(left.as_str(), right.as_str());
            pairs += 1;
        }
    }
    (pairs > 0).then(|| sum / pairs as f32)
}
/// Finds groups of exports that share the same name across multiple files,
/// with no framework-convention filtering applied.
pub fn detect_exact_twins(analyses: &[FileAnalysis], include_tests: bool) -> Vec<ExactTwin> {
    detect_exact_twins_with_frameworks(analyses, include_tests, None)
}
/// Like [`detect_exact_twins`], but when `frameworks` is provided, groups in
/// which *every* location matches a framework naming convention are dropped.
///
/// Returns groups sorted by number of duplicate locations (descending). In
/// each group the location with the most imports is marked canonical; ties
/// go to the shortest file path.
pub fn detect_exact_twins_with_frameworks(
    analyses: &[FileAnalysis],
    include_tests: bool,
    frameworks: Option<&[crate::analyzer::frameworks::Framework]>,
) -> Vec<ExactTwin> {
    let registry = build_symbol_registry(analyses, include_tests);
    // Bucket every definition by symbol name: (file, line, kind, import_count).
    // Fix: the iterator target here was mojibake (`®istry` for `&registry`)
    // and did not compile.
    let mut symbol_map: HashMap<String, Vec<(String, usize, String, usize)>> = HashMap::new();
    for ((file_path, symbol_name), entry) in &registry {
        // Re-exports and Python __all__ entries are aliases, not definitions.
        if entry.kind == "reexport" || entry.kind == "re-export" || entry.kind == "__all__" {
            continue;
        }
        symbol_map.entry(symbol_name.clone()).or_default().push((
            file_path.clone(),
            entry.line,
            entry.kind.clone(),
            entry.import_count,
        ));
    }
    let fw_slice = frameworks.unwrap_or(&[]);
    let mut twins: Vec<ExactTwin> = Vec::new();
    for (name, locations_raw) in symbol_map {
        // Ubiquitous names ("new", "get", "main", ...) carry no signal.
        if is_generic_method(&name) {
            continue;
        }
        // A twin group needs at least two definitions.
        if locations_raw.len() <= 1 {
            continue;
        }
        if !fw_slice.is_empty() {
            // Drop the group only when *every* location is a framework convention.
            let all_are_conventions = locations_raw.iter().all(|(file_path, _, _, _)| {
                crate::analyzer::frameworks::is_framework_convention(&name, file_path, fw_slice)
            });
            if all_are_conventions {
                continue;
            }
        }
        let mut locations: Vec<TwinLocation> = locations_raw
            .iter()
            .map(|(file, line, kind, import_count)| {
                // Only value-like kinds carry meaningful type signatures.
                let signature_fingerprint = if kind == "function"
                    || kind == "var"
                    || kind == "decl"
                    || kind == "const"
                    || kind == "named"
                {
                    compute_signature_fingerprint(analyses, file, &name)
                } else {
                    None
                };
                TwinLocation {
                    file_path: file.clone(),
                    line: *line,
                    kind: kind.clone(),
                    import_count: *import_count,
                    is_canonical: false,
                    signature_fingerprint,
                }
            })
            .collect();
        let fingerprints: Vec<Option<String>> = locations
            .iter()
            .map(|l| l.signature_fingerprint.clone())
            .collect();
        let signature_similarity = compute_group_similarity(&fingerprints);
        if !locations.is_empty() {
            // Canonical pick: most-imported location; stable sort keeps the
            // original order among equal-length paths on ties.
            let max_imports = locations.iter().map(|l| l.import_count).max().unwrap_or(0);
            let mut canonicals: Vec<&mut TwinLocation> = locations
                .iter_mut()
                .filter(|l| l.import_count == max_imports)
                .collect();
            if canonicals.len() > 1 {
                canonicals.sort_by_key(|l| l.file_path.len());
            }
            if let Some(canonical) = canonicals.first_mut() {
                canonical.is_canonical = true;
            }
        }
        twins.push(ExactTwin {
            name,
            locations,
            signature_similarity,
        });
    }
    // Biggest duplicate groups first.
    twins.sort_by(|a, b| b.locations.len().cmp(&a.locations.len()));
    twins
}
/// Prints a human-readable twin report, separating actionable same-language
/// duplicates from (usually intentional) cross-language pairs.
pub fn print_exact_twins_human(twins: &[ExactTwin]) {
    if twins.is_empty() {
        println!("No exact twins found - all symbol names are unique!");
        return;
    }
    let (same_lang, cross_lang): (Vec<_>, Vec<_>) = twins
        .iter()
        .partition(|twin| matches!(categorize_twin(twin), TwinCategory::SameLanguage(_)));
    println!("EXACT TWINS ({} found)", twins.len());
    println!();
    if !same_lang.is_empty() {
        println!(
            " [!] SAME-LANGUAGE DUPLICATES ({} groups) - likely need consolidation:",
            same_lang.len()
        );
        println!();
        same_lang.iter().for_each(|twin| print_twin_details(twin));
    }
    if !cross_lang.is_empty() {
        println!(
            " [i] CROSS-LANGUAGE PAIRS ({} groups) - likely intentional FE/BE mirrors:",
            cross_lang.len()
        );
        println!();
        cross_lang.iter().for_each(|twin| print_twin_details(twin));
    }
    let total_dups: usize = twins.iter().map(|t| t.locations.len()).sum();
    println!("Summary:");
    println!(
        " Same-language duplicates: {} (actionable)",
        same_lang.len()
    );
    println!(" Cross-language pairs: {} (usually OK)", cross_lang.len());
    println!(" Total duplicate definitions: {}", total_dups);
}
/// Prints one twin group: every location, the canonical marker, and a removal
/// hint when only some copies are unreferenced.
fn print_twin_details(twin: &ExactTwin) {
    println!(" Symbol: {}", twin.name);
    for loc in &twin.locations {
        // Flag the location chosen as the consolidation target.
        let marker = if loc.is_canonical { " CANONICAL" } else { "" };
        println!(
            " ├─ {}:{} ({}) - {} imports{}",
            loc.file_path, loc.line, loc.kind, loc.import_count, marker
        );
    }
    let unimported = twin
        .locations
        .iter()
        .filter(|l| l.import_count == 0)
        .count();
    // Only hint when some — but not all — copies have zero imports.
    if unimported > 0 && unimported < twin.locations.len() {
        println!(
            " └─ [TIP] {} location(s) have 0 imports - candidates for removal or consolidation",
            unimported
        );
    }
    println!();
}
/// Prints the twin report as pretty-printed JSON on stdout.
pub fn print_exact_twins_json(twins: &[ExactTwin]) {
    let (same_lang, cross_lang): (Vec<_>, Vec<_>) = twins
        .iter()
        .partition(|twin| matches!(categorize_twin(twin), TwinCategory::SameLanguage(_)));
    // Groups whose signatures agree at >= 80% get counted in the summary.
    let high_similarity_count = twins
        .iter()
        .filter(|t| matches!(t.signature_similarity, Some(s) if s >= 0.8))
        .count();
    let mut twin_values: Vec<serde_json::Value> = Vec::with_capacity(twins.len());
    for twin in twins {
        let category_label = match categorize_twin(twin) {
            TwinCategory::SameLanguage(lang) => format!("same_language:{:?}", lang).to_lowercase(),
            TwinCategory::CrossLanguage => "cross_language".to_string(),
        };
        let mut location_values: Vec<serde_json::Value> = Vec::new();
        for loc in &twin.locations {
            let mut loc_json = serde_json::json!({
                "file": loc.file_path,
                "line": loc.line,
                "kind": loc.kind,
                "imports": loc.import_count,
                "canonical": loc.is_canonical,
                "language": format!("{:?}", detect_language(&loc.file_path)).to_lowercase(),
            });
            // Fingerprints are optional; only emit the key when present.
            if let Some(ref fp) = loc.signature_fingerprint {
                loc_json["signature_fingerprint"] = serde_json::json!(fp);
            }
            location_values.push(loc_json);
        }
        let mut twin_json = serde_json::json!({
            "name": twin.name,
            "category": category_label,
            "locations": location_values,
        });
        if let Some(sim) = twin.signature_similarity {
            twin_json["signature_similarity"] = serde_json::json!(sim);
        }
        twin_values.push(twin_json);
    }
    let output = serde_json::json!({
        "exact_twins": twin_values,
        "summary": {
            "total_groups": twins.len(),
            "same_language_groups": same_lang.len(),
            "cross_language_groups": cross_lang.len(),
            "high_similarity_groups": high_similarity_count,
            "total_duplicates": twins.iter().map(|t| t.locations.len()).sum::<usize>(),
        }
    });
    println!("{}", serde_json::to_string_pretty(&output).unwrap());
}
/// Dispatches the twin report to the requested output format.
pub fn print_exact_twins(twins: &[ExactTwin], output: OutputMode) {
    match output {
        OutputMode::Human => print_exact_twins_human(twins),
        // JSONL currently shares the JSON printer.
        OutputMode::Json | OutputMode::Jsonl => print_exact_twins_json(twins),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::types::{ExportSymbol, ImportEntry, ImportKind, ImportSymbol};
    /// Builds a minimal `FileAnalysis` exporting the given `(name, kind)`
    /// pairs; export lines are numbered 1..=n in declaration order.
    fn mock_file_with_exports(path: &str, exports: Vec<(&str, &str)>) -> FileAnalysis {
        FileAnalysis {
            path: path.to_string(),
            exports: exports
                .into_iter()
                .enumerate()
                .map(|(i, (name, kind))| ExportSymbol {
                    name: name.to_string(),
                    kind: kind.to_string(),
                    export_type: "named".to_string(),
                    line: Some(i + 1),
                    params: Vec::new(),
                })
                .collect(),
            ..Default::default()
        }
    }
    // No analyses -> empty registry.
    #[test]
    fn test_build_symbol_registry_empty() {
        let analyses: Vec<FileAnalysis> = vec![];
        let registry = build_symbol_registry(&analyses, false);
        assert!(registry.is_empty());
    }
    // Exports with no importers register with import_count == 0.
    #[test]
    fn test_build_symbol_registry_no_imports() {
        let analyses = vec![
            mock_file_with_exports("a.ts", vec![("foo", "function")]),
            mock_file_with_exports("b.ts", vec![("bar", "function")]),
        ];
        let registry = build_symbol_registry(&analyses, false);
        assert_eq!(registry.len(), 2);
        let foo_entry = registry
            .get(&("a.ts".to_string(), "foo".to_string()))
            .unwrap();
        assert_eq!(foo_entry.import_count, 0);
    }
    // A resolved static import increments the exporter's import_count.
    #[test]
    fn test_build_symbol_registry_with_imports() {
        let exporter = mock_file_with_exports("utils.ts", vec![("helper", "function")]);
        let mut importer = FileAnalysis {
            path: "app.ts".to_string(),
            ..Default::default()
        };
        let mut import = ImportEntry::new("./utils".to_string(), ImportKind::Static);
        import.resolved_path = Some("utils.ts".to_string());
        import.symbols.push(ImportSymbol {
            name: "helper".to_string(),
            alias: None,
            is_default: false,
        });
        importer.imports.push(import);
        let registry = build_symbol_registry(&[exporter, importer], false);
        let helper_entry = registry
            .get(&("utils.ts".to_string(), "helper".to_string()))
            .unwrap();
        assert_eq!(helper_entry.import_count, 1);
    }
    // Test-file exports are excluded unless include_tests is set.
    #[test]
    fn test_build_symbol_registry_skips_tests() {
        let test_file = FileAnalysis {
            path: "tests/test_api_integration.py".to_string(),
            is_test: true,
            exports: vec![ExportSymbol {
                name: "TestHealthEndpoints".to_string(),
                kind: "class".to_string(),
                export_type: "named".to_string(),
                line: Some(10),
                params: Vec::new(),
            }],
            ..Default::default()
        };
        let normal_file = mock_file_with_exports("app.py", vec![("App", "class")]);
        let registry = build_symbol_registry(&[test_file.clone(), normal_file.clone()], false);
        assert_eq!(registry.len(), 1);
        assert!(registry.contains_key(&("app.py".to_string(), "App".to_string())));
        let registry_with_tests = build_symbol_registry(&[test_file, normal_file], true);
        assert_eq!(registry_with_tests.len(), 2);
    }
    // Only the never-imported export is reported dead.
    #[test]
    fn test_find_dead_parrots() {
        let used_file = mock_file_with_exports("used.ts", vec![("used", "function")]);
        let dead_file = mock_file_with_exports("dead.ts", vec![("unused", "function")]);
        let mut importer = FileAnalysis {
            path: "app.ts".to_string(),
            ..Default::default()
        };
        let mut import = ImportEntry::new("./used".to_string(), ImportKind::Static);
        import.resolved_path = Some("used.ts".to_string());
        import.symbols.push(ImportSymbol {
            name: "used".to_string(),
            alias: None,
            is_default: false,
        });
        importer.imports.push(import);
        let result = find_dead_parrots(&[used_file, dead_file, importer], true, false);
        assert_eq!(result.dead_parrots.len(), 1);
        assert_eq!(result.dead_parrots[0].name, "unused");
        assert_eq!(result.total_symbols, 2);
    }
    // Modules reached via dynamic (lazy) imports are rescued from the dead list.
    #[test]
    fn test_find_dead_parrots_skips_dynamic_imports() {
        let lazy_component = FileAnalysis {
            path: "src/components/PasswordResetModal.tsx".to_string(),
            exports: vec![ExportSymbol {
                name: "PasswordResetModal".to_string(),
                kind: "function".to_string(),
                export_type: "default".to_string(),
                line: Some(23),
                params: Vec::new(),
            }],
            ..Default::default()
        };
        let importer = FileAnalysis {
            path: "src/App.tsx".to_string(),
            dynamic_imports: vec!["./components/PasswordResetModal".to_string()],
            ..Default::default()
        };
        let dead_file = mock_file_with_exports("dead.ts", vec![("unused", "function")]);
        let result = find_dead_parrots(&[lazy_component, importer, dead_file], true, false);
        assert_eq!(result.dead_parrots.len(), 1);
        assert_eq!(result.dead_parrots[0].name, "unused");
    }
    // Unique names produce no twin groups.
    #[test]
    fn test_detect_exact_twins_no_duplicates() {
        let analyses = vec![
            mock_file_with_exports("a.ts", vec![("foo", "function")]),
            mock_file_with_exports("b.ts", vec![("bar", "function")]),
        ];
        let twins = detect_exact_twins(&analyses, false);
        assert!(twins.is_empty());
    }
    // Two same-named exports form one group with two locations.
    #[test]
    fn test_detect_exact_twins_simple() {
        let analyses = vec![
            mock_file_with_exports("a.ts", vec![("Button", "class")]),
            mock_file_with_exports("b.ts", vec![("Button", "class")]),
        ];
        let twins = detect_exact_twins(&analyses, false);
        assert_eq!(twins.len(), 1);
        assert_eq!(twins[0].name, "Button");
        assert_eq!(twins[0].locations.len(), 2);
    }
    // With equal import counts, the shorter file path wins the canonical tie.
    #[test]
    fn test_detect_exact_twins_canonical_by_path() {
        let analyses = vec![
            mock_file_with_exports("shared/types.ts", vec![("Message", "type")]),
            mock_file_with_exports("hooks/useChat.ts", vec![("Message", "type")]),
        ];
        let twins = detect_exact_twins(&analyses, false);
        assert_eq!(twins.len(), 1);
        let canonical = twins[0].locations.iter().find(|l| l.is_canonical).unwrap();
        assert_eq!(canonical.file_path, "shared/types.ts");
    }
    // The most-imported location is the canonical one.
    #[test]
    fn test_detect_exact_twins_canonical_by_imports() {
        let a = mock_file_with_exports("a.ts", vec![("Foo", "type")]);
        let b = mock_file_with_exports("b.ts", vec![("Foo", "type")]);
        let mut importer = FileAnalysis {
            path: "app.ts".to_string(),
            ..Default::default()
        };
        let mut import = ImportEntry::new("./a".to_string(), ImportKind::Static);
        import.resolved_path = Some("a.ts".to_string());
        import.symbols.push(ImportSymbol {
            name: "Foo".to_string(),
            alias: None,
            is_default: false,
        });
        importer.imports.push(import);
        let twins = detect_exact_twins(&[a, b, importer], false);
        assert_eq!(twins.len(), 1);
        let canonical = twins[0].locations.iter().find(|l| l.is_canonical).unwrap();
        assert_eq!(canonical.file_path, "a.ts");
        assert_eq!(canonical.import_count, 1);
    }
    // Three same-named exports collapse into one group with three locations.
    #[test]
    fn test_detect_exact_twins_three_locations() {
        let analyses = vec![
            mock_file_with_exports("a.ts", vec![("Common", "type")]),
            mock_file_with_exports("b.ts", vec![("Common", "type")]),
            mock_file_with_exports("c.ts", vec![("Common", "type")]),
        ];
        let twins = detect_exact_twins(&analyses, false);
        assert_eq!(twins.len(), 1);
        assert_eq!(twins[0].locations.len(), 3);
    }
}