use std::collections::HashMap;
use std::path::{Path, PathBuf};
use anyhow::Result;
use clap::Args;
use serde::Serialize;
use walkdir::WalkDir;
const MAX_FILES: usize = 10_000;
use tldr_core::analysis::dead::dead_code_analysis_refcount;
use tldr_core::analysis::refcount::count_identifiers_in_tree;
use tldr_core::ast::parser::parse_file;
use tldr_core::ast::{extract_file, extract_from_tree};
use tldr_core::types::{DeadCodeReport, ModuleInfo};
use tldr_core::{
build_project_call_graph, collect_all_functions, dead_code_analysis, FunctionRef, Language,
};
use crate::commands::daemon_router::{params_for_dead, try_daemon_route};
use crate::output::{OutputFormat, OutputWriter};
// CLI arguments for the `dead` subcommand (dead-code detection).
// NOTE: plain `//` comments are used deliberately — clap turns `///` doc
// comments on Args structs/fields into user-visible help text.
#[derive(Debug, Args)]
pub struct DeadArgs {
// Root file or directory to scan; defaults to the current directory.
#[arg(default_value = ".")]
pub path: PathBuf,
// Source language; auto-detected from the directory when omitted.
#[arg(long, short = 'l')]
pub lang: Option<Language>,
// Comma-separated function names treated as roots (never reported dead).
#[arg(long, short = 'e', value_delimiter = ',')]
pub entry_points: Vec<String>,
// Cap on how many dead functions are listed in the output.
#[arg(long, default_value = "100")]
pub max_items: usize,
// Use full call-graph analysis instead of the cheaper refcount heuristic.
#[arg(long)]
pub call_graph: bool,
}
impl DeadArgs {
    /// Execute the `dead` subcommand.
    ///
    /// Resolves the language, tries to obtain a `DeadCodeReport` from the
    /// daemon first, and otherwise computes it locally — via call-graph
    /// analysis when `--call-graph` is set, or reference counting by
    /// default. The report is then truncated to `max_items` and written in
    /// the requested output format.
    ///
    /// # Errors
    /// Propagates failures from call-graph construction, the analyses, and
    /// the output writer.
    pub fn run(&self, format: OutputFormat, quiet: bool) -> Result<()> {
        let writer = OutputWriter::new(format, quiet);
        let language = self
            .lang
            .unwrap_or_else(|| Language::from_directory(&self.path).unwrap_or(Language::Python));
        // Computed once and reused for both the daemon route and local
        // analysis (previously duplicated).
        let entry_points: Option<Vec<String>> = if self.entry_points.is_empty() {
            None
        } else {
            Some(self.entry_points.clone())
        };

        // Acquire the report from exactly one source; truncation and output
        // below are shared (previously duplicated for the daemon path).
        let report = match try_daemon_route::<DeadCodeReport>(
            &self.path,
            "dead",
            params_for_dead(Some(&self.path), entry_points.as_deref()),
        ) {
            Some(report) => report,
            None if self.call_graph => {
                writer.progress(&format!(
                    "Building call graph for {} ({:?})...",
                    self.path.display(),
                    language
                ));
                let graph = build_project_call_graph(&self.path, language, None, true)?;
                writer.progress("Extracting all functions...");
                let module_infos = collect_module_infos(&self.path, language);
                let all_functions: Vec<FunctionRef> = collect_all_functions(&module_infos);
                writer.progress("Analyzing dead code (call graph)...");
                dead_code_analysis(&graph, &all_functions, entry_points.as_deref())?
            }
            None => {
                writer.progress(&format!(
                    "Scanning {} ({:?}) with reference counting...",
                    self.path.display(),
                    language
                ));
                let (module_infos, merged_ref_counts) =
                    collect_module_infos_with_refcounts(&self.path, language);
                let all_functions: Vec<FunctionRef> = collect_all_functions(&module_infos);
                writer.progress("Analyzing dead code (refcount)...");
                dead_code_analysis_refcount(
                    &all_functions,
                    &merged_ref_counts,
                    entry_points.as_deref(),
                )?
            }
        };

        let (truncated_report, truncated, total_count, shown_count) =
            apply_truncation(report, self.max_items);
        if writer.is_text() {
            let text = format_dead_code_text_truncated(
                &truncated_report,
                truncated,
                total_count,
                shown_count,
            );
            writer.write_text(&text)?;
        } else {
            let output = DeadCodeOutput {
                report: truncated_report,
                truncated,
                total_count,
                shown_count,
            };
            writer.write(&output)?;
        }
        Ok(())
    }
}
/// Returns `true` when a JS/TS source begins with a `"use server"` or
/// `"use client"` framework directive (single or double quoted, optional
/// trailing semicolon) within its first five lines.
///
/// Scanning stops at the first line that cannot legally precede a directive
/// (anything other than blanks, comments, or other string-literal lines).
fn source_has_framework_directive(source: &str, ext: &str) -> bool {
    // Only JavaScript/TypeScript family files can carry these directives.
    if !matches!(ext, "ts" | "tsx" | "js" | "jsx" | "mjs") {
        return false;
    }
    for raw in source.lines().take(5) {
        let line = raw.trim();
        // Normalize an optional trailing semicolon, then compare against
        // the four quoted directive spellings.
        let stripped = line.strip_suffix(';').unwrap_or(line);
        if matches!(
            stripped,
            r#""use server""# | r#"'use server'"# | r#""use client""# | r#"'use client'"#
        ) {
            return true;
        }
        // Blank lines, comments, and other string-literal lines (e.g.
        // "use strict") may precede the directive; anything else ends it.
        let may_precede = line.is_empty()
            || line.starts_with("//")
            || line.starts_with("/*")
            || matches!(line.chars().next(), Some('*' | '"' | '\''));
        if !may_precede {
            break;
        }
    }
    false
}
/// Adds a `use_server_directive` pseudo-decorator to every function and
/// class method in `info` when the file opens with a framework directive,
/// so downstream dead-code analysis can treat them as externally invoked.
/// Idempotent: the tag is never added twice.
fn tag_directive_functions(info: &mut ModuleInfo, source: &str, path: &Path) {
    let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
    if !source_has_framework_directive(source, ext) {
        return;
    }
    // Shared tagging logic for free functions and methods alike.
    let ensure_tag = |decorators: &mut Vec<String>| {
        let tag = "use_server_directive".to_string();
        if !decorators.contains(&tag) {
            decorators.push(tag);
        }
    };
    for func in info.functions.iter_mut() {
        ensure_tag(&mut func.decorators);
    }
    for class in info.classes.iter_mut() {
        for method in class.methods.iter_mut() {
            ensure_tag(&mut method.decorators);
        }
    }
}
/// Collects `(relative_path, ModuleInfo)` pairs for every `language` source
/// file under `path`, or just for `path` itself when it is a single file.
/// Directory scans stop with a warning on stderr after `MAX_FILES` matches.
fn collect_module_infos(path: &Path, language: Language) -> Vec<(PathBuf, ModuleInfo)> {
    let mut modules: Vec<(PathBuf, ModuleInfo)> = Vec::new();

    // Single-file mode: the module is keyed by its bare file name.
    if path.is_file() {
        if let Ok(mut info) = extract_file(path, path.parent()) {
            if let Ok(source) = std::fs::read_to_string(path) {
                tag_directive_functions(&mut info, &source, path);
            }
            let key = path
                .file_name()
                .map(PathBuf::from)
                .unwrap_or_else(|| path.to_path_buf());
            modules.push((key, info));
        }
        return modules;
    }

    let wanted: &[&str] = language.extensions();
    let mut matched: usize = 0;
    for entry in WalkDir::new(path)
        .follow_links(false)
        .into_iter()
        .filter_map(Result::ok)
    {
        let file_path = entry.path();
        if !file_path.is_file() {
            continue;
        }
        let ext = match file_path.extension().and_then(|e| e.to_str()) {
            Some(e) => e,
            None => continue,
        };
        // `extensions()` entries carry a leading dot.
        let dotted = format!(".{}", ext);
        if !wanted.contains(&dotted.as_str()) {
            continue;
        }
        matched += 1;
        if matched > MAX_FILES {
            eprintln!(
                "Warning: dead code scan truncated at {} files in {}",
                MAX_FILES,
                path.display()
            );
            break;
        }
        if let Ok(mut info) = extract_file(file_path, Some(path)) {
            if let Ok(source) = std::fs::read_to_string(file_path) {
                tag_directive_functions(&mut info, &source, file_path);
            }
            // Keyed by path relative to the scan root when possible.
            let rel = file_path
                .strip_prefix(path)
                .unwrap_or(file_path)
                .to_path_buf();
            modules.push((rel, info));
        }
    }
    modules
}
/// Like `collect_module_infos`, but parses each file once and additionally
/// returns project-wide identifier reference counts, which feed the
/// refcount-based dead-code heuristic. Directory scans stop with a warning
/// after `MAX_FILES` matching files.
pub(crate) fn collect_module_infos_with_refcounts(
    path: &Path,
    language: Language,
) -> (Vec<(PathBuf, ModuleInfo)>, HashMap<String, usize>) {
    let mut modules: Vec<(PathBuf, ModuleInfo)> = Vec::new();
    let mut counts: HashMap<String, usize> = HashMap::new();

    // Single-file mode: one parse serves both extraction and counting.
    if path.is_file() {
        if let Ok((tree, source, lang)) = parse_file(path) {
            if let Ok(mut info) = extract_from_tree(&tree, &source, lang, path, path.parent()) {
                tag_directive_functions(&mut info, &source, path);
                let key = path
                    .file_name()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| path.to_path_buf());
                modules.push((key, info));
            }
            for (name, n) in count_identifiers_in_tree(&tree, source.as_bytes(), lang) {
                *counts.entry(name).or_default() += n;
            }
        }
        return (modules, counts);
    }

    let wanted: &[&str] = language.extensions();
    let mut matched: usize = 0;
    for entry in WalkDir::new(path)
        .follow_links(false)
        .into_iter()
        .filter_map(Result::ok)
    {
        let file_path = entry.path();
        if !file_path.is_file() {
            continue;
        }
        let ext = match file_path.extension().and_then(|e| e.to_str()) {
            Some(e) => e,
            None => continue,
        };
        // `extensions()` entries carry a leading dot.
        let dotted = format!(".{}", ext);
        if !wanted.contains(&dotted.as_str()) {
            continue;
        }
        matched += 1;
        if matched > MAX_FILES {
            eprintln!(
                "Warning: born-dead scan truncated at {} files in {}",
                MAX_FILES,
                path.display()
            );
            break;
        }
        if let Ok((tree, source, lang)) = parse_file(file_path) {
            if let Ok(mut info) =
                extract_from_tree(&tree, &source, lang, file_path, Some(path))
            {
                tag_directive_functions(&mut info, &source, file_path);
                let rel = file_path
                    .strip_prefix(path)
                    .unwrap_or(file_path)
                    .to_path_buf();
                modules.push((rel, info));
            }
            // Counts are merged even when module extraction fails, matching
            // the per-file parse-once design.
            for (name, n) in count_identifiers_in_tree(&tree, source.as_bytes(), lang) {
                *counts.entry(name).or_default() += n;
            }
        }
    }
    (modules, counts)
}
// Serialized output envelope: the report's fields (flattened to the top
// level) plus truncation metadata appended by `apply_truncation`.
#[derive(Serialize)]
struct DeadCodeOutput {
// Flattened so the report's fields appear at the top level of the output.
#[serde(flatten)]
report: DeadCodeReport,
// Omitted from output when false to keep the common case compact.
// NOTE(review): `default` only affects Deserialize, which this struct does
// not derive — presumably a harmless leftover; confirm before removing.
#[serde(skip_serializing_if = "is_false", default)]
truncated: bool,
total_count: usize,
shown_count: usize,
}
/// Serde helper for `skip_serializing_if`: true when the flag is unset.
fn is_false(b: &bool) -> bool {
    !*b
}
/// Caps the report at `max_items` dead functions.
///
/// Returns `(report, truncated, total_count, shown_count)`, where
/// `truncated` is true when trimming occurred. Both the flat
/// `dead_functions` list and the `by_file` map are limited so that at most
/// `max_items` entries remain overall.
///
/// Fix: the previous version walked `by_file` in `HashMap` iteration order,
/// so WHICH files kept their entries was nondeterministic between runs.
/// Entries are now processed in sorted path order, making truncation stable.
fn apply_truncation(
    mut report: DeadCodeReport,
    max_items: usize,
) -> (DeadCodeReport, bool, usize, usize) {
    let total_count = report.dead_functions.len();
    if total_count <= max_items {
        return (report, false, total_count, total_count);
    }

    report.dead_functions.truncate(max_items);

    // Deterministic order for the per-file budget below.
    let mut entries: Vec<_> = std::mem::take(&mut report.by_file).into_iter().collect();
    entries.sort_by(|a, b| a.0.cmp(&b.0));

    let mut budget = max_items;
    let mut new_by_file = std::collections::HashMap::new();
    for (path, funcs) in entries {
        if budget == 0 {
            // Budget exhausted; remaining files are dropped entirely.
            break;
        }
        let kept: Vec<String> = funcs.into_iter().take(budget).collect();
        budget -= kept.len();
        new_by_file.insert(path, kept);
    }
    report.by_file = new_by_file;
    (report, true, total_count, max_items)
}
/// Renders a (possibly truncated) dead-code report as colored terminal text:
/// a summary header, a per-file listing of definitely-dead functions, and a
/// trailing truncation notice when `truncated` is set.
///
/// Fix: the per-file section previously printed in `HashMap` iteration
/// order, so the report ordering differed between identical runs. Files are
/// now sorted by path for stable, diffable output.
fn format_dead_code_text_truncated(
    report: &DeadCodeReport,
    truncated: bool,
    total_count: usize,
    shown_count: usize,
) -> String {
    use colored::Colorize;
    let mut output = String::new();
    output.push_str(&format!(
        "Dead Code Analysis\n\nDefinitely dead: {} / {} functions ({:.1}% dead)\n",
        report.total_dead.to_string().red(),
        report.total_functions,
        report.dead_percentage
    ));
    if report.total_possibly_dead > 0 {
        output.push_str(&format!(
            "Possibly dead (public but uncalled): {}\n",
            report.total_possibly_dead.to_string().yellow()
        ));
    }
    output.push('\n');
    if !report.by_file.is_empty() {
        output.push_str("Definitely dead:\n");
        // Sort by path so output order is deterministic.
        let mut files: Vec<_> = report.by_file.iter().collect();
        files.sort_by(|a, b| a.0.cmp(b.0));
        for (file, funcs) in files {
            output.push_str(&format!("{}\n", file.display().to_string().green()));
            for func in funcs {
                output.push_str(&format!(" - {}\n", func.red()));
            }
            output.push('\n');
        }
    }
    if truncated {
        output.push_str(&format!(
            "\n[{}: showing {} of {} dead functions]\n",
            "TRUNCATED".yellow(),
            shown_count,
            total_count
        ));
    }
    output
}