use crate::graph::CallGraph;
use crate::graph::InternalCallChain;
use crate::pagination::PaginationMode;
use crate::test_detection::is_test_file;
use crate::traversal::WalkEntry;
use crate::types::{
AnalyzeFileField, ClassInfo, FileInfo, FunctionInfo, ImportInfo, ModuleInfo, SemanticAnalysis,
};
use std::collections::{HashMap, HashSet};
use std::fmt::Write;
use std::path::{Path, PathBuf};
use thiserror::Error;
use tracing::instrument;
/// Line-span threshold above which an item counts as "multi-line".
/// NOTE(review): not referenced anywhere in this chunk — presumably used
/// elsewhere in the file; confirm before removing.
const MULTILINE_THRESHOLD: usize = 10;
/// Returns `true` when `func`'s line span lies entirely inside `class`'s span,
/// i.e. the function is (by position) a method of that class.
fn is_method_of_class(func: &FunctionInfo, class: &ClassInfo) -> bool {
    let starts_inside = func.line >= class.line;
    let ends_inside = func.end_line <= class.end_line;
    starts_inside && ends_inside
}
/// Maps each class name to the methods belonging to it.
///
/// When a class already carries an explicit method list, that list is used
/// directly; otherwise the methods are recovered from the flat function list
/// by line-span containment (see [`is_method_of_class`]).
fn collect_class_methods<'a>(
    classes: &'a [ClassInfo],
    functions: &'a [FunctionInfo],
) -> HashMap<String, Vec<&'a FunctionInfo>> {
    classes
        .iter()
        .map(|class| {
            let methods: Vec<&'a FunctionInfo> = if class.methods.is_empty() {
                functions
                    .iter()
                    .filter(|f| is_method_of_class(f, class))
                    .collect()
            } else {
                class.methods.iter().collect()
            };
            (class.name.clone(), methods)
        })
        .collect()
}
/// Renders a comma-separated, line-wrapped list of compact function
/// signatures.
///
/// Functions called more than three times get a "•count" suffix taken from
/// `call_frequency`. Lines wrap at roughly 100 characters; continuation
/// lines repeat the leading indent.
fn format_function_list_wrapped<'a>(
    functions: impl Iterator<Item = &'a crate::types::FunctionInfo>,
    call_frequency: &std::collections::HashMap<String, usize>,
) -> String {
    let mut output = String::new();
    let mut line = String::from(" ");
    for (i, func) in functions.enumerate() {
        let mut entry = func.compact_signature();
        if let Some(&count) = call_frequency.get(&func.name)
            && count > 3
        {
            // Annotate hot functions with their call count.
            let _ = write!(entry, "\u{2022}{count}");
        }
        if i == 0 {
            line.push_str(&entry);
        } else if line.len() + entry.len() + 2 > 100 {
            // Flush the full line and start a fresh indented one.
            output.push_str(&line);
            output.push('\n');
            line = String::with_capacity(2 + entry.len());
            line.push_str(" ");
            line.push_str(&entry);
        } else {
            line.push_str(", ");
            line.push_str(&entry);
        }
    }
    // Emit the trailing partial line unless it holds only the indent.
    if !line.trim().is_empty() {
        output.push_str(&line);
        output.push('\n');
    }
    output
}
/// Builds the bracketed "[NL, NF, NC]" info suffix for a file entry.
///
/// Zero-valued counts are omitted entirely; `None` is returned when all
/// three counts are zero.
fn format_file_info_parts(line_count: usize, fn_count: usize, cls_count: usize) -> Option<String> {
    let labeled = [(line_count, 'L'), (fn_count, 'F'), (cls_count, 'C')];
    let parts: Vec<String> = labeled
        .iter()
        .filter(|&&(count, _)| count > 0)
        .map(|&(count, suffix)| format!("{count}{suffix}"))
        .collect();
    if parts.is_empty() {
        None
    } else {
        Some(format!("[{}]", parts.join(", ")))
    }
}
/// Displays `path` relative to `base_path` when possible, falling back to
/// the path's own display form when stripping fails or no base is given.
fn strip_base_path(path: &Path, base_path: Option<&Path>) -> String {
    base_path
        .and_then(|base| path.strip_prefix(base).ok())
        .unwrap_or(path)
        .display()
        .to_string()
}
/// Errors produced while formatting analysis output.
#[derive(Debug, Error)]
pub enum FormatterError {
    /// Propagated from call-graph traversal (converted via `#[from]`).
    #[error("Graph error: {0}")]
    GraphError(#[from] crate::graph::GraphError),
}
/// Renders the full structure view: an aggregate header line, a SUMMARY
/// block, a language breakdown, and an indented file tree annotated with
/// per-file "[LOC, FUNCTIONS, CLASSES]" info. Test-file lines are buffered
/// and emitted under a trailing TEST FILES section.
#[instrument(skip_all)]
#[allow(clippy::too_many_lines)]
pub fn format_structure(
    entries: &[WalkEntry],
    analysis_results: &[FileInfo],
    max_depth: Option<u32>,
) -> String {
    let mut output = String::new();
    // Path-string -> analysis lookup for annotating tree entries in O(1).
    let analysis_map: HashMap<String, &FileInfo> = analysis_results
        .iter()
        .map(|a| (a.path.clone(), a))
        .collect();
    let (prod_files, test_files): (Vec<_>, Vec<_>) =
        analysis_results.iter().partition(|a| !a.is_test);
    let total_loc: usize = analysis_results.iter().map(|a| a.line_count).sum();
    let total_functions: usize = analysis_results.iter().map(|a| a.function_count).sum();
    let total_classes: usize = analysis_results.iter().map(|a| a.class_count).sum();
    let mut lang_counts: HashMap<String, usize> = HashMap::new();
    for analysis in analysis_results {
        *lang_counts.entry(analysis.language.clone()).or_insert(0) += 1;
    }
    let total_files = analysis_results.len();
    // Dominant language by file count, rendered as "name NN%".
    let primary_lang = lang_counts
        .iter()
        .max_by_key(|&(_, count)| count)
        .map_or_else(
            || "unknown 0%".to_string(),
            |(name, count)| {
                let percentage = if total_files > 0 {
                    (*count * 100) / total_files
                } else {
                    0
                };
                format!("{name} {percentage}%")
            },
        );
    let _ = writeln!(
        output,
        "{total_files} files, {total_loc}L, {total_functions}F, {total_classes}C ({primary_lang})"
    );
    output.push_str("SUMMARY:\n");
    let depth_label = match max_depth {
        Some(n) if n > 0 => format!(" (max_depth={n})"),
        _ => String::new(),
    };
    let _ = writeln!(
        output,
        "Shown: {} files ({} prod, {} test), {total_loc}L, {total_functions}F, {total_classes}C{depth_label}",
        total_files,
        prod_files.len(),
        test_files.len()
    );
    if !lang_counts.is_empty() {
        output.push_str("Languages: ");
        // Sorted by language name for stable output.
        let mut langs: Vec<_> = lang_counts.iter().collect();
        langs.sort_by_key(|&(name, _)| name);
        let lang_strs: Vec<String> = langs
            .iter()
            .map(|(name, count)| {
                let percentage = if total_files > 0 {
                    (**count * 100) / total_files
                } else {
                    0
                };
                format!("{name} ({percentage}%)")
            })
            .collect();
        output.push_str(&lang_strs.join(", "));
        output.push('\n');
    }
    output.push('\n');
    output.push_str("PATH [LOC, FUNCTIONS, CLASSES]\n");
    // Test-file lines collect here so they can follow the main tree.
    let mut test_buf = String::new();
    for entry in entries {
        // Depth 0 is the walk root itself; skip it.
        if entry.depth == 0 {
            continue;
        }
        let indent = " ".repeat(entry.depth - 1);
        let name = entry
            .path
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("?");
        if entry.is_dir {
            let line = format!("{indent}{name}/\n");
            output.push_str(&line);
        } else if let Some(analysis) = analysis_map.get(&entry.path.display().to_string()) {
            if let Some(info_str) = format_file_info_parts(
                analysis.line_count,
                analysis.function_count,
                analysis.class_count,
            ) {
                let line = format!("{indent}{name} {info_str}\n");
                if analysis.is_test {
                    test_buf.push_str(&line);
                } else {
                    output.push_str(&line);
                }
            } else {
                // All counts were zero: emit the bare name.
                let line = format!("{indent}{name}\n");
                if analysis.is_test {
                    test_buf.push_str(&line);
                } else {
                    output.push_str(&line);
                }
            }
        }
    }
    if !test_buf.is_empty() {
        output.push_str("\nTEST FILES [LOC, FUNCTIONS, CLASSES]\n");
        output.push_str(&test_buf);
    }
    output
}
/// Formats the detail view for one analyzed file: a header line (with a
/// `[TEST]` tag for test files), the classes section, top-level functions,
/// and the imports section.
#[instrument(skip_all)]
pub fn format_file_details(
    path: &str,
    analysis: &SemanticAnalysis,
    line_count: usize,
    is_test: bool,
    base_path: Option<&Path>,
) -> String {
    let display_path = strip_base_path(Path::new(path), base_path);
    let fn_count = analysis.functions.len();
    let class_count = analysis.classes.len();
    let import_count = analysis.imports.len();
    let mut output = String::new();
    // Header: test files carry a distinct label.
    if is_test {
        let _ = writeln!(
            output,
            "FILE [TEST] {display_path}({line_count}L, {fn_count}F, {class_count}C, {import_count}I)"
        );
    } else {
        let _ = writeln!(
            output,
            "FILE: {display_path}({line_count}L, {fn_count}F, {class_count}C, {import_count}I)"
        );
    }
    output.push_str(&format_classes_section(
        &analysis.classes,
        &analysis.functions,
    ));
    // Functions not contained in any class span count as top-level.
    let top_level: Vec<&FunctionInfo> = analysis
        .functions
        .iter()
        .filter(|f| !analysis.classes.iter().any(|c| is_method_of_class(f, c)))
        .collect();
    if !top_level.is_empty() {
        output.push_str("F:\n");
        output.push_str(&format_function_list_wrapped(
            top_level.iter().copied(),
            &analysis.call_frequency,
        ));
    }
    output.push_str(&format_imports_section(&analysis.imports));
    output
}
/// Renders caller/callee chains as a two-level tree rooted at
/// `focus_symbol`.
///
/// Chains are grouped by parent (sorted by name via `BTreeMap`); duplicate
/// (parent, child) pairs collapse into one line annotated "(xN)". A chain
/// with an empty child records the parent alone.
fn format_chains_as_tree(chains: &[(&str, &str)], arrow: &str, focus_symbol: &str) -> String {
    use std::collections::BTreeMap;
    if chains.is_empty() {
        return " (none)\n".to_string();
    }
    // parent -> (child -> occurrence count)
    let mut groups: BTreeMap<String, BTreeMap<String, usize>> = BTreeMap::new();
    for &(parent, child) in chains {
        let children = groups.entry(parent.to_string()).or_default();
        if !child.is_empty() {
            *children.entry(child.to_string()).or_insert(0) += 1;
        }
    }
    let mut output = String::new();
    for (parent, children) in groups {
        let _ = writeln!(output, " {focus_symbol} {arrow} {parent}");
        // Most-frequent children first; ties broken alphabetically.
        let mut ordered: Vec<_> = children.into_iter().collect();
        ordered.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
        for (child, count) in ordered {
            if count > 1 {
                let _ = writeln!(output, " {arrow} {child} (x{count})");
            } else {
                let _ = writeln!(output, " {arrow} {child}");
            }
        }
    }
    output
}
/// Renders the full focused view for `symbol`: definition sites, caller
/// and callee trees, statistics, and the set of files involved.
///
/// Pre-computed chains may be supplied by the caller; when either side is
/// missing, both directions are derived from `graph` up to `follow_depth`.
///
/// # Errors
/// Returns [`FormatterError`] if chain traversal in the graph fails.
#[instrument(skip_all)]
#[allow(clippy::too_many_lines)]
#[allow(clippy::similar_names)]
pub(crate) fn format_focused_internal(
    graph: &CallGraph,
    symbol: &str,
    follow_depth: u32,
    base_path: Option<&Path>,
    incoming_chains: Option<&[InternalCallChain]>,
    outgoing_chains: Option<&[InternalCallChain]>,
) -> Result<String, FormatterError> {
    let mut output = String::new();
    let def_count = graph.definitions.get(symbol).map_or(0, Vec::len);
    // Borrow caller-provided chains when both sides are present; otherwise
    // compute them here and borrow the locally-owned vectors.
    let (incoming_chains_vec, outgoing_chains_vec);
    let (incoming_chains_ref, outgoing_chains_ref) =
        if let (Some(inc), Some(out)) = (incoming_chains, outgoing_chains) {
            (inc, out)
        } else {
            incoming_chains_vec = graph.find_incoming_chains(symbol, follow_depth)?;
            outgoing_chains_vec = graph.find_outgoing_chains(symbol, follow_depth)?;
            (
                incoming_chains_vec.as_slice(),
                outgoing_chains_vec.as_slice(),
            )
        };
    // Split incoming chains into production vs test callers based on the
    // first hop's file and a "test_" name prefix.
    let (prod_chains, test_chains): (Vec<_>, Vec<_>) =
        incoming_chains_ref.iter().cloned().partition(|chain| {
            chain
                .chain
                .first()
                .is_none_or(|(name, path, _)| !is_test_file(path) && !name.starts_with("test_"))
        });
    // Counts are de-duplicated by the first hop's symbol name.
    let callers_count = prod_chains
        .iter()
        .filter_map(|chain| chain.chain.first().map(|(p, _, _)| p))
        .collect::<std::collections::HashSet<_>>()
        .len();
    let callees_count = outgoing_chains_ref
        .iter()
        .filter_map(|chain| chain.chain.first().map(|(p, _, _)| p))
        .collect::<std::collections::HashSet<_>>()
        .len();
    let _ = writeln!(
        output,
        "FOCUS: {symbol} ({def_count} defs, {callers_count} callers, {callees_count} callees)"
    );
    let _ = writeln!(output, "DEPTH: {follow_depth}");
    if let Some(definitions) = graph.definitions.get(symbol) {
        output.push_str("DEFINED:\n");
        for (path, line) in definitions {
            let display = strip_base_path(path, base_path);
            let _ = writeln!(output, " {display}:{line}");
        }
    } else {
        output.push_str("DEFINED: (not found)\n");
    }
    output.push_str("CALLERS:\n");
    // Each chain contributes a (first hop, second hop) pair; single-element
    // chains get an empty second hop.
    let prod_refs: Vec<_> = prod_chains
        .iter()
        .filter_map(|chain| {
            if chain.chain.len() >= 2 {
                Some((chain.chain[0].0.as_str(), chain.chain[1].0.as_str()))
            } else if chain.chain.len() == 1 {
                Some((chain.chain[0].0.as_str(), ""))
            } else {
                None
            }
        })
        .collect();
    if prod_refs.is_empty() {
        output.push_str(" (none)\n");
    } else {
        output.push_str(&format_chains_as_tree(&prod_refs, "<-", symbol));
    }
    // Test callers are summarized as a count plus the files they live in.
    if !test_chains.is_empty() {
        let mut test_files: Vec<_> = test_chains
            .iter()
            .filter_map(|chain| {
                chain
                    .chain
                    .first()
                    .map(|(_, path, _)| path.to_string_lossy().into_owned())
            })
            .collect();
        test_files.sort();
        test_files.dedup();
        let display_files: Vec<_> = test_files
            .iter()
            .map(|f| strip_base_path(Path::new(f), base_path))
            .collect();
        let file_list = display_files.join(", ");
        let test_count = test_chains.len();
        let _ = writeln!(
            output,
            "CALLERS (test): {test_count} test functions (in {file_list})"
        );
    }
    output.push_str("CALLEES:\n");
    let outgoing_refs: Vec<_> = outgoing_chains_ref
        .iter()
        .filter_map(|chain| {
            if chain.chain.len() >= 2 {
                Some((chain.chain[0].0.as_str(), chain.chain[1].0.as_str()))
            } else if chain.chain.len() == 1 {
                Some((chain.chain[0].0.as_str(), ""))
            } else {
                None
            }
        })
        .collect();
    if outgoing_refs.is_empty() {
        output.push_str(" (none)\n");
    } else {
        output.push_str(&format_chains_as_tree(&outgoing_refs, "->", symbol));
    }
    output.push_str("STATISTICS:\n");
    let _ = writeln!(output, " Incoming calls: {callers_count}");
    let _ = writeln!(output, " Outgoing calls: {callees_count}");
    // Collect every file touched by any chain or definition.
    let mut files = HashSet::new();
    for chain in &prod_chains {
        for (_, path, _) in &chain.chain {
            files.insert(path.clone());
        }
    }
    for chain in outgoing_chains_ref {
        for (_, path, _) in &chain.chain {
            files.insert(path.clone());
        }
    }
    if let Some(definitions) = graph.definitions.get(symbol) {
        for (path, _) in definitions {
            files.insert(path.clone());
        }
    }
    let (prod_files, test_files): (Vec<_>, Vec<_>) =
        files.into_iter().partition(|path| !is_test_file(path));
    output.push_str("FILES:\n");
    if prod_files.is_empty() && test_files.is_empty() {
        output.push_str(" (none)\n");
    } else {
        if !prod_files.is_empty() {
            let mut sorted_files = prod_files;
            sorted_files.sort();
            for file in sorted_files {
                let display = strip_base_path(&file, base_path);
                let _ = writeln!(output, " {display}");
            }
        }
        if !test_files.is_empty() {
            output.push_str(" TEST FILES:\n");
            let mut sorted_files = test_files;
            sorted_files.sort();
            for file in sorted_files {
                let display = strip_base_path(&file, base_path);
                let _ = writeln!(output, " {display}");
            }
        }
    }
    Ok(output)
}
/// Renders the condensed focused view for `symbol`: definitions, the ten
/// most frequent callers and callees, and a hint to request full output.
///
/// Pre-computed chains may be supplied; when either side is missing, both
/// directions are derived from `graph` up to `follow_depth`.
///
/// # Errors
/// Returns [`FormatterError`] if chain traversal in the graph fails.
#[instrument(skip_all)]
#[allow(clippy::too_many_lines)]
#[allow(clippy::similar_names)]
pub(crate) fn format_focused_summary_internal(
    graph: &CallGraph,
    symbol: &str,
    follow_depth: u32,
    base_path: Option<&Path>,
    incoming_chains: Option<&[InternalCallChain]>,
    outgoing_chains: Option<&[InternalCallChain]>,
) -> Result<String, FormatterError> {
    let mut output = String::new();
    let def_count = graph.definitions.get(symbol).map_or(0, Vec::len);
    // Borrow caller-provided chains when both sides are present; otherwise
    // compute them here and borrow the locally-owned vectors.
    let (incoming_chains_vec, outgoing_chains_vec);
    let (incoming_chains_ref, outgoing_chains_ref) =
        if let (Some(inc), Some(out)) = (incoming_chains, outgoing_chains) {
            (inc, out)
        } else {
            incoming_chains_vec = graph.find_incoming_chains(symbol, follow_depth)?;
            outgoing_chains_vec = graph.find_outgoing_chains(symbol, follow_depth)?;
            (
                incoming_chains_vec.as_slice(),
                outgoing_chains_vec.as_slice(),
            )
        };
    // Split incoming chains into production vs test callers based on the
    // first hop's file and a "test_" name prefix.
    let (prod_chains, test_chains): (Vec<_>, Vec<_>) =
        incoming_chains_ref.iter().cloned().partition(|chain| {
            chain
                .chain
                .first()
                .is_none_or(|(name, path, _)| !is_test_file(path) && !name.starts_with("test_"))
        });
    // Counts are de-duplicated by the first hop's symbol name.
    let callers_count = prod_chains
        .iter()
        .filter_map(|chain| chain.chain.first().map(|(p, _, _)| p))
        .collect::<std::collections::HashSet<_>>()
        .len();
    let callees_count = outgoing_chains_ref
        .iter()
        .filter_map(|chain| chain.chain.first().map(|(p, _, _)| p))
        .collect::<std::collections::HashSet<_>>()
        .len();
    let _ = writeln!(
        output,
        "FOCUS: {symbol} ({def_count} defs, {callers_count} callers, {callees_count} callees)"
    );
    let _ = writeln!(output, "DEPTH: {follow_depth}");
    if let Some(definitions) = graph.definitions.get(symbol) {
        output.push_str("DEFINED:\n");
        for (path, line) in definitions {
            let display = strip_base_path(path, base_path);
            let _ = writeln!(output, " {display}:{line}");
        }
    } else {
        output.push_str("DEFINED: (not found)\n");
    }
    output.push_str("CALLERS (top 10):\n");
    if prod_chains.is_empty() {
        output.push_str(" (none)\n");
    } else {
        // caller name -> (occurrence count, display path of first sighting)
        let mut caller_freq: std::collections::HashMap<String, (usize, String)> =
            std::collections::HashMap::new();
        for chain in &prod_chains {
            if let Some((name, path, _)) = chain.chain.first() {
                let file_path = strip_base_path(path, base_path);
                caller_freq
                    .entry(name.clone())
                    .and_modify(|(count, _)| *count += 1)
                    .or_insert((1, file_path));
            }
        }
        // Most frequent first; only the top ten are shown.
        let mut sorted_callers: Vec<_> = caller_freq.into_iter().collect();
        sorted_callers.sort_by(|a, b| b.1.0.cmp(&a.1.0));
        for (name, (_, file_path)) in sorted_callers.into_iter().take(10) {
            let _ = writeln!(output, " {name} {file_path}");
        }
    }
    // Test callers are summarized as counts only (no file list here).
    if !test_chains.is_empty() {
        let mut test_files: Vec<_> = test_chains
            .iter()
            .filter_map(|chain| {
                chain
                    .chain
                    .first()
                    .map(|(_, path, _)| path.to_string_lossy().into_owned())
            })
            .collect();
        test_files.sort();
        test_files.dedup();
        let test_count = test_chains.len();
        let test_file_count = test_files.len();
        let _ = writeln!(
            output,
            "CALLERS (test): {test_count} test functions (in {test_file_count} files)"
        );
    }
    output.push_str("CALLEES (top 10):\n");
    if outgoing_chains_ref.is_empty() {
        output.push_str(" (none)\n");
    } else {
        let mut callee_freq: std::collections::HashMap<String, usize> =
            std::collections::HashMap::new();
        for chain in outgoing_chains_ref {
            if let Some((name, _, _)) = chain.chain.first() {
                *callee_freq.entry(name.clone()).or_insert(0) += 1;
            }
        }
        let mut sorted_callees: Vec<_> = callee_freq.into_iter().collect();
        sorted_callees.sort_by(|a, b| b.1.cmp(&a.1));
        for (name, _) in sorted_callees.into_iter().take(10) {
            let _ = writeln!(output, " {name}");
        }
    }
    output.push_str("SUGGESTION:\n");
    output.push_str("Use summary=false with force=true for full output\n");
    Ok(output)
}
/// Public entry point for the focused summary view; both chain directions
/// are always computed from `graph` rather than supplied by the caller.
///
/// # Errors
/// Returns [`FormatterError`] if chain traversal in the graph fails.
pub fn format_focused_summary(
    graph: &CallGraph,
    symbol: &str,
    follow_depth: u32,
    base_path: Option<&Path>,
) -> Result<String, FormatterError> {
    format_focused_summary_internal(graph, symbol, follow_depth, base_path, None, None)
}
/// Renders the depth-1 summary view: totals, language breakdown, one line
/// per top-level entry (directories get aggregate stats, "top" files, and
/// up to five subdirectory names), and a SUGGESTION pointing at the
/// largest source directory.
///
/// `subtree_counts`, when given, supplies the true recursive file count
/// per directory (the walk itself may be depth-limited).
/// NOTE(review): lookups use `binary_search_by_key`, so `subtree_counts`
/// must be sorted by path — confirm at the call site.
#[instrument(skip_all)]
#[allow(clippy::too_many_lines)]
pub fn format_summary(
    entries: &[WalkEntry],
    analysis_results: &[FileInfo],
    max_depth: Option<u32>,
    subtree_counts: Option<&[(PathBuf, usize)]>,
) -> String {
    let mut output = String::new();
    let (prod_files, test_files): (Vec<_>, Vec<_>) =
        analysis_results.iter().partition(|a| !a.is_test);
    let total_loc: usize = analysis_results.iter().map(|a| a.line_count).sum();
    let total_functions: usize = analysis_results.iter().map(|a| a.function_count).sum();
    let total_classes: usize = analysis_results.iter().map(|a| a.class_count).sum();
    let mut lang_counts: HashMap<String, usize> = HashMap::new();
    for analysis in analysis_results {
        *lang_counts.entry(analysis.language.clone()).or_insert(0) += 1;
    }
    let total_files = analysis_results.len();
    output.push_str("SUMMARY:\n");
    let depth_label = match max_depth {
        Some(n) if n > 0 => format!(" (max_depth={n})"),
        _ => String::new(),
    };
    let prod_count = prod_files.len();
    let test_count = test_files.len();
    let _ = writeln!(
        output,
        "{total_files} files ({prod_count} prod, {test_count} test), {total_loc}L, {total_functions}F, {total_classes}C{depth_label}"
    );
    if !lang_counts.is_empty() {
        output.push_str("Languages: ");
        // Sorted by language name for stable output.
        let mut langs: Vec<_> = lang_counts.iter().collect();
        langs.sort_by_key(|&(name, _)| name);
        let lang_strs: Vec<String> = langs
            .iter()
            .map(|(name, count)| {
                let percentage = if total_files > 0 {
                    (**count * 100) / total_files
                } else {
                    0
                };
                format!("{name} ({percentage}%)")
            })
            .collect();
        output.push_str(&lang_strs.join(", "));
        output.push('\n');
    }
    output.push('\n');
    output.push_str("STRUCTURE (depth 1):\n");
    let analysis_map: HashMap<String, &FileInfo> = analysis_results
        .iter()
        .map(|a| (a.path.clone(), a))
        .collect();
    // Only depth-1 entries are listed, in path order.
    let mut depth1_entries: Vec<&WalkEntry> = entries.iter().filter(|e| e.depth == 1).collect();
    depth1_entries.sort_by(|a, b| a.path.cmp(&b.path));
    // Track the largest non-excluded directory for the SUGGESTION footer.
    let mut largest_dir_name: Option<String> = None;
    let mut largest_dir_path: Option<String> = None;
    let mut largest_dir_count: usize = 0;
    for entry in depth1_entries {
        let name = entry
            .path
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("?");
        if entry.is_dir {
            let dir_path_str = entry.path.display().to_string();
            // Analyzed files that live anywhere under this directory.
            let files_in_dir: Vec<&FileInfo> = analysis_results
                .iter()
                .filter(|f| Path::new(&f.path).starts_with(&entry.path))
                .collect();
            if files_in_dir.is_empty() {
                // Nothing analyzed here — but the subtree may still hold
                // files beyond the depth limit.
                let entry_name_str = name.to_string();
                if let Some(counts) = subtree_counts {
                    let true_count = counts
                        .binary_search_by_key(&&entry.path, |(p, _)| p)
                        .ok()
                        .map_or(0, |i| counts[i].1);
                    if true_count > 0 {
                        if !crate::EXCLUDED_DIRS.contains(&entry_name_str.as_str())
                            && true_count > largest_dir_count
                        {
                            largest_dir_count = true_count;
                            largest_dir_name = Some(entry_name_str);
                            largest_dir_path = Some(
                                entry
                                    .path
                                    .canonicalize()
                                    .unwrap_or_else(|_| entry.path.clone())
                                    .display()
                                    .to_string(),
                            );
                        }
                        let depth_val = max_depth.unwrap_or(0);
                        let _ = writeln!(
                            output,
                            " {name}/ [{true_count} files total; showing 0 at depth={depth_val}, 0L, 0F, 0C]"
                        );
                    } else {
                        let _ = writeln!(output, " {name}/");
                    }
                } else {
                    let _ = writeln!(output, " {name}/");
                }
            } else {
                let dir_file_count = files_in_dir.len();
                let dir_loc: usize = files_in_dir.iter().map(|f| f.line_count).sum();
                let dir_functions: usize = files_in_dir.iter().map(|f| f.function_count).sum();
                let dir_classes: usize = files_in_dir.iter().map(|f| f.class_count).sum();
                let entry_name_str = name.to_string();
                // Prefer the true recursive count when available.
                let effective_count = if let Some(counts) = subtree_counts {
                    counts
                        .binary_search_by_key(&&entry.path, |(p, _)| p)
                        .ok()
                        .map_or(dir_file_count, |i| counts[i].1)
                } else {
                    dir_file_count
                };
                if !crate::EXCLUDED_DIRS.contains(&entry_name_str.as_str())
                    && effective_count > largest_dir_count
                {
                    largest_dir_count = effective_count;
                    largest_dir_name = Some(entry_name_str);
                    largest_dir_path = Some(
                        entry
                            .path
                            .canonicalize()
                            .unwrap_or_else(|_| entry.path.clone())
                            .display()
                            .to_string(),
                    );
                }
                // "top:" hint — up to three files ranked by class count
                // (falling back to function count when no classes exist).
                let hint = if files_in_dir.len() > 1 && (dir_classes > 0 || dir_functions > 0) {
                    let mut top_files = files_in_dir.clone();
                    top_files.sort_unstable_by(|a, b| {
                        b.class_count
                            .cmp(&a.class_count)
                            .then(b.function_count.cmp(&a.function_count))
                            .then(a.path.cmp(&b.path))
                    });
                    let has_classes = top_files.iter().any(|f| f.class_count > 0);
                    if !has_classes {
                        top_files.sort_unstable_by(|a, b| {
                            b.function_count
                                .cmp(&a.function_count)
                                .then(a.path.cmp(&b.path))
                        });
                    }
                    let dir_path = Path::new(&dir_path_str);
                    let top_n: Vec<String> = top_files
                        .iter()
                        .take(3)
                        .filter(|f| {
                            if has_classes {
                                f.class_count > 0
                            } else {
                                f.function_count > 0
                            }
                        })
                        .map(|f| {
                            // Path shown relative to the directory, falling
                            // back to the bare file name.
                            let rel = Path::new(&f.path).strip_prefix(dir_path).map_or_else(
                                |_| {
                                    Path::new(&f.path)
                                        .file_name()
                                        .and_then(|n| n.to_str())
                                        .map_or_else(
                                            || "?".to_owned(),
                                            std::borrow::ToOwned::to_owned,
                                        )
                                },
                                |p| p.to_string_lossy().into_owned(),
                            );
                            let count = if has_classes {
                                f.class_count
                            } else {
                                f.function_count
                            };
                            let suffix = if has_classes { 'C' } else { 'F' };
                            format!("{rel}({count}{suffix})")
                        })
                        .collect();
                    if top_n.is_empty() {
                        String::new()
                    } else {
                        let joined = top_n.join(", ");
                        format!(" top: {joined}")
                    }
                } else {
                    String::new()
                };
                // "sub:" hint — up to five immediate subdirectory names.
                let mut subdirs: Vec<String> = entries
                    .iter()
                    .filter(|e| e.depth == 2 && e.is_dir && e.path.starts_with(&entry.path))
                    .filter_map(|e| {
                        e.path
                            .file_name()
                            .and_then(|n| n.to_str())
                            .map(std::borrow::ToOwned::to_owned)
                    })
                    .collect();
                subdirs.sort();
                subdirs.dedup();
                let subdir_suffix = if subdirs.is_empty() {
                    String::new()
                } else {
                    let subdirs_capped: Vec<String> =
                        subdirs.iter().take(5).map(|s| format!("{s}/")).collect();
                    let joined = subdirs_capped.join(", ");
                    format!(" sub: {joined}")
                };
                // Label notes when the depth limit hid part of the subtree.
                let files_label = if let Some(counts) = subtree_counts {
                    let true_count = counts
                        .binary_search_by_key(&&entry.path, |(p, _)| p)
                        .ok()
                        .map_or(dir_file_count, |i| counts[i].1);
                    if true_count == dir_file_count {
                        format!(
                            "{dir_file_count} files, {dir_loc}L, {dir_functions}F, {dir_classes}C"
                        )
                    } else {
                        let depth_val = max_depth.unwrap_or(0);
                        format!(
                            "{true_count} files total; showing {dir_file_count} at depth={depth_val}, {dir_loc}L, {dir_functions}F, {dir_classes}C"
                        )
                    }
                } else {
                    format!("{dir_file_count} files, {dir_loc}L, {dir_functions}F, {dir_classes}C")
                };
                let _ = writeln!(output, " {name}/ [{files_label}]{hint}{subdir_suffix}");
            }
        } else {
            // Plain file at depth 1: annotate with its own counts.
            if let Some(analysis) = analysis_map.get(&entry.path.display().to_string()) {
                if let Some(info_str) = format_file_info_parts(
                    analysis.line_count,
                    analysis.function_count,
                    analysis.class_count,
                ) {
                    let _ = writeln!(output, " {name} {info_str}");
                } else {
                    let _ = writeln!(output, " {name}");
                }
            }
        }
    }
    output.push('\n');
    if let (Some(name), Some(path)) = (largest_dir_name, largest_dir_path) {
        let _ = writeln!(
            output,
            "SUGGESTION: Largest source directory: {name}/ ({largest_dir_count} files total). For module details, re-run with path={path} and max_depth=2."
        );
    } else {
        output.push_str("SUGGESTION:\n");
        output.push_str("Use a narrower path for details (e.g., analyze src/core/)\n");
    }
    output
}
/// Renders the condensed per-file view: header counts, the ten largest
/// functions by line span, a capped class list, the import count, and a
/// hint to request full output.
#[instrument(skip_all)]
pub fn format_file_details_summary(
    semantic: &SemanticAnalysis,
    path: &str,
    line_count: usize,
) -> String {
    let mut output = String::new();
    output.push_str("FILE:\n");
    let _ = writeln!(output, " path: {path}");
    let fn_count = semantic.functions.len();
    let class_count = semantic.classes.len();
    let _ = writeln!(output, " {line_count}L, {fn_count}F, {class_count}C");
    output.push('\n');
    if !semantic.functions.is_empty() {
        output.push_str("TOP FUNCTIONS BY SIZE:\n");
        let mut funcs: Vec<&crate::types::FunctionInfo> = semantic.functions.iter().collect();
        let k = funcs.len().min(10);
        if k > 0 {
            // Partially select the k largest spans in O(n), then fully sort
            // just that prefix for display order.
            funcs.select_nth_unstable_by(k.saturating_sub(1), |a, b| {
                let a_span = a.end_line.saturating_sub(a.line);
                let b_span = b.end_line.saturating_sub(b.line);
                b_span.cmp(&a_span)
            });
            funcs[..k].sort_by(|a, b| {
                let a_span = a.end_line.saturating_sub(a.line);
                let b_span = b.end_line.saturating_sub(b.line);
                b_span.cmp(&a_span)
            });
        }
        for func in &funcs[..k] {
            let span = func.end_line.saturating_sub(func.line);
            let params = if func.parameters.is_empty() {
                String::new()
            } else {
                format!("({})", func.parameters.join(", "))
            };
            let _ = writeln!(
                output,
                " {}:{}: {} {} [{}L]",
                func.line, func.end_line, func.name, params, span
            );
        }
        output.push('\n');
    }
    if !semantic.classes.is_empty() {
        output.push_str("CLASSES:\n");
        // Small class lists are shown in full with method counts; larger
        // ones are truncated to the first five names.
        if semantic.classes.len() <= 10 {
            for class in &semantic.classes {
                let methods_count = class.methods.len();
                let _ = writeln!(output, " {}: {}M", class.name, methods_count);
            }
        } else {
            let _ = writeln!(output, " {} classes total", semantic.classes.len());
            for class in semantic.classes.iter().take(5) {
                let _ = writeln!(output, " {}", class.name);
            }
            if semantic.classes.len() > 5 {
                let _ = writeln!(output, " ... and {} more", semantic.classes.len() - 5);
            }
        }
        output.push('\n');
    }
    let _ = writeln!(output, "Imports: {}", semantic.imports.len());
    output.push('\n');
    output.push_str("SUGGESTION:\n");
    output.push_str("Use force=true for full output, or narrow your scope\n");
    output
}
/// Formats one page of file entries with a pagination banner, split into
/// production and test sections. Section headers appear only in verbose
/// mode; otherwise a blank line separates the two groups.
#[instrument(skip_all)]
pub fn format_structure_paginated(
    paginated_files: &[FileInfo],
    total_files: usize,
    max_depth: Option<u32>,
    base_path: Option<&Path>,
    verbose: bool,
) -> String {
    let depth_label = max_depth
        .filter(|&n| n > 0)
        .map_or_else(String::new, |n| format!(" (max_depth={n})"));
    let mut output = String::new();
    let _ = writeln!(
        output,
        "PAGINATED: showing {} of {} files{}\n",
        paginated_files.len(),
        total_files,
        depth_label
    );
    let (prod_files, test_files): (Vec<&FileInfo>, Vec<&FileInfo>) =
        paginated_files.iter().partition(|f| !f.is_test);
    if !prod_files.is_empty() {
        if verbose {
            output.push_str("FILES [LOC, FUNCTIONS, CLASSES]\n");
        }
        for file in &prod_files {
            output.push_str(&format_file_entry(file, base_path));
        }
    }
    if !test_files.is_empty() {
        if verbose {
            output.push_str("\nTEST FILES [LOC, FUNCTIONS, CLASSES]\n");
        } else if !prod_files.is_empty() {
            // Keep a visual gap between sections even without headers.
            output.push('\n');
        }
        for file in &test_files {
            output.push_str(&format_file_entry(file, base_path));
        }
    }
    output
}
/// Formats a single page of file details, honoring the optional `fields`
/// filter (absent or empty means "show everything"). Classes and imports
/// are emitted only on the first page (`offset == 0`).
#[instrument(skip_all)]
#[allow(clippy::too_many_arguments)]
pub fn format_file_details_paginated(
    functions_page: &[FunctionInfo],
    total_functions: usize,
    semantic: &SemanticAnalysis,
    path: &str,
    line_count: usize,
    offset: usize,
    verbose: bool,
    fields: Option<&[AnalyzeFileField]>,
) -> String {
    let mut output = String::new();
    // 1-based inclusive range of the functions shown on this page.
    let start = offset + 1;
    let end = offset + functions_page.len();
    let _ = writeln!(
        output,
        "FILE: {} ({}L, {}-{}/{}F, {}C, {}I)",
        path,
        line_count,
        start,
        end,
        total_functions,
        semantic.classes.len(),
        semantic.imports.len()
    );
    let show_all = fields.is_none_or(<[AnalyzeFileField]>::is_empty);
    let wants = |field: AnalyzeFileField| fields.is_some_and(|f| f.contains(&field));
    let show_classes = show_all || wants(AnalyzeFileField::Classes);
    let show_imports = show_all || wants(AnalyzeFileField::Imports);
    let show_functions = show_all || wants(AnalyzeFileField::Functions);
    if show_classes && offset == 0 && !semantic.classes.is_empty() {
        output.push_str(&format_classes_section(
            &semantic.classes,
            &semantic.functions,
        ));
    }
    if show_imports && offset == 0 && (verbose || !show_all) {
        output.push_str(&format_imports_section(&semantic.imports));
    }
    // Functions not contained in any class span count as top-level.
    let top_level: Vec<&FunctionInfo> = functions_page
        .iter()
        .filter(|func| {
            !semantic
                .classes
                .iter()
                .any(|class| is_method_of_class(func, class))
        })
        .collect();
    if show_functions && !top_level.is_empty() {
        output.push_str("F:\n");
        output.push_str(&format_function_list_wrapped(
            top_level.iter().copied(),
            &semantic.call_frequency,
        ));
    }
    output
}
/// Formats one page of the focused view, paginating either the caller or
/// the callee direction depending on `mode`. The non-paginated direction
/// is summarized as a count.
///
/// # Panics
/// Panics when called with [`PaginationMode::Default`]; the caller must
/// pick a concrete direction first.
#[instrument(skip_all)]
#[allow(clippy::too_many_arguments)]
#[allow(clippy::similar_names)]
pub fn format_focused_paginated(
    paginated_chains: &[InternalCallChain],
    total: usize,
    mode: PaginationMode,
    symbol: &str,
    prod_chains: &[InternalCallChain],
    test_chains: &[InternalCallChain],
    outgoing_chains: &[InternalCallChain],
    def_count: usize,
    offset: usize,
    base_path: Option<&Path>,
    _verbose: bool,
) -> String {
    // 1-based inclusive range of the chains shown on this page.
    let start = offset + 1;
    let end = offset + paginated_chains.len();
    let callers_count = prod_chains.len();
    let callees_count = outgoing_chains.len();
    let mut output = String::new();
    let _ = writeln!(
        output,
        "FOCUS: {symbol} ({def_count} defs, {callers_count} callers, {callees_count} callees)"
    );
    match mode {
        PaginationMode::Callers => {
            let _ = writeln!(output, "CALLERS ({start}-{end} of {total}):");
            // Each chain contributes a (first hop, second hop) pair;
            // single-element chains get an empty second hop.
            let page_refs: Vec<_> = paginated_chains
                .iter()
                .filter_map(|chain| {
                    if chain.chain.len() >= 2 {
                        Some((chain.chain[0].0.as_str(), chain.chain[1].0.as_str()))
                    } else if chain.chain.len() == 1 {
                        Some((chain.chain[0].0.as_str(), ""))
                    } else {
                        None
                    }
                })
                .collect();
            if page_refs.is_empty() {
                output.push_str(" (none)\n");
            } else {
                output.push_str(&format_chains_as_tree(&page_refs, "<-", symbol));
            }
            // Test callers are summarized as a count plus their files.
            if !test_chains.is_empty() {
                let mut test_files: Vec<_> = test_chains
                    .iter()
                    .filter_map(|chain| {
                        chain
                            .chain
                            .first()
                            .map(|(_, path, _)| path.to_string_lossy().into_owned())
                    })
                    .collect();
                test_files.sort();
                test_files.dedup();
                let display_files: Vec<_> = test_files
                    .iter()
                    .map(|f| strip_base_path(std::path::Path::new(f), base_path))
                    .collect();
                let _ = writeln!(
                    output,
                    "CALLERS (test): {} test functions (in {})",
                    test_chains.len(),
                    display_files.join(", ")
                );
            }
            // Callees are not listed in this mode — only counted.
            let callee_names: Vec<_> = outgoing_chains
                .iter()
                .filter_map(|chain| chain.chain.first().map(|(p, _, _)| p.clone()))
                .collect::<std::collections::HashSet<_>>()
                .into_iter()
                .collect();
            if callee_names.is_empty() {
                output.push_str("CALLEES: (none)\n");
            } else {
                let _ = writeln!(
                    output,
                    "CALLEES: {callees_count} (use cursor for callee pagination)"
                );
            }
        }
        PaginationMode::Callees => {
            // Callers are only counted in this mode.
            let _ = writeln!(output, "CALLERS: {callers_count} production callers");
            if !test_chains.is_empty() {
                let _ = writeln!(
                    output,
                    "CALLERS (test): {} test functions",
                    test_chains.len()
                );
            }
            let _ = writeln!(output, "CALLEES ({start}-{end} of {total}):");
            let page_refs: Vec<_> = paginated_chains
                .iter()
                .filter_map(|chain| {
                    if chain.chain.len() >= 2 {
                        Some((chain.chain[0].0.as_str(), chain.chain[1].0.as_str()))
                    } else if chain.chain.len() == 1 {
                        Some((chain.chain[0].0.as_str(), ""))
                    } else {
                        None
                    }
                })
                .collect();
            if page_refs.is_empty() {
                output.push_str(" (none)\n");
            } else {
                output.push_str(&format_chains_as_tree(&page_refs, "->", symbol));
            }
        }
        PaginationMode::Default => {
            unreachable!("format_focused_paginated called with PaginationMode::Default")
        }
    }
    output
}
fn format_file_entry(file: &FileInfo, base_path: Option<&Path>) -> String {
let mut parts = Vec::new();
if file.line_count > 0 {
parts.push(format!("{}L", file.line_count));
}
if file.function_count > 0 {
parts.push(format!("{}F", file.function_count));
}
if file.class_count > 0 {
parts.push(format!("{}C", file.class_count));
}
let display_path = strip_base_path(Path::new(&file.path), base_path);
if parts.is_empty() {
format!("{display_path}\n")
} else {
format!("{display_path} [{}]\n", parts.join(", "))
}
}
/// Formats a compact module overview: a header with counts, an "F:" line
/// listing `name:line` for each function, and an "I:" line listing each
/// import as `module` or `module:item, item`.
#[instrument(skip_all)]
pub fn format_module_info(info: &ModuleInfo) -> String {
    use std::fmt::Write as _;
    let fn_count = info.functions.len();
    let import_count = info.imports.len();
    // Rough capacity estimate to avoid repeated reallocation.
    let mut out = String::with_capacity(64 + fn_count * 24 + import_count * 32);
    let _ = writeln!(
        out,
        "FILE: {} ({}L, {}F, {}I)",
        info.name, info.line_count, fn_count, import_count
    );
    if !info.functions.is_empty() {
        out.push_str("F:\n ");
        for (i, f) in info.functions.iter().enumerate() {
            if i > 0 {
                out.push_str(", ");
            }
            let _ = write!(out, "{}:{}", f.name, f.line);
        }
        out.push('\n');
    }
    if !info.imports.is_empty() {
        out.push_str("I:\n ");
        for (i, imp) in info.imports.iter().enumerate() {
            if i > 0 {
                out.push_str("; ");
            }
            if imp.items.is_empty() {
                out.push_str(&imp.module);
            } else {
                let _ = write!(out, "{}:{}", imp.module, imp.items.join(", "));
            }
        }
        out.push('\n');
    }
    out
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_strip_base_path_relative() {
    // A path under the base is displayed relative to it.
    let path = Path::new("/home/user/project/src/main.rs");
    let base = Path::new("/home/user/project");
    let result = strip_base_path(path, Some(base));
    assert_eq!(result, "src/main.rs");
}
#[test]
fn test_strip_base_path_fallback_absolute() {
let path = Path::new("/other/project/src/main.rs");
let base = Path::new("/home/user/project");
let result = strip_base_path(path, Some(base));
assert_eq!(result, "/other/project/src/main.rs");
}
#[test]
fn test_strip_base_path_none() {
let path = Path::new("/home/user/project/src/main.rs");
let result = strip_base_path(path, None);
assert_eq!(result, "/home/user/project/src/main.rs");
}
#[test]
fn test_format_file_details_summary_empty() {
use crate::types::SemanticAnalysis;
use std::collections::HashMap;
let semantic = SemanticAnalysis {
functions: vec![],
classes: vec![],
imports: vec![],
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let result = format_file_details_summary(&semantic, "src/main.rs", 100);
assert!(result.contains("FILE:"));
assert!(result.contains("100L, 0F, 0C"));
assert!(result.contains("src/main.rs"));
assert!(result.contains("Imports: 0"));
assert!(result.contains("SUGGESTION:"));
}
#[test]
fn test_format_file_details_summary_with_functions() {
use crate::types::{ClassInfo, FunctionInfo, SemanticAnalysis};
use std::collections::HashMap;
let semantic = SemanticAnalysis {
functions: vec![
FunctionInfo {
name: "short".to_string(),
line: 10,
end_line: 12,
parameters: vec![],
return_type: None,
},
FunctionInfo {
name: "long_function".to_string(),
line: 20,
end_line: 50,
parameters: vec!["x".to_string(), "y".to_string()],
return_type: Some("i32".to_string()),
},
],
classes: vec![ClassInfo {
name: "MyClass".to_string(),
line: 60,
end_line: 80,
methods: vec![],
fields: vec![],
inherits: vec![],
}],
imports: vec![],
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let result = format_file_details_summary(&semantic, "src/lib.rs", 250);
assert!(result.contains("FILE:"));
assert!(result.contains("src/lib.rs"));
assert!(result.contains("250L, 2F, 1C"));
assert!(result.contains("TOP FUNCTIONS BY SIZE:"));
let long_idx = result.find("long_function").unwrap_or(0);
let short_idx = result.find("short").unwrap_or(0);
assert!(
long_idx > 0 && short_idx > 0 && long_idx < short_idx,
"long_function should appear before short"
);
assert!(result.contains("CLASSES:"));
assert!(result.contains("MyClass:"));
assert!(result.contains("Imports: 0"));
}
#[test]
fn test_format_file_info_parts_all_zero() {
assert_eq!(format_file_info_parts(0, 0, 0), None);
}
#[test]
fn test_format_file_info_parts_partial() {
assert_eq!(
format_file_info_parts(42, 0, 3),
Some("[42L, 3C]".to_string())
);
}
#[test]
fn test_format_file_info_parts_all_nonzero() {
assert_eq!(
format_file_info_parts(100, 5, 2),
Some("[100L, 5F, 2C]".to_string())
);
}
#[test]
fn test_format_function_list_wrapped_empty() {
let freq = std::collections::HashMap::new();
let result = format_function_list_wrapped(std::iter::empty(), &freq);
assert_eq!(result, "");
}
#[test]
fn test_format_function_list_wrapped_bullet_annotation() {
use crate::types::FunctionInfo;
use std::collections::HashMap;
let mut freq = HashMap::new();
freq.insert("frequent".to_string(), 5);
let funcs = vec![FunctionInfo {
name: "frequent".to_string(),
line: 1,
end_line: 10,
parameters: vec![],
return_type: Some("void".to_string()),
}];
let result = format_function_list_wrapped(funcs.iter(), &freq);
assert!(result.contains("\u{2022}5"));
}
#[test]
fn test_compact_format_omits_sections() {
use crate::types::{ClassInfo, FunctionInfo, ImportInfo, SemanticAnalysis};
use std::collections::HashMap;
let funcs: Vec<FunctionInfo> = (0..10)
.map(|i| FunctionInfo {
name: format!("fn_{}", i),
line: i * 5 + 1,
end_line: i * 5 + 4,
parameters: vec![format!("x: u32")],
return_type: Some("bool".to_string()),
})
.collect();
let imports: Vec<ImportInfo> = vec![ImportInfo {
module: "std::collections".to_string(),
items: vec!["HashMap".to_string()],
line: 1,
}];
let classes: Vec<ClassInfo> = vec![ClassInfo {
name: "MyStruct".to_string(),
line: 100,
end_line: 150,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let semantic = SemanticAnalysis {
functions: funcs,
classes,
imports,
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let verbose_out = format_file_details_paginated(
&semantic.functions,
semantic.functions.len(),
&semantic,
"src/lib.rs",
100,
0,
true,
None,
);
let compact_out = format_file_details_paginated(
&semantic.functions,
semantic.functions.len(),
&semantic,
"src/lib.rs",
100,
0,
false,
None,
);
assert!(verbose_out.contains("C:\n"), "verbose must have C: section");
assert!(verbose_out.contains("I:\n"), "verbose must have I: section");
assert!(verbose_out.contains("F:\n"), "verbose must have F: section");
assert!(
compact_out.contains("C:\n"),
"compact must have C: section (restored)"
);
assert!(
!compact_out.contains("I:\n"),
"compact must not have I: section (imports omitted)"
);
assert!(
compact_out.contains("F:\n"),
"compact must have F: section with wrapped formatting"
);
assert!(compact_out.contains("fn_0"), "compact must list functions");
let has_two_on_same_line = compact_out
.lines()
.any(|l| l.contains("fn_0") && l.contains("fn_1"));
assert!(
has_two_on_same_line,
"compact must render multiple functions per line (wrapped), not one-per-line"
);
}
#[test]
fn test_compact_mode_consistent_token_reduction() {
use crate::types::{FunctionInfo, SemanticAnalysis};
use std::collections::HashMap;
let funcs: Vec<FunctionInfo> = (0..50)
.map(|i| FunctionInfo {
name: format!("function_name_{}", i),
line: i * 10 + 1,
end_line: i * 10 + 8,
parameters: vec![
"arg1: u32".to_string(),
"arg2: String".to_string(),
"arg3: Option<bool>".to_string(),
],
return_type: Some("Result<Vec<String>, Error>".to_string()),
})
.collect();
let semantic = SemanticAnalysis {
functions: funcs,
classes: vec![],
imports: vec![],
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let verbose_out = format_file_details_paginated(
&semantic.functions,
semantic.functions.len(),
&semantic,
"src/large_file.rs",
1000,
0,
true,
None,
);
let compact_out = format_file_details_paginated(
&semantic.functions,
semantic.functions.len(),
&semantic,
"src/large_file.rs",
1000,
0,
false,
None,
);
assert!(
compact_out.len() <= verbose_out.len(),
"compact ({} chars) must be <= verbose ({} chars)",
compact_out.len(),
verbose_out.len(),
);
}
#[test]
fn test_format_module_info_happy_path() {
use crate::types::{ModuleFunctionInfo, ModuleImportInfo, ModuleInfo};
let info = ModuleInfo {
name: "parser.rs".to_string(),
line_count: 312,
language: "rust".to_string(),
functions: vec![
ModuleFunctionInfo {
name: "parse_file".to_string(),
line: 24,
},
ModuleFunctionInfo {
name: "parse_block".to_string(),
line: 58,
},
],
imports: vec![
ModuleImportInfo {
module: "crate::types".to_string(),
items: vec!["Token".to_string(), "Expr".to_string()],
},
ModuleImportInfo {
module: "std::io".to_string(),
items: vec!["BufReader".to_string()],
},
],
};
let result = format_module_info(&info);
assert!(result.starts_with("FILE: parser.rs (312L, 2F, 2I)"));
assert!(result.contains("F:"));
assert!(result.contains("parse_file:24"));
assert!(result.contains("parse_block:58"));
assert!(result.contains("I:"));
assert!(result.contains("crate::types:Token, Expr"));
assert!(result.contains("std::io:BufReader"));
assert!(result.contains("; "));
assert!(!result.contains('{'));
}
#[test]
fn test_format_module_info_empty() {
use crate::types::ModuleInfo;
let info = ModuleInfo {
name: "empty.rs".to_string(),
line_count: 0,
language: "rust".to_string(),
functions: vec![],
imports: vec![],
};
let result = format_module_info(&info);
assert!(result.starts_with("FILE: empty.rs (0L, 0F, 0I)"));
assert!(!result.contains("F:"));
assert!(!result.contains("I:"));
}
#[test]
fn test_compact_mode_empty_classes_no_header() {
use crate::types::{FunctionInfo, SemanticAnalysis};
use std::collections::HashMap;
let funcs: Vec<FunctionInfo> = (0..5)
.map(|i| FunctionInfo {
name: format!("fn_{}", i),
line: i * 5 + 1,
end_line: i * 5 + 4,
parameters: vec![],
return_type: None,
})
.collect();
let semantic = SemanticAnalysis {
functions: funcs,
classes: vec![], imports: vec![],
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let compact_out = format_file_details_paginated(
&semantic.functions,
semantic.functions.len(),
&semantic,
"src/simple.rs",
100,
0,
false,
None,
);
assert!(
!compact_out.contains("C:\n"),
"compact mode must not emit C: header when classes are empty"
);
}
#[test]
fn test_format_classes_with_methods() {
use crate::types::{ClassInfo, FunctionInfo};
let functions = vec![
FunctionInfo {
name: "method_a".to_string(),
line: 5,
end_line: 8,
parameters: vec![],
return_type: None,
},
FunctionInfo {
name: "method_b".to_string(),
line: 10,
end_line: 12,
parameters: vec![],
return_type: None,
},
FunctionInfo {
name: "top_level_func".to_string(),
line: 50,
end_line: 55,
parameters: vec![],
return_type: None,
},
];
let classes = vec![ClassInfo {
name: "MyClass".to_string(),
line: 1,
end_line: 30,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let output = format_classes_section(&classes, &functions);
assert!(
output.contains("MyClass:1-30"),
"class header should show start-end range"
);
assert!(output.contains("method_a:5"), "method_a should be listed");
assert!(output.contains("method_b:10"), "method_b should be listed");
assert!(
!output.contains("top_level_func"),
"top_level_func outside class range should not be listed"
);
}
#[test]
fn test_format_classes_method_cap() {
use crate::types::{ClassInfo, FunctionInfo};
let mut functions = Vec::new();
for i in 0..15 {
functions.push(FunctionInfo {
name: format!("method_{}", i),
line: 2 + i,
end_line: 3 + i,
parameters: vec![],
return_type: None,
});
}
let classes = vec![ClassInfo {
name: "LargeClass".to_string(),
line: 1,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let output = format_classes_section(&classes, &functions);
assert!(output.contains("method_0"), "first method should be listed");
assert!(output.contains("method_9"), "10th method should be listed");
assert!(
!output.contains("method_10"),
"11th method should not be listed (cap at 10)"
);
assert!(
output.contains("... (5 more)"),
"truncation message should show remaining count"
);
}
#[test]
fn test_format_classes_no_methods() {
use crate::types::{ClassInfo, FunctionInfo};
let functions = vec![FunctionInfo {
name: "top_level".to_string(),
line: 100,
end_line: 105,
parameters: vec![],
return_type: None,
}];
let classes = vec![ClassInfo {
name: "EmptyClass".to_string(),
line: 1,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let output = format_classes_section(&classes, &functions);
assert!(
output.contains("EmptyClass:1-50"),
"empty class header should appear"
);
assert!(
!output.contains("top_level"),
"top-level functions outside class should not appear"
);
}
#[test]
fn test_f_section_excludes_methods() {
use crate::types::{ClassInfo, FunctionInfo, SemanticAnalysis};
use std::collections::HashMap;
let functions = vec![
FunctionInfo {
name: "method_a".to_string(),
line: 5,
end_line: 10,
parameters: vec![],
return_type: None,
},
FunctionInfo {
name: "top_level".to_string(),
line: 50,
end_line: 55,
parameters: vec![],
return_type: None,
},
];
let semantic = SemanticAnalysis {
functions,
classes: vec![ClassInfo {
name: "TestClass".to_string(),
line: 1,
end_line: 30,
methods: vec![],
fields: vec![],
inherits: vec![],
}],
imports: vec![],
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let output = format_file_details("test.rs", &semantic, 100, false, None);
assert!(output.contains("C:"), "classes section should exist");
assert!(
output.contains("method_a:5"),
"method should be in C: section"
);
assert!(output.contains("F:"), "F: section should exist");
assert!(
output.contains("top_level"),
"top-level function should be in F: section"
);
let f_pos = output.find("F:").unwrap();
let method_pos = output.find("method_a").unwrap();
assert!(
method_pos < f_pos,
"method_a should appear before F: section"
);
}
#[test]
fn test_format_focused_paginated_unit() {
use crate::graph::InternalCallChain;
use crate::pagination::PaginationMode;
use std::path::PathBuf;
let make_chain = |name: &str| -> InternalCallChain {
InternalCallChain {
chain: vec![
(name.to_string(), PathBuf::from("src/lib.rs"), 10),
("target".to_string(), PathBuf::from("src/lib.rs"), 5),
],
}
};
let prod_chains: Vec<InternalCallChain> = (0..8)
.map(|i| make_chain(&format!("caller_{}", i)))
.collect();
let page = &prod_chains[0..3];
let formatted = format_focused_paginated(
page,
8,
PaginationMode::Callers,
"target",
&prod_chains,
&[],
&[],
1,
0,
None,
true,
);
assert!(
formatted.contains("CALLERS (1-3 of 8):"),
"header should show 1-3 of 8, got: {}",
formatted
);
assert!(
formatted.contains("FOCUS: target"),
"should have FOCUS header"
);
}
#[test]
fn test_fields_none_regression() {
use crate::types::SemanticAnalysis;
use std::collections::HashMap;
let functions = vec![FunctionInfo {
name: "hello".to_string(),
line: 10,
end_line: 15,
parameters: vec![],
return_type: None,
}];
let classes = vec![ClassInfo {
name: "MyClass".to_string(),
line: 20,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let imports = vec![ImportInfo {
module: "std".to_string(),
items: vec!["io".to_string()],
line: 1,
}];
let semantic = SemanticAnalysis {
functions: functions.clone(),
classes: classes.clone(),
imports: imports.clone(),
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let output = format_file_details_paginated(
&functions,
functions.len(),
&semantic,
"test.rs",
100,
0,
true,
None,
);
assert!(output.contains("FILE: test.rs"), "FILE header missing");
assert!(output.contains("C:"), "Classes section missing");
assert!(output.contains("I:"), "Imports section missing");
assert!(output.contains("F:"), "Functions section missing");
}
#[test]
fn test_fields_functions_only() {
use crate::types::SemanticAnalysis;
use std::collections::HashMap;
let functions = vec![FunctionInfo {
name: "hello".to_string(),
line: 10,
end_line: 15,
parameters: vec![],
return_type: None,
}];
let classes = vec![ClassInfo {
name: "MyClass".to_string(),
line: 20,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let imports = vec![ImportInfo {
module: "std".to_string(),
items: vec!["io".to_string()],
line: 1,
}];
let semantic = SemanticAnalysis {
functions: functions.clone(),
classes: classes.clone(),
imports: imports.clone(),
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let fields = Some(vec![AnalyzeFileField::Functions]);
let output = format_file_details_paginated(
&functions,
functions.len(),
&semantic,
"test.rs",
100,
0,
true,
fields.as_deref(),
);
assert!(output.contains("FILE: test.rs"), "FILE header missing");
assert!(!output.contains("C:"), "Classes section should not appear");
assert!(!output.contains("I:"), "Imports section should not appear");
assert!(output.contains("F:"), "Functions section missing");
}
#[test]
fn test_fields_classes_only() {
use crate::types::SemanticAnalysis;
use std::collections::HashMap;
let functions = vec![FunctionInfo {
name: "hello".to_string(),
line: 10,
end_line: 15,
parameters: vec![],
return_type: None,
}];
let classes = vec![ClassInfo {
name: "MyClass".to_string(),
line: 20,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let imports = vec![ImportInfo {
module: "std".to_string(),
items: vec!["io".to_string()],
line: 1,
}];
let semantic = SemanticAnalysis {
functions: functions.clone(),
classes: classes.clone(),
imports: imports.clone(),
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let fields = Some(vec![AnalyzeFileField::Classes]);
let output = format_file_details_paginated(
&functions,
functions.len(),
&semantic,
"test.rs",
100,
0,
true,
fields.as_deref(),
);
assert!(output.contains("FILE: test.rs"), "FILE header missing");
assert!(output.contains("C:"), "Classes section missing");
assert!(!output.contains("I:"), "Imports section should not appear");
assert!(
!output.contains("F:"),
"Functions section should not appear"
);
}
#[test]
fn test_fields_imports_verbose() {
use crate::types::SemanticAnalysis;
use std::collections::HashMap;
let functions = vec![FunctionInfo {
name: "hello".to_string(),
line: 10,
end_line: 15,
parameters: vec![],
return_type: None,
}];
let classes = vec![ClassInfo {
name: "MyClass".to_string(),
line: 20,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let imports = vec![ImportInfo {
module: "std".to_string(),
items: vec!["io".to_string()],
line: 1,
}];
let semantic = SemanticAnalysis {
functions: functions.clone(),
classes: classes.clone(),
imports: imports.clone(),
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let fields = Some(vec![AnalyzeFileField::Imports]);
let output = format_file_details_paginated(
&functions,
functions.len(),
&semantic,
"test.rs",
100,
0,
true,
fields.as_deref(),
);
assert!(output.contains("FILE: test.rs"), "FILE header missing");
assert!(!output.contains("C:"), "Classes section should not appear");
assert!(output.contains("I:"), "Imports section missing");
assert!(
!output.contains("F:"),
"Functions section should not appear"
);
}
#[test]
fn test_fields_imports_no_verbose() {
use crate::types::SemanticAnalysis;
use std::collections::HashMap;
let functions = vec![FunctionInfo {
name: "hello".to_string(),
line: 10,
end_line: 15,
parameters: vec![],
return_type: None,
}];
let classes = vec![ClassInfo {
name: "MyClass".to_string(),
line: 20,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let imports = vec![ImportInfo {
module: "std".to_string(),
items: vec!["io".to_string()],
line: 1,
}];
let semantic = SemanticAnalysis {
functions: functions.clone(),
classes: classes.clone(),
imports: imports.clone(),
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let fields = Some(vec![AnalyzeFileField::Imports]);
let output = format_file_details_paginated(
&functions,
functions.len(),
&semantic,
"test.rs",
100,
0,
false,
fields.as_deref(),
);
assert!(output.contains("FILE: test.rs"), "FILE header missing");
assert!(!output.contains("C:"), "Classes section should not appear");
assert!(
output.contains("I:"),
"Imports section should appear (explicitly listed in fields)"
);
assert!(
!output.contains("F:"),
"Functions section should not appear"
);
}
#[test]
fn test_fields_empty_array() {
use crate::types::SemanticAnalysis;
use std::collections::HashMap;
let functions = vec![FunctionInfo {
name: "hello".to_string(),
line: 10,
end_line: 15,
parameters: vec![],
return_type: None,
}];
let classes = vec![ClassInfo {
name: "MyClass".to_string(),
line: 20,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let imports = vec![ImportInfo {
module: "std".to_string(),
items: vec!["io".to_string()],
line: 1,
}];
let semantic = SemanticAnalysis {
functions: functions.clone(),
classes: classes.clone(),
imports: imports.clone(),
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let fields = Some(vec![]);
let output = format_file_details_paginated(
&functions,
functions.len(),
&semantic,
"test.rs",
100,
0,
true,
fields.as_deref(),
);
assert!(output.contains("FILE: test.rs"), "FILE header missing");
assert!(
output.contains("C:"),
"Classes section missing (empty fields = show all)"
);
assert!(
output.contains("I:"),
"Imports section missing (empty fields = show all)"
);
assert!(
output.contains("F:"),
"Functions section missing (empty fields = show all)"
);
}
#[test]
fn test_fields_pagination_no_functions() {
use crate::types::SemanticAnalysis;
use std::collections::HashMap;
let functions = vec![FunctionInfo {
name: "hello".to_string(),
line: 10,
end_line: 15,
parameters: vec![],
return_type: None,
}];
let classes = vec![ClassInfo {
name: "MyClass".to_string(),
line: 20,
end_line: 50,
methods: vec![],
fields: vec![],
inherits: vec![],
}];
let imports = vec![ImportInfo {
module: "std".to_string(),
items: vec!["io".to_string()],
line: 1,
}];
let semantic = SemanticAnalysis {
functions: functions.clone(),
classes: classes.clone(),
imports: imports.clone(),
references: vec![],
call_frequency: HashMap::new(),
calls: vec![],
impl_traits: vec![],
};
let fields = Some(vec![AnalyzeFileField::Classes, AnalyzeFileField::Imports]);
let output = format_file_details_paginated(
&functions,
functions.len(),
&semantic,
"test.rs",
100,
0,
true,
fields.as_deref(),
);
assert!(output.contains("FILE: test.rs"), "FILE header missing");
assert!(
output.contains("1-1/1F"),
"FILE header should contain valid range (1-1/1F)"
);
assert!(output.contains("C:"), "Classes section missing");
assert!(output.contains("I:"), "Imports section missing");
assert!(
!output.contains("F:"),
"Functions section should not appear (filtered by fields)"
);
}
}
/// Renders the "C:" (classes) section.
///
/// With at most `MULTILINE_THRESHOLD` classes and no detected methods, all
/// classes are joined on a single line; otherwise each class gets its own
/// line with up to `METHOD_CAP` of its methods nested beneath it, followed
/// by a "... (N more)" marker when methods were truncated.
/// Returns an empty string when `classes` is empty.
fn format_classes_section(classes: &[ClassInfo], functions: &[FunctionInfo]) -> String {
    // Maximum number of methods listed per class before truncating.
    // Previously this was the magic number 10 repeated in three places.
    const METHOD_CAP: usize = 10;
    // Renders "Name:start-end", plus the parent list when the class inherits.
    fn class_label(class: &ClassInfo) -> String {
        if class.inherits.is_empty() {
            format!("{}:{}-{}", class.name, class.line, class.end_line)
        } else {
            format!(
                "{}:{}-{} ({})",
                class.name,
                class.line,
                class.end_line,
                class.inherits.join(", ")
            )
        }
    }
    let mut output = String::new();
    if classes.is_empty() {
        return output;
    }
    output.push_str("C:\n");
    let methods_by_class = collect_class_methods(classes, functions);
    let has_methods = methods_by_class.values().any(|m| !m.is_empty());
    if classes.len() <= MULTILINE_THRESHOLD && !has_methods {
        // Compact form: all class labels joined on one line.
        let class_strs: Vec<String> = classes.iter().map(class_label).collect();
        output.push_str(" ");
        output.push_str(&class_strs.join("; "));
        output.push('\n');
    } else {
        // Expanded form: one class per line with its methods beneath.
        for class in classes {
            let _ = writeln!(output, " {}", class_label(class));
            if let Some(methods) = methods_by_class.get(&class.name) {
                for method in methods.iter().take(METHOD_CAP) {
                    let _ = writeln!(output, " {}:{}", method.name, method.line);
                }
                // The in-loop `i + 1 == 10 ... break` truncation check was
                // redundant with `take(10)`; a single post-loop check suffices.
                if methods.len() > METHOD_CAP {
                    let _ = writeln!(output, " ... ({} more)", methods.len() - METHOD_CAP);
                }
            }
        }
    }
    output
}
fn format_imports_section(imports: &[ImportInfo]) -> String {
let mut output = String::new();
if imports.is_empty() {
return output;
}
output.push_str("I:\n");
let mut module_map: HashMap<String, usize> = HashMap::new();
for import in imports {
module_map
.entry(import.module.clone())
.and_modify(|count| *count += 1)
.or_insert(1);
}
let mut modules: Vec<_> = module_map.keys().cloned().collect();
modules.sort();
let formatted_modules: Vec<String> = modules
.iter()
.map(|module| format!("{}({})", module, module_map[module]))
.collect();
if formatted_modules.len() <= MULTILINE_THRESHOLD {
output.push_str(" ");
output.push_str(&formatted_modules.join("; "));
output.push('\n');
} else {
for module_str in formatted_modules {
output.push_str(" ");
output.push_str(&module_str);
output.push('\n');
}
}
output
}