1use crate::formatter::{
9 format_file_details, format_focused_internal, format_focused_summary_internal, format_structure,
10};
11use crate::graph::{CallGraph, InternalCallChain};
12use crate::lang::{language_for_extension, supported_languages};
13use crate::parser::{ElementExtractor, SemanticExtractor};
14use crate::test_detection::is_test_file;
15use crate::traversal::{WalkEntry, walk_directory};
16use crate::types::{
17 AnalysisMode, FileInfo, ImplTraitInfo, ImportInfo, SemanticAnalysis, SymbolMatchMode,
18};
19use rayon::prelude::*;
20#[cfg(feature = "schemars")]
21use schemars::JsonSchema;
22use serde::{Deserialize, Serialize};
23use std::path::{Path, PathBuf};
24use std::sync::Arc;
25use std::sync::atomic::{AtomicUsize, Ordering};
26use std::time::Instant;
27use thiserror::Error;
28use tokio_util::sync::CancellationToken;
29use tracing::instrument;
30
/// Unified error type for every analysis entry point in this module.
///
/// Subsystem errors convert automatically via `#[from]`, so `?` works across
/// the traversal, parser, graph, and formatter boundaries.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum AnalyzeError {
    /// Directory walking failed.
    #[error("Traversal error: {0}")]
    Traversal(#[from] crate::traversal::TraversalError),
    /// Source parsing / element extraction failed.
    #[error("Parser error: {0}")]
    Parser(#[from] crate::parser::ParserError),
    /// Call-graph construction or symbol resolution failed.
    #[error("Graph error: {0}")]
    Graph(#[from] crate::graph::GraphError),
    /// Rendering the formatted output failed.
    #[error("Formatter error: {0}")]
    Formatter(#[from] crate::formatter::FormatterError),
    /// The operation was cancelled via its `CancellationToken`.
    #[error("Analysis cancelled")]
    Cancelled,
    /// The requested language or extension is not supported.
    #[error("unsupported language: {0}")]
    UnsupportedLanguage(String),
}
47
/// Result of a directory-level analysis: formatted report plus per-file stats.
#[derive(Debug, Clone, Serialize)]
#[cfg_attr(feature = "schemars", derive(JsonSchema))]
#[non_exhaustive]
pub struct AnalysisOutput {
    #[cfg_attr(
        feature = "schemars",
        schemars(description = "Formatted text representation of the analysis")
    )]
    pub formatted: String,
    #[cfg_attr(
        feature = "schemars",
        schemars(description = "List of files analyzed in the directory")
    )]
    pub files: Vec<FileInfo>,
    /// Raw walk entries (files and directories); internal only, never serialized.
    #[serde(skip)]
    #[cfg_attr(feature = "schemars", schemars(skip))]
    pub entries: Vec<WalkEntry>,
    /// Optional per-subtree counts; internal only, never serialized.
    /// NOTE(review): always `None` in this module — populated elsewhere, confirm.
    #[serde(skip)]
    #[cfg_attr(feature = "schemars", schemars(skip))]
    pub subtree_counts: Option<Vec<(std::path::PathBuf, usize)>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    #[cfg_attr(
        feature = "schemars",
        schemars(
            description = "Opaque cursor token for the next page of results (absent when no more results)"
        )
    )]
    pub next_cursor: Option<String>,
}
80
/// Result of analyzing a single file: formatted report plus raw semantic data.
#[derive(Debug, Clone, Serialize)]
#[cfg_attr(feature = "schemars", derive(JsonSchema))]
#[non_exhaustive]
pub struct FileAnalysisOutput {
    #[cfg_attr(
        feature = "schemars",
        schemars(description = "Formatted text representation of the analysis")
    )]
    pub formatted: String,
    #[cfg_attr(
        feature = "schemars",
        schemars(description = "Semantic analysis data including functions, classes, and imports")
    )]
    pub semantic: SemanticAnalysis,
    #[cfg_attr(
        feature = "schemars",
        schemars(description = "Total line count of the analyzed file")
    )]
    #[cfg_attr(
        feature = "schemars",
        schemars(schema_with = "crate::schema_helpers::integer_schema")
    )]
    pub line_count: usize,
    #[serde(skip_serializing_if = "Option::is_none")]
    #[cfg_attr(
        feature = "schemars",
        schemars(
            description = "Opaque cursor token for the next page of results (absent when no more results)"
        )
    )]
    pub next_cursor: Option<String>,
}
114
impl FileAnalysisOutput {
    /// Bundles the formatted text, semantic analysis, line count, and optional
    /// pagination cursor into a `FileAnalysisOutput`.
    #[must_use]
    pub fn new(
        formatted: String,
        semantic: SemanticAnalysis,
        line_count: usize,
        next_cursor: Option<String>,
    ) -> Self {
        Self {
            formatted,
            semantic,
            line_count,
            next_cursor,
        }
    }
}
/// Analyzes every file in `entries` in parallel, producing per-file stats and
/// a formatted directory structure.
///
/// `progress` is incremented once per file, including unreadable files, so a
/// caller can report completion against the total file count. Returns
/// `AnalyzeError::Cancelled` when `ct` is cancelled before or after the
/// parallel phase; files skipped mid-flight due to cancellation are simply
/// omitted from the results.
///
/// # Errors
///
/// Returns `AnalyzeError::Cancelled` on cancellation.
#[instrument(skip_all, fields(path = %root.display()))]
#[allow(clippy::needless_pass_by_value)]
pub fn analyze_directory_with_progress(
    root: &Path,
    entries: Vec<WalkEntry>,
    progress: Arc<AtomicUsize>,
    ct: CancellationToken,
) -> Result<AnalysisOutput, AnalyzeError> {
    if ct.is_cancelled() {
        return Err(AnalyzeError::Cancelled);
    }

    // Directories carry no analyzable content; only file entries are processed.
    let file_entries: Vec<&WalkEntry> = entries.iter().filter(|e| !e.is_dir).collect();

    let start = Instant::now();
    tracing::debug!(file_count = file_entries.len(), root = %root.display(), "analysis start");

    let analysis_results: Vec<FileInfo> = file_entries
        .par_iter()
        .filter_map(|entry| {
            if ct.is_cancelled() {
                return None;
            }

            let path_str = entry.path.display().to_string();

            let ext = entry.path.extension().and_then(|e| e.to_str());

            // Unreadable files still count toward progress but are skipped.
            let Ok(source) = std::fs::read_to_string(&entry.path) else {
                progress.fetch_add(1, Ordering::Relaxed);
                return None;
            };

            let line_count = source.lines().count();

            // Extraction errors degrade to zero counts rather than dropping the file.
            let (language, function_count, class_count) = if let Some(ext_str) = ext {
                if let Some(lang) = language_for_extension(ext_str) {
                    let lang_str = lang.to_string();
                    match ElementExtractor::extract_with_depth(&source, &lang_str) {
                        Ok((func_count, class_count)) => (lang_str, func_count, class_count),
                        Err(_) => (lang_str, 0, 0),
                    }
                } else {
                    ("unknown".to_string(), 0, 0)
                }
            } else {
                ("unknown".to_string(), 0, 0)
            };

            progress.fetch_add(1, Ordering::Relaxed);

            let is_test = is_test_file(&entry.path);

            Some(FileInfo {
                path: path_str,
                line_count,
                function_count,
                class_count,
                language,
                is_test,
            })
        })
        .collect();

    if ct.is_cancelled() {
        return Err(AnalyzeError::Cancelled);
    }

    tracing::debug!(
        file_count = file_entries.len(),
        duration_ms = u64::try_from(start.elapsed().as_millis()).unwrap_or(u64::MAX),
        "analysis complete"
    );

    let formatted = format_structure(&entries, &analysis_results, None);

    Ok(AnalysisOutput {
        formatted,
        files: analysis_results,
        entries,
        next_cursor: None,
        subtree_counts: None,
    })
}
227
228#[instrument(skip_all, fields(path = %root.display()))]
230pub fn analyze_directory(
231 root: &Path,
232 max_depth: Option<u32>,
233) -> Result<AnalysisOutput, AnalyzeError> {
234 let entries = walk_directory(root, max_depth)?;
235 let counter = Arc::new(AtomicUsize::new(0));
236 let ct = CancellationToken::new();
237 analyze_directory_with_progress(root, entries, counter, ct)
238}
239
240#[must_use]
242pub fn determine_mode(path: &str, focus: Option<&str>) -> AnalysisMode {
243 if focus.is_some() {
244 return AnalysisMode::SymbolFocus;
245 }
246
247 let path_obj = Path::new(path);
248 if path_obj.is_dir() {
249 AnalysisMode::Overview
250 } else {
251 AnalysisMode::FileDetails
252 }
253}
254
/// Analyzes a single file on disk, producing formatted details plus the raw
/// semantic analysis.
///
/// Unknown extensions are analyzed as language `"unknown"`. For Python files,
/// wildcard (`*`) imports are expanded to the target module's symbols when
/// resolvable.
///
/// # Errors
///
/// Read failures are surfaced as `AnalyzeError::Parser` (there is no
/// dedicated I/O variant); extraction failures propagate as-is.
#[instrument(skip_all, fields(path))]
pub fn analyze_file(
    path: &str,
    ast_recursion_limit: Option<usize>,
) -> Result<FileAnalysisOutput, AnalyzeError> {
    let start = Instant::now();
    let source = std::fs::read_to_string(path)
        .map_err(|e| AnalyzeError::Parser(crate::parser::ParserError::ParseError(e.to_string())))?;

    let line_count = source.lines().count();

    // Map the extension to a language name; fall back to "unknown".
    let ext = Path::new(path)
        .extension()
        .and_then(|e| e.to_str())
        .and_then(language_for_extension)
        .map_or_else(|| "unknown".to_string(), std::string::ToString::to_string);

    let mut semantic = SemanticExtractor::extract(&source, &ext, ast_recursion_limit)?;

    // Attribute every extracted reference to this file.
    for r in &mut semantic.references {
        r.location = path.to_string();
    }

    if ext == "python" {
        resolve_wildcard_imports(Path::new(path), &mut semantic.imports);
    }

    let is_test = is_test_file(Path::new(path));

    // NOTE(review): presumably the parent dir lets the formatter render
    // sibling/module context — confirm against format_file_details.
    let parent_dir = Path::new(path).parent();

    let formatted = format_file_details(path, &semantic, line_count, is_test, parent_dir);

    tracing::debug!(path = %path, language = %ext, functions = semantic.functions.len(), classes = semantic.classes.len(), imports = semantic.imports.len(), duration_ms = u64::try_from(start.elapsed().as_millis()).unwrap_or(u64::MAX), "file analysis complete");

    Ok(FileAnalysisOutput::new(
        formatted, semantic, line_count, None,
    ))
}
302
303#[inline]
330pub fn analyze_str(
331 source: &str,
332 language: &str,
333 ast_recursion_limit: Option<usize>,
334) -> Result<FileAnalysisOutput, AnalyzeError> {
335 let lang = language_for_extension(language).or_else(|| {
338 let lower = language.to_ascii_lowercase();
339 supported_languages()
340 .iter()
341 .find(|&&name| name == lower)
342 .copied()
343 });
344 let lang = lang.ok_or_else(|| AnalyzeError::UnsupportedLanguage(language.to_string()))?;
345
346 let mut semantic = SemanticExtractor::extract(source, lang, ast_recursion_limit)?;
348
349 for r in &mut semantic.references {
351 r.location = "<memory>".to_string();
352 }
353
354 let line_count = source.lines().count();
356
357 let formatted = format_file_details("", &semantic, line_count, false, None);
359
360 Ok(FileAnalysisOutput::new(
361 formatted, semantic, line_count, None,
362 ))
363}
364
/// A single hop of a call chain, flattened for structured (JSON) output.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(JsonSchema))]
pub struct CallChainEntry {
    #[cfg_attr(
        feature = "schemars",
        schemars(description = "Symbol name of the caller or callee")
    )]
    pub symbol: String,
    #[cfg_attr(
        feature = "schemars",
        schemars(description = "File path relative to the repository root")
    )]
    pub file: String,
    #[cfg_attr(
        feature = "schemars",
        schemars(
            description = "Line number of the definition or call site (1-indexed)",
            schema_with = "crate::schema_helpers::integer_schema"
        )
    )]
    pub line: usize,
}
388
/// Result of a focused (call-graph) analysis around a single symbol.
///
/// The `*_chains` and count fields are internal carriers for downstream
/// formatting/pagination and are never serialized.
#[derive(Debug, Serialize)]
#[cfg_attr(feature = "schemars", derive(JsonSchema))]
#[non_exhaustive]
pub struct FocusedAnalysisOutput {
    #[cfg_attr(
        feature = "schemars",
        schemars(description = "Formatted text representation of the call graph analysis")
    )]
    pub formatted: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    #[cfg_attr(
        feature = "schemars",
        schemars(
            description = "Opaque cursor token for the next page of results (absent when no more results)"
        )
    )]
    pub next_cursor: Option<String>,
    /// Incoming chains originating from production (non-test) code.
    #[serde(skip)]
    #[cfg_attr(feature = "schemars", schemars(skip))]
    pub prod_chains: Vec<InternalCallChain>,
    /// Incoming chains originating from test files or `test_`-prefixed symbols.
    #[serde(skip)]
    #[cfg_attr(feature = "schemars", schemars(skip))]
    pub test_chains: Vec<InternalCallChain>,
    /// Outgoing chains (what the focused symbol calls).
    #[serde(skip)]
    #[cfg_attr(feature = "schemars", schemars(skip))]
    pub outgoing_chains: Vec<InternalCallChain>,
    /// Number of definitions found for the focused symbol.
    #[serde(skip)]
    #[cfg_attr(feature = "schemars", schemars(skip))]
    pub def_count: usize,
    /// Distinct caller count before any impl-only filtering.
    #[serde(skip)]
    #[cfg_attr(feature = "schemars", schemars(skip))]
    pub unfiltered_caller_count: usize,
    /// Distinct caller count after impl-trait filtering (equals the
    /// unfiltered count when filtering is off).
    #[serde(skip)]
    #[cfg_attr(feature = "schemars", schemars(skip))]
    pub impl_trait_caller_count: usize,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub callers: Option<Vec<CallChainEntry>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub test_callers: Option<Vec<CallChainEntry>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub callees: Option<Vec<CallChainEntry>>,
}
442
/// Public configuration for a focused (call-graph) analysis.
#[derive(Clone)]
pub struct FocusedAnalysisConfig {
    /// Symbol name to focus on.
    pub focus: String,
    /// How `focus` is matched (exact, case-insensitive, or prefix).
    pub match_mode: SymbolMatchMode,
    /// Maximum call-chain depth followed in each direction.
    pub follow_depth: u32,
    /// Maximum directory traversal depth (`None` = unlimited).
    pub max_depth: Option<u32>,
    /// Recursion limit for AST extraction (`None` = extractor default).
    pub ast_recursion_limit: Option<usize>,
    /// Render the condensed summary format instead of the full one.
    pub use_summary: bool,
    /// When `Some(true)`, keep only impl-trait caller edges.
    pub impl_only: Option<bool>,
}
455
/// Internal subset of `FocusedAnalysisConfig` handed to the analysis core
/// (the traversal depth has already been consumed by that point).
#[derive(Clone)]
struct InternalFocusedParams {
    focus: String,
    match_mode: SymbolMatchMode,
    follow_depth: u32,
    ast_recursion_limit: Option<usize>,
    use_summary: bool,
    impl_only: Option<bool>,
}
466
/// Per-file semantic analyses paired with every impl-trait record found.
type FileAnalysisBatch = (Vec<(PathBuf, SemanticAnalysis)>, Vec<ImplTraitInfo>);

/// Runs semantic extraction over every file entry in parallel.
///
/// `progress` is incremented once per file whether extraction succeeds, the
/// file is unreadable, or extraction fails.
///
/// # Errors
///
/// Returns `AnalyzeError::Cancelled` when `ct` is cancelled before or after
/// the parallel phase.
fn collect_file_analysis(
    entries: &[WalkEntry],
    progress: &Arc<AtomicUsize>,
    ct: &CancellationToken,
    ast_recursion_limit: Option<usize>,
) -> Result<FileAnalysisBatch, AnalyzeError> {
    if ct.is_cancelled() {
        return Err(AnalyzeError::Cancelled);
    }

    // Only files are analyzed; directory entries are skipped.
    let file_entries: Vec<&WalkEntry> = entries.iter().filter(|e| !e.is_dir).collect();

    let analysis_results: Vec<(PathBuf, SemanticAnalysis)> = file_entries
        .par_iter()
        .filter_map(|entry| {
            if ct.is_cancelled() {
                return None;
            }

            let ext = entry.path.extension().and_then(|e| e.to_str());

            // Unreadable files still count toward progress but are skipped.
            let Ok(source) = std::fs::read_to_string(&entry.path) else {
                progress.fetch_add(1, Ordering::Relaxed);
                return None;
            };

            let language = if let Some(ext_str) = ext {
                language_for_extension(ext_str)
                    .map_or_else(|| "unknown".to_string(), std::string::ToString::to_string)
            } else {
                "unknown".to_string()
            };

            if let Ok(mut semantic) =
                SemanticExtractor::extract(&source, &language, ast_recursion_limit)
            {
                // Stamp each reference and impl-trait record with its file path.
                for r in &mut semantic.references {
                    r.location = entry.path.display().to_string();
                }
                for trait_info in &mut semantic.impl_traits {
                    trait_info.path.clone_from(&entry.path);
                }
                progress.fetch_add(1, Ordering::Relaxed);
                Some((entry.path.clone(), semantic))
            } else {
                progress.fetch_add(1, Ordering::Relaxed);
                None
            }
        })
        .collect();

    if ct.is_cancelled() {
        return Err(AnalyzeError::Cancelled);
    }

    // Flatten impl-trait info across all files for graph construction.
    let all_impl_traits: Vec<ImplTraitInfo> = analysis_results
        .iter()
        .flat_map(|(_, sem)| sem.impl_traits.iter().cloned())
        .collect();

    Ok((analysis_results, all_impl_traits))
}
543
544fn build_call_graph(
546 analysis_results: Vec<(PathBuf, SemanticAnalysis)>,
547 all_impl_traits: &[ImplTraitInfo],
548) -> Result<CallGraph, AnalyzeError> {
549 CallGraph::build_from_results(
552 analysis_results,
553 all_impl_traits,
554 false, )
556 .map_err(std::convert::Into::into)
557}
558
559fn resolve_symbol(
564 graph: &mut CallGraph,
565 params: &InternalFocusedParams,
566) -> Result<(String, usize, usize), AnalyzeError> {
567 let resolved_focus = if params.match_mode == SymbolMatchMode::Exact {
569 let exists = graph.definitions.contains_key(¶ms.focus)
570 || graph.callers.contains_key(¶ms.focus)
571 || graph.callees.contains_key(¶ms.focus);
572 if exists {
573 params.focus.clone()
574 } else {
575 return Err(crate::graph::GraphError::SymbolNotFound {
576 symbol: params.focus.clone(),
577 hint: "Try match_mode=insensitive for a case-insensitive search, or match_mode=prefix to list symbols starting with this name.".to_string(),
578 }
579 .into());
580 }
581 } else {
582 graph.resolve_symbol_indexed(¶ms.focus, ¶ms.match_mode)?
583 };
584
585 let unfiltered_caller_count = graph.callers.get(&resolved_focus).map_or(0, |edges| {
587 edges
588 .iter()
589 .map(|e| &e.neighbor_name)
590 .collect::<std::collections::HashSet<_>>()
591 .len()
592 });
593
594 let impl_trait_caller_count = if params.impl_only.unwrap_or(false) {
598 for edges in graph.callers.values_mut() {
599 edges.retain(|e| e.is_impl_trait);
600 }
601 graph.callers.get(&resolved_focus).map_or(0, |edges| {
602 edges
603 .iter()
604 .map(|e| &e.neighbor_name)
605 .collect::<std::collections::HashSet<_>>()
606 .len()
607 })
608 } else {
609 unfiltered_caller_count
610 };
611
612 Ok((
613 resolved_focus,
614 unfiltered_caller_count,
615 impl_trait_caller_count,
616 ))
617}
618
/// Result bundle produced by `compute_chains`, in order:
/// formatted report, production caller chains, test caller chains,
/// outgoing callee chains, and the definition count of the focus symbol.
type ChainComputeResult = (
    String,
    Vec<InternalCallChain>,
    Vec<InternalCallChain>,
    Vec<InternalCallChain>,
    usize,
);
627
628fn chains_to_entries(
632 chains: &[InternalCallChain],
633 root: Option<&std::path::Path>,
634) -> Option<Vec<CallChainEntry>> {
635 if chains.is_empty() {
636 return None;
637 }
638 let entries: Vec<CallChainEntry> = chains
639 .iter()
640 .take(10)
641 .filter_map(|chain| {
642 let (symbol, path, line) = chain.chain.first()?;
643 let file = match root {
644 Some(root) => path
645 .strip_prefix(root)
646 .unwrap_or(path.as_path())
647 .to_string_lossy()
648 .into_owned(),
649 None => path.to_string_lossy().into_owned(),
650 };
651 Some(CallChainEntry {
652 symbol: symbol.clone(),
653 file,
654 line: *line,
655 })
656 })
657 .collect();
658 if entries.is_empty() {
659 None
660 } else {
661 Some(entries)
662 }
663}
664
/// Computes incoming/outgoing call chains for the resolved focus symbol and
/// renders the formatted report.
///
/// Incoming chains are partitioned into production vs. test: a chain counts
/// as "test" when its first hop lies in a test file or its symbol name starts
/// with `test_`. When impl-only filtering is active, a header line with the
/// filtered/unfiltered caller counts is prepended to the formatted output.
///
/// # Errors
///
/// Propagates graph traversal and formatter errors.
fn compute_chains(
    graph: &CallGraph,
    resolved_focus: &str,
    root: &Path,
    params: &InternalFocusedParams,
    unfiltered_caller_count: usize,
    impl_trait_caller_count: usize,
) -> Result<ChainComputeResult, AnalyzeError> {
    let def_count = graph.definitions.get(resolved_focus).map_or(0, Vec::len);
    let incoming_chains = graph.find_incoming_chains(resolved_focus, params.follow_depth)?;
    let outgoing_chains = graph.find_outgoing_chains(resolved_focus, params.follow_depth)?;

    // Chains with no first hop default to the production partition.
    let (prod_chains, test_chains): (Vec<_>, Vec<_>) =
        incoming_chains.iter().cloned().partition(|chain| {
            chain
                .chain
                .first()
                .is_none_or(|(name, path, _)| !is_test_file(path) && !name.starts_with("test_"))
        });

    let mut formatted = if params.use_summary {
        format_focused_summary_internal(
            graph,
            resolved_focus,
            params.follow_depth,
            Some(root),
            Some(&incoming_chains),
            Some(&outgoing_chains),
        )?
    } else {
        format_focused_internal(
            graph,
            resolved_focus,
            params.follow_depth,
            Some(root),
            Some(&incoming_chains),
            Some(&outgoing_chains),
        )?
    };

    if params.impl_only.unwrap_or(false) {
        let filter_header = format!(
            "FILTER: impl_only=true ({impl_trait_caller_count} of {unfiltered_caller_count} callers shown)\n",
        );
        formatted = format!("{filter_header}{formatted}");
    }

    Ok((
        formatted,
        prod_chains,
        test_chains,
        outgoing_chains,
        def_count,
    ))
}
724
725#[allow(clippy::needless_pass_by_value)]
728pub fn analyze_focused_with_progress(
729 root: &Path,
730 params: &FocusedAnalysisConfig,
731 progress: Arc<AtomicUsize>,
732 ct: CancellationToken,
733) -> Result<FocusedAnalysisOutput, AnalyzeError> {
734 let entries = walk_directory(root, params.max_depth)?;
735 let internal_params = InternalFocusedParams {
736 focus: params.focus.clone(),
737 match_mode: params.match_mode.clone(),
738 follow_depth: params.follow_depth,
739 ast_recursion_limit: params.ast_recursion_limit,
740 use_summary: params.use_summary,
741 impl_only: params.impl_only,
742 };
743 analyze_focused_with_progress_with_entries_internal(
744 root,
745 params.max_depth,
746 &progress,
747 &ct,
748 &internal_params,
749 &entries,
750 )
751}
752
/// Core of the focused analysis: builds the call graph, resolves the focus
/// symbol, computes chains, and assembles the structured output.
///
/// Single-file roots yield an explanatory message rather than an error
/// (cross-file graphs require a directory). Cancellation is checked between
/// each major phase. The `callers`/`test_callers`/`callees` fields always
/// carry depth-1 entries: reused from the main chains when
/// `follow_depth <= 1`, otherwise recomputed at depth 1.
///
/// # Errors
///
/// Returns `AnalyzeError::Cancelled` on cancellation; graph and formatter
/// errors propagate.
#[instrument(skip_all, fields(path = %root.display(), symbol = %params.focus))]
fn analyze_focused_with_progress_with_entries_internal(
    root: &Path,
    _max_depth: Option<u32>,
    progress: &Arc<AtomicUsize>,
    ct: &CancellationToken,
    params: &InternalFocusedParams,
    entries: &[WalkEntry],
) -> Result<FocusedAnalysisOutput, AnalyzeError> {
    if ct.is_cancelled() {
        return Err(AnalyzeError::Cancelled);
    }

    // Graceful degradation for file roots: message instead of error.
    if root.is_file() {
        let formatted =
            "Single-file focus not supported. Please provide a directory path for cross-file call graph analysis.\n"
                .to_string();
        return Ok(FocusedAnalysisOutput {
            formatted,
            next_cursor: None,
            prod_chains: vec![],
            test_chains: vec![],
            outgoing_chains: vec![],
            def_count: 0,
            unfiltered_caller_count: 0,
            impl_trait_caller_count: 0,
            callers: None,
            test_callers: None,
            callees: None,
        });
    }

    let (analysis_results, all_impl_traits) =
        collect_file_analysis(entries, progress, ct, params.ast_recursion_limit)?;

    if ct.is_cancelled() {
        return Err(AnalyzeError::Cancelled);
    }

    // Mutable because resolve_symbol may filter caller edges in place.
    let mut graph = build_call_graph(analysis_results, &all_impl_traits)?;

    if ct.is_cancelled() {
        return Err(AnalyzeError::Cancelled);
    }

    let (resolved_focus, unfiltered_caller_count, impl_trait_caller_count) =
        resolve_symbol(&mut graph, params)?;

    if ct.is_cancelled() {
        return Err(AnalyzeError::Cancelled);
    }

    let (formatted, prod_chains, test_chains, outgoing_chains, def_count) = compute_chains(
        &graph,
        &resolved_focus,
        root,
        params,
        unfiltered_caller_count,
        impl_trait_caller_count,
    )?;

    // Structured fields are always depth-1 (see doc comment above).
    let (depth1_callers, depth1_test_callers, depth1_callees) = if params.follow_depth <= 1 {
        let callers = chains_to_entries(&prod_chains, Some(root));
        let test_callers = chains_to_entries(&test_chains, Some(root));
        let callees = chains_to_entries(&outgoing_chains, Some(root));
        (callers, test_callers, callees)
    } else {
        let incoming1 = graph
            .find_incoming_chains(&resolved_focus, 1)
            .unwrap_or_default();
        let outgoing1 = graph
            .find_outgoing_chains(&resolved_focus, 1)
            .unwrap_or_default();
        // Same test/prod partition rule as compute_chains.
        let (prod1, test1): (Vec<_>, Vec<_>) = incoming1.into_iter().partition(|chain| {
            chain
                .chain
                .first()
                .is_none_or(|(name, path, _)| !is_test_file(path) && !name.starts_with("test_"))
        });
        let callers = chains_to_entries(&prod1, Some(root));
        let test_callers = chains_to_entries(&test1, Some(root));
        let callees = chains_to_entries(&outgoing1, Some(root));
        (callers, test_callers, callees)
    };

    Ok(FocusedAnalysisOutput {
        formatted,
        next_cursor: None,
        callers: depth1_callers,
        test_callers: depth1_test_callers,
        callees: depth1_callees,
        prod_chains,
        test_chains,
        outgoing_chains,
        def_count,
        unfiltered_caller_count,
        impl_trait_caller_count,
    })
}
866
867pub fn analyze_focused_with_progress_with_entries(
869 root: &Path,
870 params: &FocusedAnalysisConfig,
871 progress: &Arc<AtomicUsize>,
872 ct: &CancellationToken,
873 entries: &[WalkEntry],
874) -> Result<FocusedAnalysisOutput, AnalyzeError> {
875 let internal_params = InternalFocusedParams {
876 focus: params.focus.clone(),
877 match_mode: params.match_mode.clone(),
878 follow_depth: params.follow_depth,
879 ast_recursion_limit: params.ast_recursion_limit,
880 use_summary: params.use_summary,
881 impl_only: params.impl_only,
882 };
883 analyze_focused_with_progress_with_entries_internal(
884 root,
885 params.max_depth,
886 progress,
887 ct,
888 &internal_params,
889 entries,
890 )
891}
892
893#[instrument(skip_all, fields(path = %root.display(), symbol = %focus))]
894pub fn analyze_focused(
895 root: &Path,
896 focus: &str,
897 follow_depth: u32,
898 max_depth: Option<u32>,
899 ast_recursion_limit: Option<usize>,
900) -> Result<FocusedAnalysisOutput, AnalyzeError> {
901 let entries = walk_directory(root, max_depth)?;
902 let counter = Arc::new(AtomicUsize::new(0));
903 let ct = CancellationToken::new();
904 let params = FocusedAnalysisConfig {
905 focus: focus.to_string(),
906 match_mode: SymbolMatchMode::Exact,
907 follow_depth,
908 max_depth,
909 ast_recursion_limit,
910 use_summary: false,
911 impl_only: None,
912 };
913 analyze_focused_with_progress_with_entries(root, ¶ms, &counter, &ct, &entries)
914}
915
916#[instrument(skip_all, fields(path))]
919pub fn analyze_module_file(path: &str) -> Result<crate::types::ModuleInfo, AnalyzeError> {
920 let source = std::fs::read_to_string(path)
921 .map_err(|e| AnalyzeError::Parser(crate::parser::ParserError::ParseError(e.to_string())))?;
922
923 let file_path = Path::new(path);
924 let name = file_path
925 .file_name()
926 .and_then(|s| s.to_str())
927 .unwrap_or("unknown")
928 .to_string();
929
930 let line_count = source.lines().count();
931
932 let language = file_path
933 .extension()
934 .and_then(|e| e.to_str())
935 .and_then(language_for_extension)
936 .ok_or_else(|| {
937 AnalyzeError::Parser(crate::parser::ParserError::ParseError(
938 "unsupported or missing file extension".to_string(),
939 ))
940 })?;
941
942 let semantic = SemanticExtractor::extract(&source, language, None)?;
943
944 let functions = semantic
945 .functions
946 .into_iter()
947 .map(|f| crate::types::ModuleFunctionInfo {
948 name: f.name,
949 line: f.line,
950 })
951 .collect();
952
953 let imports = semantic
954 .imports
955 .into_iter()
956 .map(|i| crate::types::ModuleImportInfo {
957 module: i.module,
958 items: i.items,
959 })
960 .collect();
961
962 Ok(crate::types::ModuleInfo {
963 name,
964 line_count,
965 language: language.to_string(),
966 functions,
967 imports,
968 })
969}
970
971pub fn analyze_import_lookup(
977 root: &Path,
978 module: &str,
979 entries: &[WalkEntry],
980 ast_recursion_limit: Option<usize>,
981) -> Result<FocusedAnalysisOutput, AnalyzeError> {
982 let mut matches: Vec<(PathBuf, usize)> = Vec::new();
983
984 for entry in entries {
985 if entry.is_dir {
986 continue;
987 }
988 let ext = entry
989 .path
990 .extension()
991 .and_then(|e| e.to_str())
992 .and_then(crate::lang::language_for_extension);
993 let Some(lang) = ext else {
994 continue;
995 };
996 let Ok(source) = std::fs::read_to_string(&entry.path) else {
997 continue;
998 };
999 let Ok(semantic) = SemanticExtractor::extract(&source, lang, ast_recursion_limit) else {
1000 continue;
1001 };
1002 for import in &semantic.imports {
1003 if import.module == module || import.items.iter().any(|item| item == module) {
1004 matches.push((entry.path.clone(), import.line));
1005 break;
1006 }
1007 }
1008 }
1009
1010 let mut text = format!("IMPORT_LOOKUP: {module}\n");
1011 text.push_str(&format!("ROOT: {}\n", root.display()));
1012 text.push_str(&format!("MATCHES: {}\n", matches.len()));
1013 for (path, line) in &matches {
1014 let rel = path.strip_prefix(root).unwrap_or(path);
1015 text.push_str(&format!(" {}:{line}\n", rel.display()));
1016 }
1017
1018 Ok(FocusedAnalysisOutput {
1019 formatted: text,
1020 next_cursor: None,
1021 prod_chains: vec![],
1022 test_chains: vec![],
1023 outgoing_chains: vec![],
1024 def_count: 0,
1025 unfiltered_caller_count: 0,
1026 impl_trait_caller_count: 0,
1027 callers: None,
1028 test_callers: None,
1029 callees: None,
1030 })
1031}
1032
1033fn resolve_wildcard_imports(file_path: &Path, imports: &mut [ImportInfo]) {
1043 use std::collections::HashMap;
1044
1045 let mut resolved_cache: HashMap<PathBuf, Vec<String>> = HashMap::new();
1046 let Ok(file_path_canonical) = file_path.canonicalize() else {
1047 tracing::debug!(file = ?file_path, "unable to canonicalize current file path");
1048 return;
1049 };
1050
1051 for import in imports.iter_mut() {
1052 if import.items != ["*"] {
1053 continue;
1054 }
1055 resolve_single_wildcard(import, file_path, &file_path_canonical, &mut resolved_cache);
1056 }
1057}
1058
/// Resolves one wildcard import in place.
///
/// Only relative imports (with leading dots) are handled; absolute imports
/// are left untouched. The target module is located on disk, canonicalized,
/// checked against self-import, and then its exported symbols replace the
/// `*` item. Results are memoized in `resolved_cache` keyed by the canonical
/// target path.
fn resolve_single_wildcard(
    import: &mut ImportInfo,
    file_path: &Path,
    file_path_canonical: &Path,
    resolved_cache: &mut std::collections::HashMap<PathBuf, Vec<String>>,
) {
    let module = import.module.clone();
    // Leading dots encode relativity: "." = current dir, ".." = parent, etc.
    let dot_count = module.chars().take_while(|c| *c == '.').count();
    if dot_count == 0 {
        return;
    }
    let module_path = module.trim_start_matches('.');

    let Some(target_to_read) = locate_target_file(file_path, dot_count, module_path, &module)
    else {
        return;
    };

    let Ok(canonical) = target_to_read.canonicalize() else {
        tracing::debug!(target = ?target_to_read, import = %module, "unable to canonicalize path");
        return;
    };

    // A module wildcard-importing from itself is rejected.
    if canonical == file_path_canonical {
        tracing::debug!(target = ?canonical, import = %module, "cannot import from self");
        return;
    }

    if let Some(cached) = resolved_cache.get(&canonical) {
        tracing::debug!(import = %module, symbols_count = cached.len(), "using cached symbols");
        import.items.clone_from(cached);
        return;
    }

    if let Some(symbols) = parse_target_symbols(&target_to_read, &module) {
        tracing::debug!(import = %module, resolved_count = symbols.len(), "wildcard import resolved");
        import.items.clone_from(&symbols);
        resolved_cache.insert(canonical, symbols);
    }
}
1100
1101fn locate_target_file(
1103 file_path: &Path,
1104 dot_count: usize,
1105 module_path: &str,
1106 module: &str,
1107) -> Option<PathBuf> {
1108 let mut target_dir = file_path.parent()?.to_path_buf();
1109
1110 for _ in 1..dot_count {
1111 if !target_dir.pop() {
1112 tracing::debug!(import = %module, "unable to climb {} levels", dot_count.saturating_sub(1));
1113 return None;
1114 }
1115 }
1116
1117 let target_file = if module_path.is_empty() {
1118 target_dir.join("__init__.py")
1119 } else {
1120 let rel_path = module_path.replace('.', "/");
1121 target_dir.join(format!("{rel_path}.py"))
1122 };
1123
1124 if target_file.exists() {
1125 Some(target_file)
1126 } else if target_file.with_extension("").is_dir() {
1127 let init = target_file.with_extension("").join("__init__.py");
1128 if init.exists() { Some(init) } else { None }
1129 } else {
1130 tracing::debug!(target = ?target_file, import = %module, "target file not found");
1131 None
1132 }
1133}
1134
/// Parses a Python file with tree-sitter and returns its exported symbols.
///
/// Prefers an explicit `__all__` list; otherwise falls back to all top-level
/// function and class names that do not start with an underscore. Returns
/// `None` when the file cannot be read, the Python grammar is unavailable,
/// or parsing fails.
fn parse_target_symbols(target_path: &Path, module: &str) -> Option<Vec<String>> {
    use tree_sitter::Parser;

    let source = match std::fs::read_to_string(target_path) {
        Ok(s) => s,
        Err(e) => {
            tracing::debug!(target = ?target_path, import = %module, error = %e, "unable to read target file");
            return None;
        }
    };

    let lang_info = crate::languages::get_language_info("python")?;
    let mut parser = Parser::new();
    if parser.set_language(&lang_info.language).is_err() {
        return None;
    }
    let tree = parser.parse(&source, None)?;

    // First choice: the module's explicit __all__ export list.
    let mut symbols = Vec::new();
    extract_all_from_tree(&tree, &source, &mut symbols);
    if !symbols.is_empty() {
        tracing::debug!(import = %module, symbols = ?symbols, "using __all__ symbols");
        return Some(symbols);
    }

    // Fallback: public top-level function and class names.
    let root = tree.root_node();
    let mut cursor = root.walk();
    for child in root.children(&mut cursor) {
        if matches!(child.kind(), "function_definition" | "class_definition")
            && let Some(name_node) = child.child_by_field_name("name")
        {
            let name = source[name_node.start_byte()..name_node.end_byte()].to_string();
            if !name.starts_with('_') {
                symbols.push(name);
            }
        }
    }
    tracing::debug!(import = %module, fallback_symbols = ?symbols, "using fallback function/class names");
    Some(symbols)
}
1179
1180fn extract_all_from_tree(tree: &tree_sitter::Tree, source: &str, result: &mut Vec<String>) {
1182 let root = tree.root_node();
1183 let mut cursor = root.walk();
1184 for child in root.children(&mut cursor) {
1185 if child.kind() == "simple_statement" {
1186 let mut simple_cursor = child.walk();
1188 for simple_child in child.children(&mut simple_cursor) {
1189 if simple_child.kind() == "assignment"
1190 && let Some(left) = simple_child.child_by_field_name("left")
1191 {
1192 let target_text = source[left.start_byte()..left.end_byte()].trim();
1193 if target_text == "__all__"
1194 && let Some(right) = simple_child.child_by_field_name("right")
1195 {
1196 extract_string_list_from_list_node(&right, source, result);
1197 }
1198 }
1199 }
1200 } else if child.kind() == "expression_statement" {
1201 let mut stmt_cursor = child.walk();
1203 for stmt_child in child.children(&mut stmt_cursor) {
1204 if stmt_child.kind() == "assignment"
1205 && let Some(left) = stmt_child.child_by_field_name("left")
1206 {
1207 let target_text = source[left.start_byte()..left.end_byte()].trim();
1208 if target_text == "__all__"
1209 && let Some(right) = stmt_child.child_by_field_name("right")
1210 {
1211 extract_string_list_from_list_node(&right, source, result);
1212 }
1213 }
1214 }
1215 }
1216 }
1217}
1218
1219fn extract_string_list_from_list_node(
1221 list_node: &tree_sitter::Node,
1222 source: &str,
1223 result: &mut Vec<String>,
1224) {
1225 let mut cursor = list_node.walk();
1226 for child in list_node.named_children(&mut cursor) {
1227 if child.kind() == "string" {
1228 let raw = source[child.start_byte()..child.end_byte()].trim();
1229 let unquoted = raw.trim_matches('"').trim_matches('\'').to_string();
1231 if !unquoted.is_empty() {
1232 result.push(unquoted);
1233 }
1234 }
1235 }
1236}
1237
#[cfg(test)]
mod tests {
    //! Unit tests for the analyze entry points: language resolution in
    //! `analyze_str`, focused-analysis ("symbol focus") pagination,
    //! chain-to-entry conversion, and the impl_only filter header.
    //!
    //! NOTE(review): the first pagination test is gated on
    //! `#[cfg(feature = "lang-rust")]` but several later tests that also
    //! write `.rs` fixtures and call `analyze_focused` are not. Some of
    //! those tolerate empty results via `if total > N` guards, but
    //! `test_impl_only_filter_header_correct_counts` and
    //! `test_callers_count_matches_formatted_output` assert
    //! unconditionally — confirm whether they need the same feature gate.
    use super::*;
    use crate::formatter::format_focused_paginated;
    use crate::graph::InternalCallChain;
    use crate::pagination::{PaginationMode, decode_cursor, paginate_slice};
    use std::fs;
    use std::path::PathBuf;
    use tempfile::TempDir;

    // analyze_str resolves the language from a file extension ("rs").
    #[cfg(feature = "lang-rust")]
    #[test]
    fn analyze_str_rust_happy_path() {
        let source = "fn hello() -> i32 { 42 }";
        let result = analyze_str(source, "rs", None);
        assert!(result.is_ok());
    }

    // analyze_str resolves Python from the "py" extension.
    #[cfg(feature = "lang-python")]
    #[test]
    fn analyze_str_python_happy_path() {
        let source = "def greet(name):\n return f'Hello {name}'";
        let result = analyze_str(source, "py", None);
        assert!(result.is_ok());
    }

    // A full language name ("rust") is accepted, not only extensions.
    #[cfg(feature = "lang-rust")]
    #[test]
    fn analyze_str_rust_by_language_name() {
        let source = "fn hello() -> i32 { 42 }";
        let result = analyze_str(source, "rust", None);
        assert!(result.is_ok());
    }

    // A full language name ("python") is accepted as well.
    #[cfg(feature = "lang-python")]
    #[test]
    fn analyze_str_python_by_language_name() {
        let source = "def greet(name):\n return f'Hello {name}'";
        let result = analyze_str(source, "python", None);
        assert!(result.is_ok());
    }

    // Language-name lookup is case-insensitive.
    #[cfg(feature = "lang-rust")]
    #[test]
    fn analyze_str_rust_mixed_case() {
        let source = "fn hello() -> i32 { 42 }";
        let result = analyze_str(source, "RuSt", None);
        assert!(result.is_ok());
    }

    // Case-insensitivity also holds for Python.
    #[cfg(feature = "lang-python")]
    #[test]
    fn analyze_str_python_mixed_case() {
        let source = "def greet(name):\n return f'Hello {name}'";
        let result = analyze_str(source, "PyThOn", None);
        assert!(result.is_ok());
    }

    // Unknown languages surface as UnsupportedLanguage carrying the
    // caller-supplied name verbatim.
    #[test]
    fn analyze_str_unsupported_language() {
        let result = analyze_str("code", "brainfuck", None);
        assert!(
            matches!(result, Err(AnalyzeError::UnsupportedLanguage(lang)) if lang == "brainfuck")
        );
    }

    // First page of caller pagination: 15 callers, page size 5 — expects a
    // full page plus a next_cursor.
    #[cfg(feature = "lang-rust")]
    #[test]
    fn test_symbol_focus_callers_pagination_first_page() {
        let temp_dir = TempDir::new().unwrap();

        // Generate one target plus 15 distinct callers.
        let mut code = String::from("fn target() {}\n");
        for i in 0..15 {
            code.push_str(&format!("fn caller_{:02}() {{ target(); }}\n", i));
        }
        fs::write(temp_dir.path().join("lib.rs"), &code).unwrap();

        let output = analyze_focused(temp_dir.path(), "target", 1, None, None).unwrap();

        let paginated = paginate_slice(&output.prod_chains, 0, 5, PaginationMode::Callers)
            .expect("paginate failed");
        assert!(
            paginated.total >= 5,
            "should have enough callers to paginate"
        );
        assert!(
            paginated.next_cursor.is_some(),
            "should have next_cursor for page 1"
        );

        assert_eq!(paginated.items.len(), 5);
    }

    // Second page: decode the cursor from page 1, paginate again, and check
    // the formatted CALLERS header reflects the page-2 starting index.
    // NOTE(review): not feature-gated, but the `if total_prod > 5` guard
    // makes it a no-op when rust parsing is unavailable.
    #[test]
    fn test_symbol_focus_callers_pagination_second_page() {
        let temp_dir = TempDir::new().unwrap();

        let mut code = String::from("fn target() {}\n");
        for i in 0..12 {
            code.push_str(&format!("fn caller_{:02}() {{ target(); }}\n", i));
        }
        fs::write(temp_dir.path().join("lib.rs"), &code).unwrap();

        let output = analyze_focused(temp_dir.path(), "target", 1, None, None).unwrap();
        let total_prod = output.prod_chains.len();

        if total_prod > 5 {
            let p1 = paginate_slice(&output.prod_chains, 0, 5, PaginationMode::Callers)
                .expect("paginate failed");
            assert!(p1.next_cursor.is_some());

            // Round-trip the opaque cursor to get the page-2 offset.
            let cursor_str = p1.next_cursor.unwrap();
            let cursor_data = decode_cursor(&cursor_str).expect("decode failed");

            let p2 = paginate_slice(
                &output.prod_chains,
                cursor_data.offset,
                5,
                PaginationMode::Callers,
            )
            .expect("paginate failed");

            let formatted = format_focused_paginated(
                &p2.items,
                total_prod,
                PaginationMode::Callers,
                "target",
                &output.prod_chains,
                &output.test_chains,
                &output.outgoing_chains,
                output.def_count,
                cursor_data.offset,
                Some(temp_dir.path()),
                true,
            );

            // Headers are 1-based, so page 2 starts at offset + 1.
            let expected_start = cursor_data.offset + 1;
            assert!(
                formatted.contains(&format!("CALLERS ({}", expected_start)),
                "header should show page 2 range, got: {}",
                formatted
            );
        }
    }

    // An empty chain slice converts to None rather than Some(empty vec).
    #[test]
    fn test_chains_to_entries_empty_returns_none() {
        let chains: Vec<InternalCallChain> = vec![];

        let result = chains_to_entries(&chains, None);

        assert!(result.is_none());
    }

    // Non-empty chains convert to entries; paths are relativized to root.
    #[test]
    fn test_chains_to_entries_with_data_returns_entries() {
        let chains = vec![
            InternalCallChain {
                chain: vec![("caller1".to_string(), PathBuf::from("/root/lib.rs"), 10)],
            },
            InternalCallChain {
                chain: vec![("caller2".to_string(), PathBuf::from("/root/other.rs"), 20)],
            },
        ];
        let root = PathBuf::from("/root");

        let result = chains_to_entries(&chains, Some(root.as_path()));

        assert!(result.is_some());
        let entries = result.unwrap();
        assert_eq!(entries.len(), 2);
        assert_eq!(entries[0].symbol, "caller1");
        assert_eq!(entries[0].file, "lib.rs");
        assert_eq!(entries[0].line, 10);
        assert_eq!(entries[1].symbol, "caller2");
        assert_eq!(entries[1].file, "other.rs");
        assert_eq!(entries[1].line, 20);
    }

    // Callee (outgoing) pagination: header must show "1-N of M".
    // NOTE(review): not feature-gated; guarded by `if total_callees > 3`.
    #[test]
    fn test_symbol_focus_callees_pagination() {
        let temp_dir = TempDir::new().unwrap();

        // target() calls 10 distinct callees, each defined afterwards.
        let mut code = String::from("fn target() {\n");
        for i in 0..10 {
            code.push_str(&format!(" callee_{:02}();\n", i));
        }
        code.push_str("}\n");
        for i in 0..10 {
            code.push_str(&format!("fn callee_{:02}() {{}}\n", i));
        }
        fs::write(temp_dir.path().join("lib.rs"), &code).unwrap();

        let output = analyze_focused(temp_dir.path(), "target", 1, None, None).unwrap();
        let total_callees = output.outgoing_chains.len();

        if total_callees > 3 {
            let paginated = paginate_slice(&output.outgoing_chains, 0, 3, PaginationMode::Callees)
                .expect("paginate failed");

            let formatted = format_focused_paginated(
                &paginated.items,
                total_callees,
                PaginationMode::Callees,
                "target",
                &output.prod_chains,
                &output.test_chains,
                &output.outgoing_chains,
                output.def_count,
                0,
                Some(temp_dir.path()),
                true,
            );

            assert!(
                formatted.contains(&format!(
                    "CALLEES (1-{} of {})",
                    paginated.items.len(),
                    total_callees
                )),
                "header should show callees range, got: {}",
                formatted
            );
        }
    }

    // When the only caller lives in a #[cfg(test)] module, prod_chains may
    // be empty (or single-page): paginating must not produce a next_cursor.
    // NOTE(review): not feature-gated — confirm behavior without lang-rust.
    #[test]
    fn test_symbol_focus_empty_prod_callers() {
        let temp_dir = TempDir::new().unwrap();

        let code = r#"
fn target() {}

#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_something() { target(); }
}
"#;
        fs::write(temp_dir.path().join("lib.rs"), code).unwrap();

        let output = analyze_focused(temp_dir.path(), "target", 1, None, None).unwrap();

        let paginated = paginate_slice(&output.prod_chains, 0, 100, PaginationMode::Callers)
            .expect("paginate failed");
        assert_eq!(paginated.items.len(), output.prod_chains.len());
        assert!(
            paginated.next_cursor.is_none(),
            "no next_cursor for empty or single-page prod_chains"
        );
    }

    // impl_only=true must (a) emit a FILTER header, (b) count fewer
    // impl-trait callers than total callers, and (c) show "N of M" in the
    // FILTER line matching those counts.
    // NOTE(review): asserts unconditionally on rust analysis results but has
    // no `#[cfg(feature = "lang-rust")]` gate — verify this passes when the
    // feature is off.
    #[test]
    fn test_impl_only_filter_header_correct_counts() {
        let temp_dir = TempDir::new().unwrap();

        // Fixture: focus_symbol is a trait method with one trait impl; both
        // callers invoke it, but only calls routed via the impl should
        // survive the impl_only filter.
        let code = r#"
trait MyTrait {
    fn focus_symbol();
}

struct SomeType;

impl MyTrait for SomeType {
    fn focus_symbol() {}
}

fn impl_caller() {
    SomeType::focus_symbol();
}

fn regular_caller() {
    SomeType::focus_symbol();
}
"#;
        fs::write(temp_dir.path().join("lib.rs"), code).unwrap();

        let params = FocusedAnalysisConfig {
            focus: "focus_symbol".to_string(),
            match_mode: SymbolMatchMode::Insensitive,
            follow_depth: 1,
            max_depth: None,
            ast_recursion_limit: None,
            use_summary: false,
            impl_only: Some(true),
        };
        let output = analyze_focused_with_progress(
            temp_dir.path(),
            &params,
            Arc::new(AtomicUsize::new(0)),
            CancellationToken::new(),
        )
        .unwrap();

        assert!(
            output.formatted.contains("FILTER: impl_only=true"),
            "formatted output should contain FILTER header for impl_only=true, got: {}",
            output.formatted
        );

        assert!(
            output.impl_trait_caller_count < output.unfiltered_caller_count,
            "impl_trait_caller_count ({}) should be less than unfiltered_caller_count ({})",
            output.impl_trait_caller_count,
            output.unfiltered_caller_count
        );

        // The FILTER line itself must echo the same two counts.
        let filter_line = output
            .formatted
            .lines()
            .find(|line| line.contains("FILTER: impl_only=true"))
            .expect("should find FILTER line");
        assert!(
            filter_line.contains(&format!(
                "({} of {} callers shown)",
                output.impl_trait_caller_count, output.unfiltered_caller_count
            )),
            "FILTER line should show correct N of M counts, got: {}",
            filter_line
        );
    }

    // Cross-check: the caller count printed on the FOCUS line must equal the
    // number of unique first-hop callers in prod_chains.
    // NOTE(review): also not feature-gated despite unconditional asserts.
    #[test]
    fn test_callers_count_matches_formatted_output() {
        let temp_dir = TempDir::new().unwrap();

        let code = r#"
fn target() {}
fn caller_a() { target(); }
fn caller_b() { target(); }
fn caller_c() { target(); }
"#;
        fs::write(temp_dir.path().join("lib.rs"), code).unwrap();

        let output = analyze_focused(temp_dir.path(), "target", 1, None, None).unwrap();

        // Parse "<N> callers" out of the FOCUS header line.
        let formatted = &output.formatted;
        let callers_count_from_output = formatted
            .lines()
            .find(|line| line.contains("FOCUS:"))
            .and_then(|line| {
                line.split(',')
                    .find(|part| part.contains("callers"))
                    .and_then(|part| {
                        part.trim()
                            .split_whitespace()
                            .next()
                            .and_then(|s| s.parse::<usize>().ok())
                    })
            })
            .expect("should find CALLERS count in formatted output");

        // Unique first element of each chain = distinct direct callers.
        let expected_callers_count = output
            .prod_chains
            .iter()
            .filter_map(|chain| chain.chain.first().map(|(name, _, _)| name))
            .collect::<std::collections::HashSet<_>>()
            .len();

        assert_eq!(
            callers_count_from_output, expected_callers_count,
            "CALLERS count in formatted output should match unique-first-caller count in prod_chains"
        );
    }
}