Skip to main content

reflex/semantic/
tools.rs

1//! Tool execution system for agentic context gathering
2//!
3//! This module handles execution of tool calls from the LLM including:
4//! - Running `rfx context` commands
5//! - Executing exploratory queries
6//! - Running codebase analysis (hotspots, unused files, etc.)
7
8use anyhow::{Context as AnyhowContext, Result};
9use crate::cache::CacheManager;
10use crate::dependency::DependencyIndex;
11use crate::query::QueryEngine;
12
13use super::executor::parse_command;
14use super::schema_agentic::{ToolCall, ContextGatheringParams, AnalysisType};
15
/// Result of executing a tool call
///
/// Returned by every `execute_*` helper in this module and later
/// rendered for the next LLM call by [`format_tool_results`].
#[derive(Debug, Clone)]
pub struct ToolResult {
    /// Description of what this tool did
    pub description: String,

    /// The output/result from the tool
    pub output: String,

    /// Whether the tool execution was successful
    /// (in this module only `execute_search_documentation` sets this to
    /// `false` without returning an error — all other helpers either
    /// succeed with `true` or propagate an `Err`)
    pub success: bool,
}
28
/// Execute a single tool call
///
/// Dispatches on the `ToolCall` variant to the matching `execute_*`
/// helper in this module. Only the `ExploreCodebase` arm is awaited;
/// every other helper is synchronous.
///
/// Returns the helper's [`ToolResult`] or propagates its error.
pub async fn execute_tool(
    tool: &ToolCall,
    cache: &CacheManager,
) -> Result<ToolResult> {
    match tool {
        ToolCall::GatherContext { params } => {
            execute_gather_context(params, cache)
        }
        ToolCall::ExploreCodebase { description, command } => {
            execute_explore_codebase(description, command, cache).await
        }
        ToolCall::AnalyzeStructure { analysis_type } => {
            execute_analyze_structure(*analysis_type, cache)
        }
        ToolCall::SearchDocumentation { query, files } => {
            execute_search_documentation(query, files.as_deref(), cache)
        }
        ToolCall::GetStatistics => {
            execute_get_statistics(cache)
        }
        ToolCall::GetDependencies { file_path, reverse } => {
            execute_get_dependencies(file_path, *reverse, cache)
        }
        ToolCall::GetAnalysisSummary { min_dependents } => {
            execute_get_analysis_summary(*min_dependents, cache)
        }
        ToolCall::FindIslands { min_size, max_size } => {
            execute_find_islands(*min_size, *max_size, cache)
        }
    }
}
61
62/// Execute context gathering tool
63fn execute_gather_context(
64    params: &ContextGatheringParams,
65    cache: &CacheManager,
66) -> Result<ToolResult> {
67    log::info!("Executing gather_context tool");
68
69    // Build context options from params
70    let mut opts = crate::context::ContextOptions {
71        structure: params.structure,
72        path: params.path.clone(),
73        file_types: params.file_types,
74        project_type: params.project_type,
75        framework: params.framework,
76        entry_points: params.entry_points,
77        test_layout: params.test_layout,
78        config_files: params.config_files,
79        depth: params.depth,
80        json: false, // Always use text format for LLM consumption
81    };
82
83    // If no specific flags, enable all context types by default
84    if opts.is_empty() {
85        opts.structure = true;
86        opts.file_types = true;
87        opts.project_type = true;
88        opts.framework = true;
89        opts.entry_points = true;
90        opts.test_layout = true;
91        opts.config_files = true;
92    }
93
94    // Generate context
95    let output = crate::context::generate_context(cache, &opts)
96        .context("Failed to generate codebase context")?;
97
98    // Build description of what was gathered
99    let mut parts = Vec::new();
100    if opts.structure { parts.push("structure"); }
101    if opts.file_types { parts.push("file types"); }
102    if opts.project_type { parts.push("project type"); }
103    if opts.framework { parts.push("frameworks"); }
104    if opts.entry_points { parts.push("entry points"); }
105    if opts.test_layout { parts.push("test layout"); }
106    if opts.config_files { parts.push("config files"); }
107
108    let description = if parts.is_empty() {
109        "Gathered general codebase context".to_string()
110    } else {
111        format!("Gathered codebase context: {}", parts.join(", "))
112    };
113
114    log::debug!("Context gathering successful: {} chars", output.len());
115
116    Ok(ToolResult {
117        description,
118        output,
119        success: true,
120    })
121}
122
/// Execute exploratory codebase query
///
/// Parses `command` (an `rfx`-style query string) via `parse_command`,
/// converts it into a `QueryFilter`, and runs it through a fresh
/// `QueryEngine`. Results are formatted for LLM consumption by
/// [`format_exploration_results`].
///
/// NOTE(review): declared `async` for a uniform call site in
/// `execute_tool`, but the body currently performs no `.await`.
async fn execute_explore_codebase(
    description: &str,
    command: &str,
    cache: &CacheManager,
) -> Result<ToolResult> {
    log::info!("Executing explore_codebase tool: {}", description);

    // Parse the command
    let parsed = parse_command(command)
        .with_context(|| format!("Failed to parse exploration command: {}", command))?;

    // Convert to QueryFilter
    let filter = parsed.to_query_filter()?;

    // Create query engine
    // NOTE(review): builds a new CacheManager from the workspace root
    // instead of reusing `cache` — presumably QueryEngine takes
    // ownership; confirm this is not an expensive re-open.
    let engine = QueryEngine::new(CacheManager::new(cache.workspace_root()));

    // Execute query
    let response = engine.search_with_metadata(&parsed.pattern, filter)
        .with_context(|| format!("Failed to execute exploration query: {}", command))?;

    // Format results for LLM consumption
    let output = format_exploration_results(&response, &parsed.pattern);

    log::debug!("Exploration query found {} file groups", response.results.len());

    Ok(ToolResult {
        description: format!("Explored: {}", description),
        output,
        success: true,
    })
}
156
/// Execute structure analysis (hotspots, unused files, etc.)
///
/// Runs one of three dependency-graph analyses selected by
/// `analysis_type` and formats the result as text for the LLM:
/// - `Hotspots`: top 10 most-imported files (min 2 dependents)
/// - `Unused`: files no other file imports
/// - `Circular`: import cycles
fn execute_analyze_structure(
    analysis_type: AnalysisType,
    cache: &CacheManager,
) -> Result<ToolResult> {
    log::info!("Executing analyze_structure tool: {:?}", analysis_type);

    // Create dependency index
    // NOTE(review): constructs a fresh CacheManager from the workspace
    // root rather than reusing `cache` — presumably DependencyIndex
    // takes ownership; confirm this is cheap.
    let deps_index = DependencyIndex::new(CacheManager::new(cache.workspace_root()));

    let output = match analysis_type {
        AnalysisType::Hotspots => {
            // Get hotspots (returns file IDs and counts)
            let hotspot_ids = deps_index.find_hotspots(Some(10), 2)?; // top 10, min 2 dependents

            // Convert file IDs to paths
            let file_ids: Vec<i64> = hotspot_ids.iter().map(|(id, _)| *id).collect();
            let paths = deps_index.get_file_paths(&file_ids)?;

            // Convert to (String, usize) format; IDs with no known path
            // are silently dropped
            let hotspots: Vec<(String, usize)> = hotspot_ids.iter()
                .filter_map(|(id, count)| {
                    paths.get(id).map(|path| (path.clone(), *count))
                })
                .collect();

            format_hotspots(&hotspots)
        }
        AnalysisType::Unused => {
            // Get unused files (returns file IDs)
            let unused_ids = deps_index.find_unused_files()?;

            // Convert file IDs to paths
            let paths = deps_index.get_file_paths(&unused_ids)?;
            let unused: Vec<String> = unused_ids.iter()
                .filter_map(|id| paths.get(id).cloned())
                .collect();

            format_unused_files(&unused)
        }
        AnalysisType::Circular => {
            // Get circular dependencies (returns vectors of file IDs)
            let circular_ids = deps_index.detect_circular_dependencies()?;

            // Collect all unique file IDs (HashSet dedupe) so each
            // path is fetched only once
            let all_ids: Vec<i64> = circular_ids.iter()
                .flat_map(|cycle| cycle.iter())
                .copied()
                .collect::<std::collections::HashSet<_>>()
                .into_iter()
                .collect();

            // Convert all IDs to paths
            let paths = deps_index.get_file_paths(&all_ids)?;

            // Convert cycles to path cycles; unknown IDs are dropped
            let circular: Vec<Vec<String>> = circular_ids.iter()
                .map(|cycle| {
                    cycle.iter()
                        .filter_map(|id| paths.get(id).cloned())
                        .collect()
                })
                .collect();

            format_circular_deps(&circular)
        }
    };

    let description = match analysis_type {
        AnalysisType::Hotspots => "Analyzed dependency hotspots (most-imported files)",
        AnalysisType::Unused => "Analyzed unused files (no importers)",
        AnalysisType::Circular => "Analyzed circular dependencies",
    };

    log::debug!("Analysis complete: {} chars", output.len());

    Ok(ToolResult {
        description: description.to_string(),
        output,
        success: true,
    })
}
239
/// Execute documentation search tool
///
/// Searches project documentation for `query`:
/// 1. the caller-supplied `files` (default: CLAUDE.md and README.md),
///    resolved relative to the workspace root, then
/// 2. every `*.md` file directly inside `.context/` (non-recursive).
///
/// Unreadable files are logged and skipped (best-effort). `success` is
/// `false` when nothing matched so the LLM can tell the search came up
/// empty without treating it as an error.
fn execute_search_documentation(
    query: &str,
    files: Option<&[String]>,
    cache: &CacheManager,
) -> Result<ToolResult> {
    log::info!("Executing search_documentation tool: query='{}'", query);

    let workspace_root = cache.workspace_root();

    // Default documentation files to search
    let default_files = vec!["CLAUDE.md".to_string(), "README.md".to_string()];
    let search_files = files.unwrap_or(&default_files);

    let mut found_sections = Vec::new();
    let mut searched_files = Vec::new();

    // Search specified documentation files
    for file in search_files {
        let file_path = workspace_root.join(file);

        if !file_path.exists() {
            log::debug!("Documentation file does not exist: {}", file);
            continue;
        }

        searched_files.push(file.clone());

        match std::fs::read_to_string(&file_path) {
            Ok(content) => {
                // Search for query keywords in the content
                if let Some(sections) = search_documentation_content(&content, query, file) {
                    found_sections.push(sections);
                }
            }
            Err(e) => {
                // Best-effort: log and keep searching the other files
                log::warn!("Failed to read documentation file {}: {}", file, e);
            }
        }
    }

    // Also search .context/ directory for markdown files
    let context_dir = workspace_root.join(".context");
    if context_dir.exists() && context_dir.is_dir() {
        if let Ok(entries) = std::fs::read_dir(&context_dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                if path.extension().and_then(|s| s.to_str()) == Some("md") {
                    if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
                        if let Ok(content) = std::fs::read_to_string(&path) {
                            // NOTE(review): unlike the loop above, a
                            // .context/ file is only recorded in
                            // `searched_files` when it matched, so the
                            // "searched" list in the no-result message
                            // never mentions .context/ files — confirm
                            // whether that asymmetry is intended
                            if let Some(sections) = search_documentation_content(
                                &content,
                                query,
                                &format!(".context/{}", file_name),
                            ) {
                                found_sections.push(sections);
                                searched_files.push(format!(".context/{}", file_name));
                            }
                        }
                    }
                }
            }
        }
    }

    // Format output
    let output = if found_sections.is_empty() {
        format!(
            "No relevant documentation found for query '{}' in files: {}\n\nTry:\n- Using different keywords\n- Searching the codebase directly with explore_codebase",
            query,
            searched_files.join(", ")
        )
    } else {
        format!(
            "Found documentation for '{}' in {} file(s):\n\n{}",
            query,
            found_sections.len(),
            found_sections.join("\n\n---\n\n")
        )
    };

    log::debug!("Documentation search found {} sections", found_sections.len());

    Ok(ToolResult {
        description: format!("Searched documentation for: {}", query),
        output,
        success: !found_sections.is_empty(),
    })
}
329
/// Search documentation content for query and extract relevant sections
///
/// Splits `content` into markdown sections at heading lines, scores
/// each section against the query keywords (a keyword in the heading
/// counts 10, each body line containing a keyword counts 1 per
/// keyword), keeps sections scoring at least 2, sorts them by
/// relevance (most matches first), and returns the top 3 formatted as
/// `## <title> (<file_name>)` blocks joined by blank lines.
///
/// Returns `None` when the query has no usable keywords or nothing in
/// `content` is relevant.
///
/// Fixes over the previous version: sections are now actually sorted
/// by relevance (the old code documented the sort but never performed
/// it), heading keywords are no longer double-counted, and the
/// 150-line output cap is applied per section instead of aborting the
/// whole scan (which silently dropped every later section).
fn search_documentation_content(content: &str, query: &str, file_name: &str) -> Option<String> {
    // Tokenize query into keywords (filter out common stop words and
    // anything too short to be meaningful)
    let stop_words: &[&str] = &["the", "a", "an", "and", "or", "but", "in", "on", "at", "to", "for", "of", "with", "by", "from", "is", "are", "was", "were", "be", "been", "being", "have", "has", "had", "do", "does", "did", "will", "would", "should", "could", "may", "might", "can", "what", "how", "where", "when", "why", "which", "who"];
    let query_lower = query.to_lowercase();
    let keywords: Vec<&str> = query_lower
        .split_whitespace()
        .filter(|word| !stop_words.contains(word) && word.len() > 2)
        .collect();

    if keywords.is_empty() {
        return None;
    }

    // Partition the document into (title, body lines) sections at
    // markdown headings; text before the first heading forms an
    // untitled preamble section.
    let mut sections: Vec<(String, Vec<&str>)> = vec![(String::new(), Vec::new())];
    for line in content.lines() {
        if line.starts_with('#') {
            sections.push((line.trim_start_matches('#').trim().to_string(), Vec::new()));
        } else {
            sections
                .last_mut()
                .expect("sections always holds at least the preamble")
                .1
                .push(line);
        }
    }

    // Score each section and format the relevant ones.
    let mut scored: Vec<(usize, String)> = Vec::new();
    for (title, body) in &sections {
        let title_lower = title.to_lowercase();
        // Heading matches are weighted heavily (10 per keyword).
        let mut score = keywords
            .iter()
            .filter(|k| title_lower.contains(*k))
            .count()
            * 10;
        for line in body {
            let line_lower = line.to_lowercase();
            score += keywords.iter().filter(|k| line_lower.contains(*k)).count();
        }

        // Need at least 2 keyword matches to consider a section relevant.
        if score >= 2 {
            // Cap each section at 150 lines to keep output bounded.
            let text = body.iter().take(150).copied().collect::<Vec<_>>().join("\n");
            scored.push((
                score,
                format!("## {} ({})\n\n{}", title, file_name, text.trim()),
            ));
        }
    }

    if scored.is_empty() {
        return None;
    }

    // Sort sections by relevance (most matches first) and keep the top 3.
    scored.sort_by(|a, b| b.0.cmp(&a.0));
    Some(
        scored
            .into_iter()
            .take(3)
            .map(|(_, section)| section)
            .collect::<Vec<_>>()
            .join("\n\n"),
    )
}
421
/// Format exploration query results for LLM
///
/// Renders up to 5 file groups with up to 3 matches each, including
/// any before/after context lines captured by the query. Trailing
/// summary lines report how many matches/files were omitted.
fn format_exploration_results(
    response: &crate::models::QueryResponse,
    pattern: &str,
) -> String {
    if response.results.is_empty() {
        return format!("No results found for pattern: {}", pattern);
    }

    let mut output = Vec::new();
    output.push(format!(
        "Found {} total matches across {} files for pattern '{}':\n",
        response.pagination.total,
        response.results.len(),
        pattern
    ));

    // Show first 5 file groups
    for (idx, file_group) in response.results.iter().take(5).enumerate() {
        output.push(format!("\n{}. {}", idx + 1, file_group.path));

        // Show first 3 matches per file
        for match_result in file_group.matches.iter().take(3) {
            // Show context before the match; line numbers count
            // backwards from the match's start line (saturating so
            // they never underflow)
            // NOTE(review): the inner `idx` bindings below shadow the
            // file-group index from the outer loop
            for (idx, line) in match_result.context_before.iter().enumerate() {
                let line_num = match_result.span.start_line.saturating_sub(match_result.context_before.len() - idx);
                output.push(format!("   Line {}: {}", line_num, line.trim()));
            }

            // Show the match line itself (first line of the preview only)
            output.push(format!(
                "   Line {}: {}",
                match_result.span.start_line,
                match_result.preview.lines().next().unwrap_or("").trim()
            ));

            // Show context after the match
            for (idx, line) in match_result.context_after.iter().enumerate() {
                let line_num = match_result.span.start_line + idx + 1;
                output.push(format!("   Line {}: {}", line_num, line.trim()));
            }
        }

        if file_group.matches.len() > 3 {
            output.push(format!("   ... and {} more matches", file_group.matches.len() - 3));
        }
    }

    if response.results.len() > 5 {
        output.push(format!("\n... and {} more files", response.results.len() - 5));
    }

    output.join("\n")
}
476
/// Format hotspot analysis results
///
/// Lists up to the first 10 `(path, importer count)` pairs as a
/// numbered list, with a trailing note for any omitted entries.
fn format_hotspots(hotspots: &[(String, usize)]) -> String {
    if hotspots.is_empty() {
        return "No dependency hotspots found.".to_string();
    }

    let shown = hotspots.len().min(10);
    let mut lines = vec![format!("Top {} most-imported files:\n", shown)];
    lines.extend(
        hotspots
            .iter()
            .take(10)
            .enumerate()
            .map(|(rank, (path, importers))| {
                format!("{}. {} ({} importers)", rank + 1, path, importers)
            }),
    );

    let overflow = hotspots.len().saturating_sub(10);
    if overflow > 0 {
        lines.push(format!("\n... and {} more hotspots", overflow));
    }

    lines.join("\n")
}
496
/// Format unused files analysis results
///
/// Lists up to the first 15 unimported files as a numbered list, with
/// a trailing note for any omitted entries.
fn format_unused_files(unused: &[String]) -> String {
    if unused.is_empty() {
        return "No unused files found (all files are imported by others).".to_string();
    }

    let mut report = format!("Found {} unused files (no importers):\n", unused.len());
    for (rank, path) in unused.iter().take(15).enumerate() {
        report.push_str(&format!("\n{}. {}", rank + 1, path));
    }

    if unused.len() > 15 {
        report.push_str(&format!("\n\n... and {} more unused files", unused.len() - 15));
    }

    report
}
516
/// Format circular dependency analysis results
///
/// Shows up to the first 5 cycles, each rendered as an arrow-joined
/// chain of file paths, with a trailing note for omitted cycles.
fn format_circular_deps(circular: &[Vec<String>]) -> String {
    if circular.is_empty() {
        return "No circular dependencies found.".to_string();
    }

    let mut lines: Vec<String> = Vec::new();
    lines.push(format!("Found {} circular dependency chains:\n", circular.len()));

    for (rank, cycle) in circular.iter().enumerate().take(5) {
        lines.push(format!("\n{}. Cycle ({} files):", rank + 1, cycle.len()));
        lines.push(format!("   {}", cycle.join(" → ")));
    }

    if circular.len() > 5 {
        lines.push(format!("\n... and {} more circular dependencies", circular.len() - 5));
    }

    lines.join("\n")
}
537
538/// Execute get statistics tool
539fn execute_get_statistics(
540    cache: &CacheManager,
541) -> Result<ToolResult> {
542    log::info!("Executing get_statistics tool");
543
544    // Get index statistics
545    let stats = cache.stats()
546        .context("Failed to get cache statistics")?;
547
548    // Format output
549    let output = format_statistics(&stats);
550
551    log::debug!("Statistics retrieved successfully");
552
553    Ok(ToolResult {
554        description: "Retrieved index statistics".to_string(),
555        output,
556        success: true,
557    })
558}
559
/// Execute get dependencies tool
///
/// Resolves `file_path` in the dependency index (fuzzy matching is
/// supported by `get_file_id_by_path`) and returns either:
/// - `reverse == false`: the files `file_path` imports, or
/// - `reverse == true`: the files that import `file_path`.
fn execute_get_dependencies(
    file_path: &str,
    reverse: bool,
    cache: &CacheManager,
) -> Result<ToolResult> {
    log::info!("Executing get_dependencies tool: file={}, reverse={}", file_path, reverse);

    // Create dependency index
    let deps_index = DependencyIndex::new(CacheManager::new(cache.workspace_root()));

    // Get file ID by path (supports fuzzy matching)
    // Two failure modes: the lookup itself erroring vs. the file
    // simply not being indexed (Ok(None))
    let file_id = deps_index.get_file_id_by_path(file_path)
        .context(format!("Failed to find file: {}", file_path))?
        .ok_or_else(|| anyhow::anyhow!("File not found: {}", file_path))?;

    let output = if reverse {
        // Get files that depend on this file (reverse dependencies)
        let dependent_ids = deps_index.get_dependents(file_id)
            .context("Failed to get reverse dependencies")?;

        // Convert file IDs to paths; IDs without a known path are dropped
        let paths = deps_index.get_file_paths(&dependent_ids)?;
        let dependents: Vec<String> = dependent_ids.iter()
            .filter_map(|id| paths.get(id).cloned())
            .collect();

        format_reverse_dependencies(file_path, &dependents)
    } else {
        // Get dependencies of this file
        let deps = deps_index.get_dependencies_info(file_id)
            .context("Failed to get dependencies")?;

        format_dependencies(file_path, &deps)
    };

    let description = if reverse {
        format!("Found reverse dependencies for: {}", file_path)
    } else {
        format!("Found dependencies for: {}", file_path)
    };

    log::debug!("Dependencies retrieved successfully");

    Ok(ToolResult {
        description,
        output,
        success: true,
    })
}
610
611/// Execute get analysis summary tool
612fn execute_get_analysis_summary(
613    min_dependents: usize,
614    cache: &CacheManager,
615) -> Result<ToolResult> {
616    log::info!("Executing get_analysis_summary tool: min_dependents={}", min_dependents);
617
618    // Create dependency index
619    let deps_index = DependencyIndex::new(CacheManager::new(cache.workspace_root()));
620
621    // Get hotspots
622    let hotspot_ids = deps_index.find_hotspots(Some(10), min_dependents)?;
623    let hotspot_count = hotspot_ids.len();
624
625    // Get unused files count
626    let unused_ids = deps_index.find_unused_files()?;
627    let unused_count = unused_ids.len();
628
629    // Get circular dependencies count
630    let circular_ids = deps_index.detect_circular_dependencies()?;
631    let circular_count = circular_ids.len();
632
633    // Format summary
634    let output = format_analysis_summary(hotspot_count, unused_count, circular_count, min_dependents);
635
636    log::debug!("Analysis summary retrieved successfully");
637
638    Ok(ToolResult {
639        description: "Retrieved dependency analysis summary".to_string(),
640        output,
641        success: true,
642    })
643}
644
/// Execute find islands tool
///
/// Finds disconnected components ("islands") in the dependency graph,
/// keeps only those whose file count lies in `min_size..=max_size`
/// (inclusive on both ends), and resolves file IDs to paths for
/// display.
fn execute_find_islands(
    min_size: usize,
    max_size: usize,
    cache: &CacheManager,
) -> Result<ToolResult> {
    log::info!("Executing find_islands tool: min_size={}, max_size={}", min_size, max_size);

    // Create dependency index
    let deps_index = DependencyIndex::new(CacheManager::new(cache.workspace_root()));

    // Get all islands
    let all_islands = deps_index.find_islands()?;

    // Filter by size
    let filtered_islands: Vec<Vec<i64>> = all_islands.into_iter()
        .filter(|island| island.len() >= min_size && island.len() <= max_size)
        .collect();

    // Convert file IDs to paths; HashSet dedupe so each path is
    // fetched only once even if an ID appears in multiple islands
    let all_ids: Vec<i64> = filtered_islands.iter()
        .flat_map(|island| island.iter())
        .copied()
        .collect::<std::collections::HashSet<_>>()
        .into_iter()
        .collect();

    let paths = deps_index.get_file_paths(&all_ids)?;

    // IDs without a known path are silently dropped from each island
    let islands_with_paths: Vec<Vec<String>> = filtered_islands.iter()
        .map(|island| {
            island.iter()
                .filter_map(|id| paths.get(id).cloned())
                .collect()
        })
        .collect();

    // Format output
    let output = format_islands(&islands_with_paths, min_size, max_size);

    log::debug!("Islands retrieved successfully: {} islands found", islands_with_paths.len());

    Ok(ToolResult {
        description: format!("Found {} disconnected components", islands_with_paths.len()),
        output,
        success: true,
    })
}
693
694/// Format statistics output
695fn format_statistics(stats: &crate::models::IndexStats) -> String {
696    let mut output = Vec::new();
697
698    output.push(format!("# Index Statistics\n"));
699    output.push(format!("Total files: {}", stats.total_files));
700    output.push(format!("Index size: {:.2} MB\n", stats.index_size_bytes as f64 / 1_048_576.0));
701
702    // Files by language
703    if !stats.files_by_language.is_empty() {
704        output.push("## Files by Language\n".to_string());
705        let mut lang_counts: Vec<_> = stats.files_by_language.iter().collect();
706        lang_counts.sort_by(|a, b| b.1.cmp(a.1)); // Sort by count descending
707
708        for (lang, count) in lang_counts.iter().take(10) {
709            let percentage = (**count as f64 / stats.total_files as f64) * 100.0;
710            output.push(format!("- {}: {} files ({:.1}%)", lang, count, percentage));
711        }
712
713        if lang_counts.len() > 10 {
714            output.push(format!("... and {} more languages", lang_counts.len() - 10));
715        }
716    }
717
718    // Lines by language
719    if !stats.lines_by_language.is_empty() {
720        output.push("\n## Lines of Code by Language\n".to_string());
721        let mut line_counts: Vec<_> = stats.lines_by_language.iter().collect();
722        line_counts.sort_by(|a, b| b.1.cmp(a.1)); // Sort by count descending
723
724        let total_lines: usize = stats.lines_by_language.values().sum();
725
726        for (lang, count) in line_counts.iter().take(10) {
727            let percentage = (**count as f64 / total_lines as f64) * 100.0;
728            let formatted_count = count.to_string().as_str().chars().rev().enumerate().map(|(i, c)| if i != 0 && i % 3 == 0 { format!(",{}", c) } else { c.to_string() }).collect::<Vec<_>>().into_iter().rev().collect::<String>();
729            output.push(format!("- {}: {} lines ({:.1}%)", lang, formatted_count, percentage));
730        }
731
732        if line_counts.len() > 10 {
733            output.push(format!("... and {} more languages", line_counts.len() - 10));
734        }
735    }
736
737    output.push(format!("\nLast updated: {}", stats.last_updated));
738
739    output.join("\n")
740}
741
742/// Format dependencies output
743fn format_dependencies(file_path: &str, deps: &[crate::models::DependencyInfo]) -> String {
744    if deps.is_empty() {
745        return format!("File '{}' has no dependencies.", file_path);
746    }
747
748    let mut output = Vec::new();
749    output.push(format!("# Dependencies of '{}'\n", file_path));
750    output.push(format!("Found {} dependencies:\n", deps.len()));
751
752    for (idx, dep) in deps.iter().take(20).enumerate() {
753        let line_info = dep.line.map(|l| format!(" (line {})", l)).unwrap_or_default();
754        output.push(format!("{}. {}{}", idx + 1, dep.path, line_info));
755
756        // Show imported symbols if available
757        if let Some(symbols) = &dep.symbols {
758            if !symbols.is_empty() {
759                output.push(format!("   Symbols: {}", symbols.join(", ")));
760            }
761        }
762    }
763
764    if deps.len() > 20 {
765        output.push(format!("\n... and {} more dependencies", deps.len() - 20));
766    }
767
768    output.join("\n")
769}
770
/// Format reverse dependencies output
///
/// Numbered list of up to 20 files that import `file_path`, with a
/// trailing note for any omitted entries.
fn format_reverse_dependencies(file_path: &str, dependents: &[String]) -> String {
    if dependents.is_empty() {
        return format!("No files depend on '{}'.", file_path);
    }

    let numbered = dependents
        .iter()
        .take(20)
        .enumerate()
        .map(|(rank, path)| format!("{}. {}", rank + 1, path));

    let mut lines = vec![
        format!("# Files that import '{}'\n", file_path),
        format!("Found {} files:\n", dependents.len()),
    ];
    lines.extend(numbered);

    if dependents.len() > 20 {
        lines.push(format!("\n... and {} more files", dependents.len() - 20));
    }

    lines.join("\n")
}
791
/// Format analysis summary output
///
/// Renders the three headline counts, then appends an explanatory
/// note for each metric that is non-zero.
fn format_analysis_summary(hotspot_count: usize, unused_count: usize, circular_count: usize, min_dependents: usize) -> String {
    let mut sections = vec![
        "# Dependency Analysis Summary\n".to_string(),
        format!("Hotspots (files with {}+ importers): {}", min_dependents, hotspot_count),
        format!("Unused files (no importers): {}", unused_count),
        format!("Circular dependency chains: {}", circular_count),
    ];

    // Explanatory notes, emitted only for non-zero findings.
    let notes: [(usize, &str); 3] = [
        (hotspot_count, "\n**Hotspots** indicate central/important files that many other files depend on."),
        (unused_count, "\n**Unused files** may be dead code or entry points (like main.rs, index.ts)."),
        (circular_count, "\n**Circular dependencies** can cause compilation issues and indicate architectural problems."),
    ];
    sections.extend(
        notes
            .iter()
            .filter(|(count, _)| *count > 0)
            .map(|(_, note)| note.to_string()),
    );

    sections.join("\n")
}
815
/// Format islands output
///
/// Shows up to 5 islands with up to 10 files each, trailing overflow
/// notes for omitted files/islands, and a fixed explanatory footer.
fn format_islands(islands: &[Vec<String>], min_size: usize, max_size: usize) -> String {
    if islands.is_empty() {
        return format!("No disconnected components found (size {}-{}).", min_size, max_size);
    }

    let mut lines: Vec<String> = vec![
        "# Disconnected Components (Islands)\n".to_string(),
        format!("Found {} islands (size {}-{}):\n", islands.len(), min_size, max_size),
    ];

    for (island_rank, island) in islands.iter().enumerate().take(5) {
        lines.push(format!("\n{}. Island with {} files:", island_rank + 1, island.len()));
        lines.extend(
            island
                .iter()
                .take(10)
                .enumerate()
                .map(|(file_rank, file)| format!("   {}. {}", file_rank + 1, file)),
        );
        if island.len() > 10 {
            lines.push(format!("   ... and {} more files", island.len() - 10));
        }
    }

    if islands.len() > 5 {
        lines.push(format!("\n... and {} more islands", islands.len() - 5));
    }

    lines.push("\n**Islands** are groups of files that depend on each other but have no dependencies outside the group.".to_string());
    lines.push("This can indicate isolated subsystems or potential dead code.".to_string());

    lines.join("\n")
}
847
848/// Format all tool results into a single context string for the next LLM call
849pub fn format_tool_results(results: &[ToolResult]) -> String {
850    if results.is_empty() {
851        return String::new();
852    }
853
854    let mut output = Vec::new();
855    output.push("## Tool Execution Results\n".to_string());
856
857    for (idx, result) in results.iter().enumerate() {
858        output.push(format!("\n### Tool {} - {}", idx + 1, result.description));
859        output.push(String::new());
860        output.push(result.output.clone());
861        output.push(String::new());
862    }
863
864    output.join("\n")
865}
866
#[cfg(test)]
mod tests {
    // Unit tests cover only the pure formatting helpers; the
    // `execute_*` functions require a real CacheManager/workspace and
    // are not exercised here.
    use super::*;

    // Empty input must produce an empty context string.
    #[test]
    fn test_format_tool_results_empty() {
        let results = vec![];
        let output = format_tool_results(&results);
        assert!(output.is_empty());
    }

    // A single result yields the header plus its description/output.
    #[test]
    fn test_format_tool_results_single() {
        let results = vec![ToolResult {
            description: "Test tool".to_string(),
            output: "Test output".to_string(),
            success: true,
        }];

        let output = format_tool_results(&results);
        assert!(output.contains("Tool Execution Results"));
        assert!(output.contains("Test tool"));
        assert!(output.contains("Test output"));
    }

    // Hotspots render path plus importer count.
    #[test]
    fn test_format_hotspots() {
        let hotspots = vec![
            ("src/main.rs".to_string(), 10),
            ("src/lib.rs".to_string(), 5),
        ];

        let output = format_hotspots(&hotspots);
        assert!(output.contains("most-imported files"));
        assert!(output.contains("src/main.rs"));
        assert!(output.contains("10 importers"));
    }

    // Unused files are listed by path.
    #[test]
    fn test_format_unused_files() {
        let unused = vec![
            "src/old.rs".to_string(),
            "tests/legacy.rs".to_string(),
        ];

        let output = format_unused_files(&unused);
        assert!(output.contains("unused files"));
        assert!(output.contains("src/old.rs"));
    }

    // Cycles are rendered as an arrow-joined chain.
    #[test]
    fn test_format_circular_deps() {
        let circular = vec![
            vec!["a.rs".to_string(), "b.rs".to_string(), "a.rs".to_string()],
        ];

        let output = format_circular_deps(&circular);
        assert!(output.contains("circular dependency"));
        assert!(output.contains("a.rs → b.rs → a.rs"));
    }
}