// code_analyze_mcp — lib.rs
//! Rust MCP server for code structure analysis using tree-sitter.
//!
//! This crate exposes four MCP tools for multiple programming languages:
//!
//! - **`analyze_directory`**: Directory tree with file counts and structure
//! - **`analyze_file`**: Semantic extraction (functions, classes, imports, references)
//! - **`analyze_symbol`**: Call graph analysis (callers and callees)
//! - **`analyze_module`**: Lightweight function and import index
//!
//! Key entry points:
//! - [`analyze::analyze_directory`]: Analyze entire directory tree
//! - [`analyze::analyze_file`]: Analyze single file
//! - [`parser::ElementExtractor`]: Parse language-specific elements
//!
//! Languages supported: Rust, Go, Java, Python, TypeScript, TSX, Fortran.

17pub mod logging;
18pub mod metrics;
19
20use code_analyze_core::{analyze, cache, completion, graph, traversal, types};
21
/// Directory names excluded from traversal: dependency caches, VCS metadata,
/// build outputs, and virtual environments.
/// NOTE(review): not referenced in this chunk — presumably consumed by the
/// traversal module; confirm against `walk_directory`.
pub(crate) const EXCLUDED_DIRS: &[&str] = &[
    "node_modules",
    "vendor",
    ".git",
    "__pycache__",
    "target",
    "dist",
    "build",
    ".venv",
];
32
33use code_analyze_core::cache::AnalysisCache;
34use code_analyze_core::formatter::{
35    format_file_details_paginated, format_file_details_summary, format_focused_paginated,
36    format_module_info, format_structure_paginated, format_summary,
37};
38use code_analyze_core::pagination::{
39    CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
40};
41use code_analyze_core::traversal::{WalkEntry, walk_directory};
42use code_analyze_core::types::{
43    AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
44    AnalyzeSymbolParams, SymbolMatchMode,
45};
46use logging::LogEvent;
47use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
48use rmcp::handler::server::wrapper::Parameters;
49use rmcp::model::{
50    CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
51    CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
52    LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
53    ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
54};
55use rmcp::service::{NotificationContext, RequestContext};
56use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
57use serde_json::Value;
58use std::path::Path;
59use std::sync::{Arc, Mutex};
60use tokio::sync::{Mutex as TokioMutex, mpsc};
61use tracing::{instrument, warn};
62use tracing_subscriber::filter::LevelFilter;
63
/// Process-wide monotonic counter (starts at 0).
/// NOTE(review): not read or incremented in this chunk — presumably used for
/// assigning session ids elsewhere in the file; confirm.
static GLOBAL_SESSION_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
65
/// Maximum formatted-output length in characters; beyond this, tools either
/// auto-summarize or return a size-limit error (see `run_focused_with_auto_summary`).
const SIZE_LIMIT: usize = 50_000;
67
/// Returns `true` when `summary=true` and a `cursor` are both provided, which is an invalid
/// combination since summary mode and pagination are mutually exclusive.
#[must_use]
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!((summary, cursor), (Some(true), Some(_)))
}
74
75#[must_use]
76fn error_meta(
77    category: &'static str,
78    is_retryable: bool,
79    suggested_action: &'static str,
80) -> serde_json::Value {
81    serde_json::json!({
82        "errorCategory": category,
83        "isRetryable": is_retryable,
84        "suggestedAction": suggested_action,
85    })
86}
87
88#[must_use]
89fn err_to_tool_result(e: ErrorData) -> CallToolResult {
90    CallToolResult::error(vec![Content::text(e.message)])
91}
92
93fn no_cache_meta() -> Meta {
94    let mut m = serde_json::Map::new();
95    m.insert(
96        "cache_hint".to_string(),
97        serde_json::Value::String("no-cache".to_string()),
98    );
99    Meta(m)
100}
101
102/// Helper function for paginating focus chains (callers or callees).
103/// Returns (items, re-encoded_cursor_option).
104fn paginate_focus_chains(
105    chains: &[graph::InternalCallChain],
106    mode: PaginationMode,
107    offset: usize,
108    page_size: usize,
109) -> Result<(Vec<graph::InternalCallChain>, Option<String>), ErrorData> {
110    let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
111        ErrorData::new(
112            rmcp::model::ErrorCode::INTERNAL_ERROR,
113            e.to_string(),
114            Some(error_meta("transient", true, "retry the request")),
115        )
116    })?;
117
118    if paginated.next_cursor.is_none() && offset == 0 {
119        return Ok((paginated.items, None));
120    }
121
122    let next = if let Some(raw_cursor) = paginated.next_cursor {
123        let decoded = decode_cursor(&raw_cursor).map_err(|e| {
124            ErrorData::new(
125                rmcp::model::ErrorCode::INVALID_PARAMS,
126                e.to_string(),
127                Some(error_meta("validation", false, "invalid cursor format")),
128            )
129        })?;
130        Some(
131            encode_cursor(&CursorData {
132                mode,
133                offset: decoded.offset,
134            })
135            .map_err(|e| {
136                ErrorData::new(
137                    rmcp::model::ErrorCode::INVALID_PARAMS,
138                    e.to_string(),
139                    Some(error_meta("validation", false, "invalid cursor format")),
140                )
141            })?,
142        )
143    } else {
144        None
145    };
146
147    Ok((paginated.items, next))
148}
149
/// MCP server handler that wires the four analysis tools to the rmcp transport.
///
/// Holds shared state: tool router, analysis cache, peer connection, log-level filter,
/// log event channel, metrics sender, and per-session sequence tracking.
#[derive(Clone)]
pub struct CodeAnalyzer {
    // Router dispatching incoming tool calls to the #[tool] methods below.
    tool_router: ToolRouter<Self>,
    // Cache of analysis results (created with capacity 100 in `new`).
    cache: AnalysisCache,
    // Connected MCP peer, if any; used to push progress notifications.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    // Current log-level filter; presumably adjusted via logging/setLevel — the
    // handler is not in this chunk, confirm.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    // Receiver half of the log-event channel, wrapped in Option so a forwarding
    // task can take it once; consumption site not visible in this chunk.
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    // Sender for metrics events.
    metrics_tx: crate::metrics::MetricsSender,
    // Per-session counter of tool invocations (incremented in tool handlers).
    session_call_seq: Arc<std::sync::atomic::AtomicU32>,
    // Session identifier, None until assigned.
    session_id: Arc<TokioMutex<Option<String>>>,
}
165
166#[tool_router]
167impl CodeAnalyzer {
    /// Lists all MCP tools registered on this handler via the `#[tool]` attributes.
    #[must_use]
    pub fn list_tools() -> Vec<rmcp::model::Tool> {
        Self::tool_router().list_all()
    }
172
173    pub fn new(
174        peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
175        log_level_filter: Arc<Mutex<LevelFilter>>,
176        event_rx: mpsc::UnboundedReceiver<LogEvent>,
177        metrics_tx: crate::metrics::MetricsSender,
178    ) -> Self {
179        CodeAnalyzer {
180            tool_router: Self::tool_router(),
181            cache: AnalysisCache::new(100),
182            peer,
183            log_level_filter,
184            event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
185            metrics_tx,
186            session_call_seq: Arc::new(std::sync::atomic::AtomicU32::new(0)),
187            session_id: Arc::new(TokioMutex::new(None)),
188        }
189    }
190
191    #[instrument(skip(self))]
192    async fn emit_progress(
193        &self,
194        peer: Option<Peer<RoleServer>>,
195        token: &ProgressToken,
196        progress: f64,
197        total: f64,
198        message: String,
199    ) {
200        if let Some(peer) = peer {
201            let notification = ServerNotification::ProgressNotification(Notification::new(
202                ProgressNotificationParam {
203                    progress_token: token.clone(),
204                    progress,
205                    total: Some(total),
206                    message: Some(message),
207                },
208            ));
209            if let Err(e) = peer.send_notification(notification).await {
210                warn!("Failed to send progress notification: {}", e);
211            }
212        }
213    }
214
    /// Private helper: Extract analysis logic for overview mode (`analyze_directory`).
    /// Returns the complete analysis output after spawning and monitoring progress.
    /// Cancels the blocking task when `ct` is triggered; returns an error on cancellation.
    ///
    /// # Errors
    /// - `INTERNAL_ERROR` ("resource") when the walk or the analysis itself fails.
    /// - `INTERNAL_ERROR` ("transient") on cancellation or task join failure.
    #[allow(clippy::too_many_lines)] // long but cohesive analysis loop; extracting sub-functions would obscure the control flow
    #[allow(clippy::cast_precision_loss)] // progress percentage display; precision loss acceptable for usize counts
    #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<std::sync::Arc<analyze::AnalysisOutput>, ErrorData> {
        let path = Path::new(&params.path);
        // Shared progress counter: the blocking task increments it per file;
        // the polling loop below reads it.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        // Single unbounded walk; filter in-memory to respect max_depth for analysis.
        let all_entries = walk_directory(path, None).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path permissions and availability",
                )),
            )
        })?;

        // Canonicalize max_depth: Some(0) is semantically identical to None (unlimited).
        let canonical_max_depth = max_depth.and_then(|d| if d == 0 { None } else { Some(d) });

        // Build cache key from all_entries (before depth filtering)
        let cache_key = cache::DirectoryCacheKey::from_entries(
            &all_entries,
            canonical_max_depth,
            AnalysisMode::Overview,
        );

        // Check cache
        if let Some(cached) = self.cache.get_directory(&cache_key) {
            return Ok(cached);
        }

        // Compute subtree counts from the full entry set before filtering.
        // Only needed when a positive depth limit hides part of the tree.
        let subtree_counts = if max_depth.is_some_and(|d| d > 0) {
            Some(traversal::subtree_counts_from_entries(path, &all_entries))
        } else {
            None
        };

        // Filter to depth-bounded subset for analysis.
        let entries: Vec<traversal::WalkEntry> = if let Some(depth) = max_depth
            && depth > 0
        {
            all_entries
                .into_iter()
                .filter(|e| e.depth <= depth as usize)
                .collect()
        } else {
            all_entries
        };

        // Get total file count for progress reporting
        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // Spawn blocking analysis with progress tracking
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Poll and emit progress every 100ms.
        // Token is made unique via a nanosecond timestamp (falls back to 0 if
        // the system clock is before the Unix epoch).
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            // Only notify when the count actually moved, to avoid redundant messages.
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {current}/{total_files} files"),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Emit final 100% progress only if not cancelled
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files"),
            )
            .await;
        }

        // Await the task even after cancellation: the worker watches its own
        // `ct` clone and is expected to return AnalyzeError::Cancelled promptly.
        match handle.await {
            Ok(Ok(mut output)) => {
                output.subtree_counts = subtree_counts;
                let arc_output = std::sync::Arc::new(output);
                self.cache.put_directory(cache_key, arc_output.clone());
                Ok(arc_output)
            }
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path and file permissions",
                )),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
365
366    /// Private helper: Extract analysis logic for file details mode (`analyze_file`).
367    /// Returns the cached or newly analyzed file output.
368    #[instrument(skip(self, params))]
369    async fn handle_file_details_mode(
370        &self,
371        params: &AnalyzeFileParams,
372    ) -> Result<std::sync::Arc<analyze::FileAnalysisOutput>, ErrorData> {
373        // Build cache key from file metadata
374        let cache_key = std::fs::metadata(&params.path).ok().and_then(|meta| {
375            meta.modified().ok().map(|mtime| cache::CacheKey {
376                path: std::path::PathBuf::from(&params.path),
377                modified: mtime,
378                mode: AnalysisMode::FileDetails,
379            })
380        });
381
382        // Check cache first
383        if let Some(ref key) = cache_key
384            && let Some(cached) = self.cache.get(key)
385        {
386            return Ok(cached);
387        }
388
389        // Cache miss or no cache key, analyze and optionally store
390        match analyze::analyze_file(&params.path, params.ast_recursion_limit) {
391            Ok(output) => {
392                let arc_output = std::sync::Arc::new(output);
393                if let Some(key) = cache_key {
394                    self.cache.put(key, arc_output.clone());
395                }
396                Ok(arc_output)
397            }
398            Err(e) => Err(ErrorData::new(
399                rmcp::model::ErrorCode::INTERNAL_ERROR,
400                format!("Error analyzing file: {e}"),
401                Some(error_meta(
402                    "resource",
403                    false,
404                    "check file path and permissions",
405                )),
406            )),
407        }
408    }
409
410    // Validate impl_only: only valid for directories that contain Rust source files.
411    fn validate_impl_only(entries: &[WalkEntry]) -> Result<(), ErrorData> {
412        let has_rust = entries.iter().any(|e| {
413            !e.is_dir
414                && e.path
415                    .extension()
416                    .and_then(|x: &std::ffi::OsStr| x.to_str())
417                    == Some("rs")
418        });
419
420        if !has_rust {
421            return Err(ErrorData::new(
422                rmcp::model::ErrorCode::INVALID_PARAMS,
423                "impl_only=true requires Rust source files. No .rs files found in the given path. Use analyze_symbol without impl_only for cross-language analysis.".to_string(),
424                Some(error_meta(
425                    "validation",
426                    false,
427                    "remove impl_only or point to a directory containing .rs files",
428                )),
429            ));
430        }
431        Ok(())
432    }
433
    // Poll progress until analysis task completes.
    //
    // Spawns the focused analysis on the blocking pool, then wakes every 100ms
    // to forward the shared `counter` as a progress notification. Returns the
    // analysis output, or an error on cancellation / analysis failure / panic.
    #[allow(clippy::cast_precision_loss)] // progress percentage display; precision loss acceptable for usize counts
    async fn poll_progress_until_done(
        &self,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
        symbol_display: &str,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // Clone everything the blocking closure needs: spawn_blocking requires 'static.
        let counter_clone = counter.clone();
        let ct_clone = ct.clone();
        let entries_clone = std::sync::Arc::clone(&entries);
        let path_owned = analysis_params.path.clone();
        let symbol_owned = analysis_params.symbol.clone();
        let match_mode_owned = analysis_params.match_mode.clone();
        let follow_depth = analysis_params.follow_depth;
        let max_depth = analysis_params.max_depth;
        let ast_recursion_limit = analysis_params.ast_recursion_limit;
        let use_summary = analysis_params.use_summary;
        let impl_only = analysis_params.impl_only;
        let handle = tokio::task::spawn_blocking(move || {
            let params = analyze::FocusedAnalysisConfig {
                focus: symbol_owned,
                match_mode: match_mode_owned,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                use_summary,
                impl_only,
            };
            analyze::analyze_focused_with_progress_with_entries(
                &path_owned,
                &params,
                &counter_clone,
                &ct_clone,
                &entries_clone,
            )
        });

        // Progress token made unique via a nanosecond timestamp (0 if the
        // system clock is before the Unix epoch).
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;

        // 100ms polling loop: notify when the counter moves; stop on
        // cancellation or when the blocking task finishes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {current}/{total_files} files for symbol '{symbol_display}'"
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification, skipped when the request was cancelled.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files for symbol '{symbol_display}'"),
            )
            .await;
        }

        // The worker watches its own `ct` clone and reports Cancelled itself.
        match handle.await {
            Ok(Ok(output)) => Ok(output),
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing symbol: {e}"),
                Some(error_meta("resource", false, "check symbol name and file")),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
544
    // Run focused analysis with auto-summary retry on SIZE_LIMIT overflow.
    //
    // Size-limit policy:
    // - summary unset + oversized output: transparently re-run in summary mode;
    //   if that retry fails, return INVALID_PARAMS suggesting summary/force.
    // - summary=false + oversized output (force != true): hard error listing options.
    // - force=true: never summarize, never error on size.
    async fn run_focused_with_auto_summary(
        &self,
        params: &AnalyzeSymbolParams,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // Honor an explicit summary=true up front, unless force=true overrides it.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        let analysis_params_initial = FocusedAnalysisParams {
            use_summary: use_summary_for_task,
            ..analysis_params.clone()
        };

        let mut output = self
            .poll_progress_until_done(
                &analysis_params_initial,
                counter.clone(),
                ct.clone(),
                entries.clone(),
                total_files,
                &params.symbol,
            )
            .await?;

        // Auto-summarize only when the caller did not set summary explicitly.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            // Fresh counter for the retry pass; progress restarts from zero.
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let analysis_params_retry = FocusedAnalysisParams {
                use_summary: true,
                ..analysis_params.clone()
            };
            let summary_result = self
                .poll_progress_until_done(
                    &analysis_params_retry,
                    counter2,
                    ct,
                    entries,
                    total_files,
                    &params.symbol,
                )
                .await;

            if let Ok(summary_output) = summary_result {
                // Keep the original output's metadata; swap in the compact text.
                output.formatted = summary_output.formatted;
            } else {
                // Retry failed: surface the oversized-output error instead.
                // ~4 chars per token is a rough heuristic for the estimate.
                let estimated_tokens = output.formatted.len() / 4;
                let message = format!(
                    "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                    output.formatted.len(),
                    estimated_tokens
                );
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    message,
                    Some(error_meta(
                        "validation",
                        false,
                        "use summary=true or force=true",
                    )),
                ));
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly refused summary and the output is oversized.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - summary=true to get compact summary\n\
                 - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                )),
            ));
        }

        Ok(output)
    }
639
640    /// Private helper: Extract analysis logic for focused mode (`analyze_symbol`).
641    /// Returns the complete focused analysis output after spawning and monitoring progress.
642    /// Cancels the blocking task when `ct` is triggered; returns an error on cancellation.
643    #[instrument(skip(self, params, ct))]
644    async fn handle_focused_mode(
645        &self,
646        params: &AnalyzeSymbolParams,
647        ct: tokio_util::sync::CancellationToken,
648    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
649        let path = Path::new(&params.path);
650        let entries = match walk_directory(path, params.max_depth) {
651            Ok(e) => e,
652            Err(e) => {
653                return Err(ErrorData::new(
654                    rmcp::model::ErrorCode::INTERNAL_ERROR,
655                    format!("Failed to walk directory: {e}"),
656                    Some(error_meta(
657                        "resource",
658                        false,
659                        "check path permissions and availability",
660                    )),
661                ));
662            }
663        };
664        let entries = std::sync::Arc::new(entries);
665
666        if params.impl_only == Some(true) {
667            Self::validate_impl_only(&entries)?;
668        }
669
670        let total_files = entries.iter().filter(|e| !e.is_dir).count();
671        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
672
673        let analysis_params = FocusedAnalysisParams {
674            path: path.to_path_buf(),
675            symbol: params.symbol.clone(),
676            match_mode: params.match_mode.clone().unwrap_or_default(),
677            follow_depth: params.follow_depth.unwrap_or(1),
678            max_depth: params.max_depth,
679            ast_recursion_limit: params.ast_recursion_limit,
680            use_summary: false,
681            impl_only: params.impl_only,
682        };
683
684        let mut output = self
685            .run_focused_with_auto_summary(
686                params,
687                &analysis_params,
688                counter,
689                ct,
690                entries,
691                total_files,
692            )
693            .await?;
694
695        if params.impl_only == Some(true) {
696            let filter_line = format!(
697                "FILTER: impl_only=true ({} of {} callers shown)\n",
698                output.impl_trait_caller_count, output.unfiltered_caller_count
699            );
700            output.formatted = format!("{}{}", filter_line, output.formatted);
701
702            if output.impl_trait_caller_count == 0 {
703                output.formatted.push_str(
704                    "\nNOTE: No impl-trait callers found. The symbol may be a plain function or struct, not a trait method. Remove impl_only to see all callers.\n"
705                );
706            }
707        }
708
709        Ok(output)
710    }
711
712    #[instrument(skip(self, context))]
713    #[tool(
714        name = "analyze_directory",
715        description = "Analyze directory structure and code metrics for multi-file overview. Use this tool for directories; use analyze_file for a single file. Returns a tree with LOC, function count, class count, and test file markers. Respects .gitignore (results may differ from raw filesystem listing because .gitignore rules are applied). For repos with 1000+ files, use max_depth=2-3 and summary=true to stay within token budgets. Note: max_depth controls what is analyzed (traversal depth), while page_size controls how results are returned (chunking); these are independent. Strategy comparison: prefer pagination (page_size=50) over force=true to reduce per-call token overhead; use summary=true when counts and structure are sufficient and no pagination is needed; force=true is an escape hatch for exceptional cases. Empty directories return an empty tree with zero counts. Output auto-summarizes at 50K chars; use summary=true to force compact output. Paginate large results with cursor and page_size. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they? summary=true and cursor are mutually exclusive; passing both returns an error.",
716        output_schema = schema_for_type::<analyze::AnalysisOutput>(),
717        annotations(
718            title = "Analyze Directory",
719            read_only_hint = true,
720            destructive_hint = false,
721            idempotent_hint = true,
722            open_world_hint = false
723        )
724    )]
725    async fn analyze_directory(
726        &self,
727        params: Parameters<AnalyzeDirectoryParams>,
728        context: RequestContext<RoleServer>,
729    ) -> Result<CallToolResult, ErrorData> {
730        let params = params.0;
731        let ct = context.ct.clone();
732        let t_start = std::time::Instant::now();
733        let param_path = params.path.clone();
734        let max_depth_val = params.max_depth;
735        let seq = self
736            .session_call_seq
737            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
738        let sid = self.session_id.lock().await.clone();
739
740        // Call handler for analysis and progress tracking
741        let arc_output = match self.handle_overview_mode(&params, ct).await {
742            Ok(v) => v,
743            Err(e) => return Ok(err_to_tool_result(e)),
744        };
745        // Extract the value from Arc for modification. On a cache hit the Arc is shared,
746        // so try_unwrap may fail; fall back to cloning the underlying value in that case.
747        let mut output = match std::sync::Arc::try_unwrap(arc_output) {
748            Ok(owned) => owned,
749            Err(arc) => (*arc).clone(),
750        };
751
752        // summary=true (explicit) and cursor are mutually exclusive.
753        // Auto-summarization (summary=None + large output) must NOT block cursor pagination.
754        if summary_cursor_conflict(
755            params.output_control.summary,
756            params.pagination.cursor.as_deref(),
757        ) {
758            return Ok(err_to_tool_result(ErrorData::new(
759                rmcp::model::ErrorCode::INVALID_PARAMS,
760                "summary=true is incompatible with a pagination cursor; use one or the other"
761                    .to_string(),
762                Some(error_meta(
763                    "validation",
764                    false,
765                    "remove cursor or set summary=false",
766                )),
767            )));
768        }
769
770        // Apply summary/output size limiting logic
771        let use_summary = if params.output_control.force == Some(true) {
772            false
773        } else if params.output_control.summary == Some(true) {
774            true
775        } else if params.output_control.summary == Some(false) {
776            false
777        } else {
778            output.formatted.len() > SIZE_LIMIT
779        };
780
781        if use_summary {
782            output.formatted = format_summary(
783                &output.entries,
784                &output.files,
785                params.max_depth,
786                output.subtree_counts.as_deref(),
787            );
788        }
789
790        // Decode pagination cursor if provided
791        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
792        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
793            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
794                ErrorData::new(
795                    rmcp::model::ErrorCode::INVALID_PARAMS,
796                    e.to_string(),
797                    Some(error_meta("validation", false, "invalid cursor format")),
798                )
799            }) {
800                Ok(v) => v,
801                Err(e) => return Ok(err_to_tool_result(e)),
802            };
803            cursor_data.offset
804        } else {
805            0
806        };
807
808        // Apply pagination to files
809        let paginated =
810            match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
811                Ok(v) => v,
812                Err(e) => {
813                    return Ok(err_to_tool_result(ErrorData::new(
814                        rmcp::model::ErrorCode::INTERNAL_ERROR,
815                        e.to_string(),
816                        Some(error_meta("transient", true, "retry the request")),
817                    )));
818                }
819            };
820
821        let verbose = params.output_control.verbose.unwrap_or(false);
822        if !use_summary {
823            output.formatted = format_structure_paginated(
824                &paginated.items,
825                paginated.total,
826                params.max_depth,
827                Some(Path::new(&params.path)),
828                verbose,
829            );
830        }
831
832        // Update next_cursor in output after pagination (unless using summary mode)
833        if use_summary {
834            output.next_cursor = None;
835        } else {
836            output.next_cursor.clone_from(&paginated.next_cursor);
837        }
838
839        // Build final text output with pagination cursor if present (unless using summary mode)
840        let mut final_text = output.formatted.clone();
841        if !use_summary && let Some(cursor) = paginated.next_cursor {
842            final_text.push('\n');
843            final_text.push_str("NEXT_CURSOR: ");
844            final_text.push_str(&cursor);
845        }
846
847        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
848            .with_meta(Some(no_cache_meta()));
849        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
850        result.structured_content = Some(structured);
851        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
852        self.metrics_tx.send(crate::metrics::MetricEvent {
853            ts: crate::metrics::unix_ms(),
854            tool: "analyze_directory",
855            duration_ms: dur,
856            output_chars: final_text.len(),
857            param_path_depth: crate::metrics::path_component_count(&param_path),
858            max_depth: max_depth_val,
859            result: "ok",
860            error_type: None,
861            session_id: sid,
862            seq: Some(seq),
863        });
864        Ok(result)
865    }
866
867    #[instrument(skip(self, _context))]
868    #[tool(
869        name = "analyze_file",
870        description = "Extract semantic structure from a single source file only; pass a directory to analyze_directory instead. Returns functions with signatures, types, and line ranges; class and method definitions with inheritance, fields, and imports. Supported languages: Rust, Go, Java, Python, TypeScript, TSX, Fortran; unsupported file extensions return an error. Common mistake: passing a directory path returns an error; use analyze_directory for directories. Generated code with deeply nested ASTs may exceed 50K chars; use summary=true to get counts only. Supports pagination for large files via cursor/page_size. Use summary=true for compact output. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py. The fields parameter limits output to specific sections. Valid values: \"functions\", \"classes\", \"imports\". The FILE header (path, line count, section counts) is always emitted. Omit fields to return all sections. When summary=true, fields is ignored. When fields explicitly lists \"imports\", the imports section is rendered regardless of the verbose flag; in all other cases imports require verbose=true.",
871        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
872        annotations(
873            title = "Analyze File",
874            read_only_hint = true,
875            destructive_hint = false,
876            idempotent_hint = true,
877            open_world_hint = false
878        )
879    )]
    // Handler for the `analyze_file` tool: semantic extraction (functions,
    // classes, imports) for one file, with summary/size-limit gating and
    // pagination over top-level functions.
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        _context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        // Per-session call sequence number, used only for metrics correlation.
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        // Call handler for analysis and caching
        let arc_output = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        // Clone only the two fields that may be mutated per-request (formatted and
        // next_cursor). The heavy SemanticAnalysis data is shared via Arc and never
        // modified, so we borrow it directly from the cached pointer.
        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Apply summary/output size limiting logic: force=true always returns
        // full output, an explicit summary flag is honored, and otherwise we
        // auto-summarize when the formatted output exceeds SIZE_LIMIT.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // Reached only with explicit summary=false and oversized output:
            // reject with actionable guidance instead of silently truncating.
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - Narrow your scope (smaller directory, specific file)\n\
                 - Use analyze_symbol mode for targeted analysis\n\
                 - Reduce max_depth parameter",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true or narrow scope",
                )),
            )));
        }

        // Decode pagination cursor if provided (analyze_file)
        // NOTE(review): unlike analyze_directory, summary=true combined with a
        // cursor is not rejected here -- confirm whether that asymmetry is
        // intended.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Filter to top-level functions only (exclude methods) before pagination.
        // A function whose line span falls inside any class span is treated as
        // a method and excluded from the paginated list.
        let top_level_fns: Vec<crate::types::FunctionInfo> = arc_output
            .semantic
            .functions
            .iter()
            .filter(|func| {
                !arc_output
                    .semantic
                    .classes
                    .iter()
                    .any(|class| func.line >= class.line && func.end_line <= class.end_line)
            })
            .cloned()
            .collect();

        // Paginate top-level functions only
        let paginated =
            match paginate_slice(&top_level_fns, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        // Regenerate formatted output using the paginated formatter (handles verbose and pagination correctly)
        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            // fields: serde rejects unknown enum variants at deserialization; no runtime validation required
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
                params.fields.as_deref(),
            );
        }

        // Capture next_cursor from pagination result (unless using summary mode)
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        // Build final text output with pagination cursor if present (unless using summary mode)
        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(cursor);
        }

        // Build the response output, sharing SemanticAnalysis from the Arc to avoid cloning it.
        let response_output = analyze::FileAnalysisOutput::new(
            formatted,
            arc_output.semantic.clone(),
            line_count,
            next_cursor,
        );

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        // Record a metrics event for this call; duration saturates at u64::MAX.
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
        });
        Ok(result)
    }
1044
1045    #[instrument(skip(self, context))]
1046    #[tool(
1047        name = "analyze_symbol",
1048        description = "Build call graph for a named function or method across all files in a directory to trace a specific function's usage. Returns direct callers and callees. Default symbol lookup is case-sensitive exact-match (match_mode=exact); myFunc and myfunc are different symbols. If exact match fails, retry with match_mode=insensitive for a case-insensitive search. To list candidates matching a prefix, use match_mode=prefix. To find symbols containing a substring, use match_mode=contains. When prefix or contains matches multiple symbols, an error is returned listing all candidates so you can refine to a single match. A symbol unknown to the graph (not defined and not referenced) returns an error; a symbol that is defined but has no callers or callees returns empty chains without error. follow_depth warning: each increment can multiply output size exponentially; use follow_depth=1 for production use; follow_depth=2+ only for targeted deep dives. Use cursor/page_size to paginate call chains when results exceed page_size. impl_only=true: restrict callers to only those from 'impl Trait for Type' blocks (Rust only); returns INVALID_PARAMS for non-Rust directories; emits a FILTER header showing how many callers were retained. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep; Show only trait impl callers of the write method",
1049        output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
1050        annotations(
1051            title = "Analyze Symbol",
1052            read_only_hint = true,
1053            destructive_hint = false,
1054            idempotent_hint = true,
1055            open_world_hint = false
1056        )
1057    )]
1058    async fn analyze_symbol(
1059        &self,
1060        params: Parameters<AnalyzeSymbolParams>,
1061        context: RequestContext<RoleServer>,
1062    ) -> Result<CallToolResult, ErrorData> {
1063        let params = params.0;
1064        let ct = context.ct.clone();
1065        let t_start = std::time::Instant::now();
1066        let param_path = params.path.clone();
1067        let max_depth_val = params.follow_depth;
1068        let seq = self
1069            .session_call_seq
1070            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1071        let sid = self.session_id.lock().await.clone();
1072
1073        // Call handler for analysis and progress tracking
1074        let mut output = match self.handle_focused_mode(&params, ct).await {
1075            Ok(v) => v,
1076            Err(e) => return Ok(err_to_tool_result(e)),
1077        };
1078
1079        // Decode pagination cursor if provided (analyze_symbol)
1080        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
1081        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
1082            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
1083                ErrorData::new(
1084                    rmcp::model::ErrorCode::INVALID_PARAMS,
1085                    e.to_string(),
1086                    Some(error_meta("validation", false, "invalid cursor format")),
1087                )
1088            }) {
1089                Ok(v) => v,
1090                Err(e) => return Ok(err_to_tool_result(e)),
1091            };
1092            cursor_data.offset
1093        } else {
1094            0
1095        };
1096
1097        // SymbolFocus pagination: decode cursor mode to determine callers vs callees
1098        let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
1099            decode_cursor(cursor_str)
1100                .map(|c| c.mode)
1101                .unwrap_or(PaginationMode::Callers)
1102        } else {
1103            PaginationMode::Callers
1104        };
1105
1106        let use_summary = params.output_control.summary == Some(true);
1107        let verbose = params.output_control.verbose.unwrap_or(false);
1108
1109        let mut callee_cursor = match cursor_mode {
1110            PaginationMode::Callers => {
1111                let (paginated_items, paginated_next) = match paginate_focus_chains(
1112                    &output.prod_chains,
1113                    PaginationMode::Callers,
1114                    offset,
1115                    page_size,
1116                ) {
1117                    Ok(v) => v,
1118                    Err(e) => return Ok(err_to_tool_result(e)),
1119                };
1120
1121                if !use_summary
1122                    && (paginated_next.is_some()
1123                        || offset > 0
1124                        || !verbose
1125                        || !output.outgoing_chains.is_empty())
1126                {
1127                    let base_path = Path::new(&params.path);
1128                    output.formatted = format_focused_paginated(
1129                        &paginated_items,
1130                        output.prod_chains.len(),
1131                        PaginationMode::Callers,
1132                        &params.symbol,
1133                        &output.prod_chains,
1134                        &output.test_chains,
1135                        &output.outgoing_chains,
1136                        output.def_count,
1137                        offset,
1138                        Some(base_path),
1139                        verbose,
1140                    );
1141                    paginated_next
1142                } else {
1143                    None
1144                }
1145            }
1146            PaginationMode::Callees => {
1147                let (paginated_items, paginated_next) = match paginate_focus_chains(
1148                    &output.outgoing_chains,
1149                    PaginationMode::Callees,
1150                    offset,
1151                    page_size,
1152                ) {
1153                    Ok(v) => v,
1154                    Err(e) => return Ok(err_to_tool_result(e)),
1155                };
1156
1157                if paginated_next.is_some() || offset > 0 || !verbose {
1158                    let base_path = Path::new(&params.path);
1159                    output.formatted = format_focused_paginated(
1160                        &paginated_items,
1161                        output.outgoing_chains.len(),
1162                        PaginationMode::Callees,
1163                        &params.symbol,
1164                        &output.prod_chains,
1165                        &output.test_chains,
1166                        &output.outgoing_chains,
1167                        output.def_count,
1168                        offset,
1169                        Some(base_path),
1170                        verbose,
1171                    );
1172                    paginated_next
1173                } else {
1174                    None
1175                }
1176            }
1177            PaginationMode::Default => {
1178                unreachable!("SymbolFocus should only use Callers or Callees modes")
1179            }
1180        };
1181
1182        // When callers are exhausted and callees exist, bootstrap callee pagination
1183        // by emitting a {mode:callees, offset:0} cursor. This makes PaginationMode::Callees
1184        // reachable; without it the branch was dead code. Suppressed in summary mode
1185        // because summary and pagination are mutually exclusive.
1186        if callee_cursor.is_none()
1187            && cursor_mode == PaginationMode::Callers
1188            && !output.outgoing_chains.is_empty()
1189            && !use_summary
1190            && let Ok(cursor) = encode_cursor(&CursorData {
1191                mode: PaginationMode::Callees,
1192                offset: 0,
1193            })
1194        {
1195            callee_cursor = Some(cursor);
1196        }
1197
1198        // Update next_cursor in output
1199        output.next_cursor.clone_from(&callee_cursor);
1200
1201        // Build final text output with pagination cursor if present
1202        let mut final_text = output.formatted.clone();
1203        if let Some(cursor) = callee_cursor {
1204            final_text.push('\n');
1205            final_text.push_str("NEXT_CURSOR: ");
1206            final_text.push_str(&cursor);
1207        }
1208
1209        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1210            .with_meta(Some(no_cache_meta()));
1211        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1212        result.structured_content = Some(structured);
1213        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1214        self.metrics_tx.send(crate::metrics::MetricEvent {
1215            ts: crate::metrics::unix_ms(),
1216            tool: "analyze_symbol",
1217            duration_ms: dur,
1218            output_chars: final_text.len(),
1219            param_path_depth: crate::metrics::path_component_count(&param_path),
1220            max_depth: max_depth_val,
1221            result: "ok",
1222            error_type: None,
1223            session_id: sid,
1224            seq: Some(seq),
1225        });
1226        Ok(result)
1227    }
1228
1229    #[instrument(skip(self, _context))]
1230    #[tool(
1231        name = "analyze_module",
1232        description = "Index functions and imports in a single source file with minimal token cost. Returns name, line_count, language, function names with line numbers, and import list only -- no signatures, no types, no call graphs, no references. ~75% smaller output than analyze_file. Use analyze_file when you need function signatures, types, or class details; use analyze_module when you only need a function/import index to orient in a file or survey many files in sequence. Use analyze_directory for multi-file overviews; use analyze_symbol to trace call graphs for a specific function. Supported languages: Rust, Go, Java, Python, TypeScript, TSX, Fortran; unsupported extensions return an error. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs. Pagination, summary, force, and verbose parameters are not supported by this tool.",
1233        output_schema = schema_for_type::<types::ModuleInfo>(),
1234        annotations(
1235            title = "Analyze Module",
1236            read_only_hint = true,
1237            destructive_hint = false,
1238            idempotent_hint = true,
1239            open_world_hint = false
1240        )
1241    )]
1242    async fn analyze_module(
1243        &self,
1244        params: Parameters<AnalyzeModuleParams>,
1245        _context: RequestContext<RoleServer>,
1246    ) -> Result<CallToolResult, ErrorData> {
1247        let params = params.0;
1248        let t_start = std::time::Instant::now();
1249        let param_path = params.path.clone();
1250        let seq = self
1251            .session_call_seq
1252            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1253        let sid = self.session_id.lock().await.clone();
1254
1255        // Issue 340: Guard against directory paths
1256        if std::fs::metadata(&params.path)
1257            .map(|m| m.is_dir())
1258            .unwrap_or(false)
1259        {
1260            let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1261            self.metrics_tx.send(crate::metrics::MetricEvent {
1262                ts: crate::metrics::unix_ms(),
1263                tool: "analyze_module",
1264                duration_ms: dur,
1265                output_chars: 0,
1266                param_path_depth: crate::metrics::path_component_count(&param_path),
1267                max_depth: None,
1268                result: "error",
1269                error_type: Some("invalid_params".to_string()),
1270                session_id: sid.clone(),
1271                seq: Some(seq),
1272            });
1273            return Ok(err_to_tool_result(ErrorData::new(
1274                rmcp::model::ErrorCode::INVALID_PARAMS,
1275                format!(
1276                    "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1277                    params.path
1278                ),
1279                Some(error_meta(
1280                    "validation",
1281                    false,
1282                    "use analyze_directory for directories",
1283                )),
1284            )));
1285        }
1286
1287        let module_info = match analyze::analyze_module_file(&params.path).map_err(|e| {
1288            ErrorData::new(
1289                rmcp::model::ErrorCode::INVALID_PARAMS,
1290                format!("Failed to analyze module: {e}"),
1291                Some(error_meta(
1292                    "validation",
1293                    false,
1294                    "ensure file exists, is readable, and has a supported extension",
1295                )),
1296            )
1297        }) {
1298            Ok(v) => v,
1299            Err(e) => return Ok(err_to_tool_result(e)),
1300        };
1301
1302        let text = format_module_info(&module_info);
1303        let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1304            .with_meta(Some(no_cache_meta()));
1305        let structured = match serde_json::to_value(&module_info).map_err(|e| {
1306            ErrorData::new(
1307                rmcp::model::ErrorCode::INTERNAL_ERROR,
1308                format!("serialization failed: {e}"),
1309                Some(error_meta("internal", false, "report this as a bug")),
1310            )
1311        }) {
1312            Ok(v) => v,
1313            Err(e) => return Ok(err_to_tool_result(e)),
1314        };
1315        result.structured_content = Some(structured);
1316        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1317        self.metrics_tx.send(crate::metrics::MetricEvent {
1318            ts: crate::metrics::unix_ms(),
1319            tool: "analyze_module",
1320            duration_ms: dur,
1321            output_chars: text.len(),
1322            param_path_depth: crate::metrics::path_component_count(&param_path),
1323            max_depth: None,
1324            result: "ok",
1325            error_type: None,
1326            session_id: sid,
1327            seq: Some(seq),
1328        });
1329        Ok(result)
1330    }
1331}
1332
/// Parameters for the focused (symbol call-graph) analysis task.
#[derive(Clone)]
struct FocusedAnalysisParams {
    /// Directory to scan for symbol definitions and references.
    path: std::path::PathBuf,
    /// Symbol name to look up.
    symbol: String,
    /// How the symbol name is matched (exact / insensitive / prefix / contains).
    match_mode: SymbolMatchMode,
    /// Call-chain traversal depth; each increment can grow output rapidly.
    follow_depth: u32,
    /// Optional directory traversal depth limit.
    max_depth: Option<u32>,
    /// Optional override for the AST recursion guard.
    ast_recursion_limit: Option<usize>,
    /// Whether summary output was requested for this call.
    use_summary: bool,
    /// Rust-only: restrict callers to those inside `impl Trait for Type` blocks.
    impl_only: Option<bool>,
}
1345
1346#[tool_handler]
1347impl ServerHandler for CodeAnalyzer {
1348    fn get_info(&self) -> InitializeResult {
1349        let excluded = crate::EXCLUDED_DIRS.join(", ");
1350        let instructions = format!(
1351            "Recommended workflow for unknown repositories:\n\
1352            1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify the source package directory \
1353            (typically the largest directory by file count; exclude {excluded}).\n\
1354            2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for a module map with per-package class and function counts. Include test directories (e.g., tests/, testutil/, files matching *_test.go, test_*.py, test_*.rs, *_test.rs, *.spec.ts, *.spec.js) in the module map; test files are valid analysis targets and must not be skipped.\n\
1355            3. For key files identified in step 2, prefer analyze_module to get a lightweight function/import index (~75% smaller output) when you only need function names and imports; call analyze_file when you need signatures, types, or class structure.\n\
1356            4. Use analyze_symbol to trace call graphs for specific functions found in step 3.\n\
1357            Prefer summary=true on large directories (1000+ files). Set max_depth=2 for the first call; increase only if packages are too large to differentiate. \
1358            Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
1359        );
1360        let capabilities = ServerCapabilities::builder()
1361            .enable_logging()
1362            .enable_tools()
1363            .enable_tool_list_changed()
1364            .enable_completions()
1365            .build();
1366        let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
1367            .with_title("Code Analyze MCP")
1368            .with_description("MCP server for code structure analysis using tree-sitter");
1369        InitializeResult::new(capabilities)
1370            .with_server_info(server_info)
1371            .with_instructions(&instructions)
1372    }
1373
1374    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
1375        let mut peer_lock = self.peer.lock().await;
1376        *peer_lock = Some(context.peer.clone());
1377        drop(peer_lock);
1378
1379        // Generate session_id in MILLIS-N format
1380        let millis = std::time::SystemTime::now()
1381            .duration_since(std::time::UNIX_EPOCH)
1382            .unwrap_or_default()
1383            .as_millis()
1384            .try_into()
1385            .unwrap_or(u64::MAX);
1386        let counter = GLOBAL_SESSION_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
1387        let sid = format!("{millis}-{counter}");
1388        {
1389            let mut session_id_lock = self.session_id.lock().await;
1390            *session_id_lock = Some(sid);
1391        }
1392        self.session_call_seq
1393            .store(0, std::sync::atomic::Ordering::Relaxed);
1394
1395        // Spawn consumer task to drain log events from channel with batching.
1396        let peer = self.peer.clone();
1397        let event_rx = self.event_rx.clone();
1398
1399        tokio::spawn(async move {
1400            let rx = {
1401                let mut rx_lock = event_rx.lock().await;
1402                rx_lock.take()
1403            };
1404
1405            if let Some(mut receiver) = rx {
1406                let mut buffer = Vec::with_capacity(64);
1407                loop {
1408                    // Drain up to 64 events from channel
1409                    receiver.recv_many(&mut buffer, 64).await;
1410
1411                    if buffer.is_empty() {
1412                        // Channel closed, exit consumer task
1413                        break;
1414                    }
1415
1416                    // Acquire peer lock once per batch
1417                    let peer_lock = peer.lock().await;
1418                    if let Some(peer) = peer_lock.as_ref() {
1419                        for log_event in buffer.drain(..) {
1420                            let notification = ServerNotification::LoggingMessageNotification(
1421                                Notification::new(LoggingMessageNotificationParam {
1422                                    level: log_event.level,
1423                                    logger: Some(log_event.logger),
1424                                    data: log_event.data,
1425                                }),
1426                            );
1427                            if let Err(e) = peer.send_notification(notification).await {
1428                                warn!("Failed to send logging notification: {}", e);
1429                            }
1430                        }
1431                    }
1432                }
1433            }
1434        });
1435    }
1436
1437    #[instrument(skip(self, _context))]
1438    async fn on_cancelled(
1439        &self,
1440        notification: CancelledNotificationParam,
1441        _context: NotificationContext<RoleServer>,
1442    ) {
1443        tracing::info!(
1444            request_id = ?notification.request_id,
1445            reason = ?notification.reason,
1446            "Received cancellation notification"
1447        );
1448    }
1449
1450    #[instrument(skip(self, _context))]
1451    async fn complete(
1452        &self,
1453        request: CompleteRequestParams,
1454        _context: RequestContext<RoleServer>,
1455    ) -> Result<CompleteResult, ErrorData> {
1456        // Dispatch on argument name: "path" or "symbol"
1457        let argument_name = &request.argument.name;
1458        let argument_value = &request.argument.value;
1459
1460        let completions = match argument_name.as_str() {
1461            "path" => {
1462                // Path completions: use current directory as root
1463                let root = Path::new(".");
1464                completion::path_completions(root, argument_value)
1465            }
1466            "symbol" => {
1467                // Symbol completions: need the path argument from context
1468                let path_arg = request
1469                    .context
1470                    .as_ref()
1471                    .and_then(|ctx| ctx.get_argument("path"));
1472
1473                match path_arg {
1474                    Some(path_str) => {
1475                        let path = Path::new(path_str);
1476                        completion::symbol_completions(&self.cache, path, argument_value)
1477                    }
1478                    None => Vec::new(),
1479                }
1480            }
1481            _ => Vec::new(),
1482        };
1483
1484        // Create CompletionInfo with has_more flag if >100 results
1485        let total_count = u32::try_from(completions.len()).unwrap_or(u32::MAX);
1486        let (values, has_more) = if completions.len() > 100 {
1487            (completions.into_iter().take(100).collect(), true)
1488        } else {
1489            (completions, false)
1490        };
1491
1492        let completion_info =
1493            match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
1494                Ok(info) => info,
1495                Err(_) => {
1496                    // Graceful degradation: return empty on error
1497                    CompletionInfo::with_all_values(Vec::new())
1498                        .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
1499                }
1500            };
1501
1502        Ok(CompleteResult::new(completion_info))
1503    }
1504
1505    async fn set_level(
1506        &self,
1507        params: SetLevelRequestParams,
1508        _context: RequestContext<RoleServer>,
1509    ) -> Result<(), ErrorData> {
1510        let level_filter = match params.level {
1511            LoggingLevel::Debug => LevelFilter::DEBUG,
1512            LoggingLevel::Info | LoggingLevel::Notice => LevelFilter::INFO,
1513            LoggingLevel::Warning => LevelFilter::WARN,
1514            LoggingLevel::Error
1515            | LoggingLevel::Critical
1516            | LoggingLevel::Alert
1517            | LoggingLevel::Emergency => LevelFilter::ERROR,
1518        };
1519
1520        let mut filter_lock = self.log_level_filter.lock().unwrap();
1521        *filter_lock = level_filter;
1522        Ok(())
1523    }
1524}
1525
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a `CodeAnalyzer` wired to a disconnected peer (`None`), an
    /// INFO-level filter, and throwaway event/metrics channels.
    ///
    /// Shared fixture for every test below — the channel senders are dropped
    /// on return, which is fine because these tests never consume events.
    fn make_analyzer() -> CodeAnalyzer {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        )
    }

    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        // With no connected peer, emit_progress must be a silent no-op.
        let analyzer = make_analyzer();
        let token = ProgressToken(NumberOrString::String("test".into()));
        // Should complete without panic
        analyzer
            .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
            .await;
    }

    #[test]
    fn test_summary_cursor_conflict() {
        // Only the combination summary=Some(true) + cursor=Some(_) conflicts.
        assert!(summary_cursor_conflict(Some(true), Some("cursor")));
        assert!(!summary_cursor_conflict(Some(true), None));
        assert!(!summary_cursor_conflict(None, Some("x")));
        assert!(!summary_cursor_conflict(None, None));
    }

    #[tokio::test]
    async fn test_validate_impl_only_non_rust_returns_invalid_params() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.py"), "def foo(): pass").unwrap();

        let analyzer = make_analyzer();
        // Call analyze_symbol with impl_only=true on a Python-only directory via the tool API.
        // We use handle_focused_mode which calls validate_impl_only internally.
        let entries: Vec<traversal::WalkEntry> =
            traversal::walk_directory(dir.path(), None).unwrap_or_default();
        let result = CodeAnalyzer::validate_impl_only(&entries);
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert_eq!(err.code, rmcp::model::ErrorCode::INVALID_PARAMS);
        drop(analyzer); // ensure it compiles with analyzer in scope
    }

    #[tokio::test]
    async fn test_no_cache_meta_on_analyze_directory_result() {
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.rs"), "fn main() {}").unwrap();

        let analyzer = make_analyzer();
        let params = AnalyzeDirectoryParams {
            path: dir.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: None,
            },
        };
        let ct = tokio_util::sync::CancellationToken::new();
        let arc_output = analyzer.handle_overview_mode(&params, ct).await.unwrap();
        // Verify the no_cache_meta shape by constructing it directly and checking the shape
        let meta = no_cache_meta();
        assert_eq!(
            meta.0.get("cache_hint").and_then(|v| v.as_str()),
            Some("no-cache"),
        );
        drop(arc_output);
    }

    #[test]
    fn test_complete_path_completions_returns_suggestions() {
        // Test the underlying completion function (same code path as complete()) directly
        // to avoid needing a constructed RequestContext<RoleServer>.
        // CARGO_MANIFEST_DIR is <workspace>/code-analyze-mcp; parent is the workspace root,
        // which contains code-analyze-core/ and code-analyze-mcp/ matching the "code-" prefix.
        let manifest_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR"));
        let workspace_root = manifest_dir.parent().expect("manifest dir has parent");
        let suggestions = completion::path_completions(workspace_root, "code-");
        assert!(
            !suggestions.is_empty(),
            "expected completions for prefix 'code-' in workspace root"
        );
    }

    #[tokio::test]
    async fn test_handle_overview_mode_verbose_no_summary_block() {
        use code_analyze_core::pagination::{PaginationMode, paginate_slice};
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let tmp = TempDir::new().unwrap();
        std::fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();

        let analyzer = make_analyzer();

        let params = AnalyzeDirectoryParams {
            path: tmp.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: Some(true),
            },
        };

        let ct = tokio_util::sync::CancellationToken::new();
        let output = analyzer.handle_overview_mode(&params, ct).await.unwrap();

        // Replicate the handler's formatting path (the fix site)
        let use_summary = output.formatted.len() > SIZE_LIMIT; // summary=None, force=None, small output
        let paginated =
            paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
        let verbose = true;
        let formatted = if !use_summary {
            format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(std::path::Path::new(&params.path)),
                verbose,
            )
        } else {
            output.formatted.clone()
        };

        // After the fix: verbose=true must not emit the SUMMARY: block
        assert!(
            !formatted.contains("SUMMARY:"),
            "verbose=true must not emit SUMMARY: block; got: {}",
            &formatted[..formatted.len().min(300)]
        );
        assert!(
            formatted.contains("PAGINATED:"),
            "verbose=true must emit PAGINATED: header"
        );
        assert!(
            formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
            "verbose=true must emit FILES section header"
        );
    }
}