//! `code_analyze_mcp` — library crate root (`lib.rs`).
1pub mod analyze;
2pub mod cache;
3pub mod completion;
4pub mod dataflow;
5pub mod formatter;
6pub mod graph;
7pub mod lang;
8pub mod languages;
9pub mod logging;
10pub mod pagination;
11pub mod parser;
12pub mod test_detection;
13pub mod traversal;
14pub mod types;
15
16use cache::AnalysisCache;
17use formatter::{
18    format_file_details_paginated, format_file_details_summary, format_focused_paginated,
19    format_structure_paginated, format_summary,
20};
21use logging::LogEvent;
22use pagination::{
23    CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
24};
25use rmcp::handler::server::tool::ToolRouter;
26use rmcp::handler::server::wrapper::Parameters;
27use rmcp::model::{
28    CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
29    CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
30    LoggingMessageNotificationParam, Notification, NumberOrString, ProgressNotificationParam,
31    ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
32};
33use rmcp::service::{NotificationContext, RequestContext};
34use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
35use serde_json::Value;
36use std::path::Path;
37use std::sync::{Arc, Mutex};
38use tokio::sync::{Mutex as TokioMutex, mpsc};
39use tracing::{instrument, warn};
40use tracing_subscriber::filter::LevelFilter;
41use traversal::walk_directory;
42use types::{AnalysisMode, AnalyzeParams};
43
/// Character-count threshold (50K chars) for formatted output: above this the
/// server auto-summarizes or rejects the result unless `force=true`/`summary`
/// parameters say otherwise.
const SIZE_LIMIT: usize = 50_000;
45
46/// Helper function for paginating focus chains (callers or callees).
47/// Returns (items, re-encoded_cursor_option).
48fn paginate_focus_chains(
49    chains: &[graph::CallChain],
50    mode: PaginationMode,
51    offset: usize,
52    page_size: usize,
53) -> Result<(Vec<graph::CallChain>, Option<String>), ErrorData> {
54    let paginated = paginate_slice(chains, offset, page_size, mode)
55        .map_err(|e| ErrorData::new(rmcp::model::ErrorCode::INTERNAL_ERROR, e.to_string(), None))?;
56
57    if paginated.next_cursor.is_none() && offset == 0 {
58        return Ok((paginated.items, None));
59    }
60
61    let next = if let Some(raw_cursor) = paginated.next_cursor {
62        let decoded = decode_cursor(&raw_cursor).map_err(|e| {
63            ErrorData::new(rmcp::model::ErrorCode::INTERNAL_ERROR, e.to_string(), None)
64        })?;
65        Some(
66            encode_cursor(&CursorData {
67                mode,
68                offset: decoded.offset,
69            })
70            .map_err(|e| {
71                ErrorData::new(rmcp::model::ErrorCode::INTERNAL_ERROR, e.to_string(), None)
72            })?,
73        )
74    } else {
75        None
76    };
77
78    Ok((paginated.items, next))
79}
80
81/// Helper function to create the output schema for the analyze tool
82fn create_analyze_output_schema() -> std::sync::Arc<serde_json::Map<String, Value>> {
83    use serde_json::json;
84    let schema = json!({
85        "type": "object",
86        "properties": {
87            "mode": {
88                "type": "string",
89                "enum": ["overview", "file_details", "symbol_focus"],
90                "description": "The analysis mode used"
91            },
92            "formatted": {
93                "type": "string",
94                "description": "Formatted text output of the analysis"
95            },
96            "files": {
97                "type": "array",
98                "description": "List of files analyzed (overview mode only)",
99                "items": {
100                    "type": "object",
101                    "properties": {
102                        "path": { "type": "string" },
103                        "language": { "type": "string" },
104                        "loc": { "type": "integer" },
105                        "functions": { "type": "integer" },
106                        "classes": { "type": "integer" }
107                    }
108                }
109            },
110            "semantic": {
111                "type": "object",
112                "description": "Semantic analysis data (file_details mode only)",
113                "properties": {
114                    "functions": { "type": "array" },
115                    "classes": { "type": "array" },
116                    "imports": { "type": "array" }
117                }
118            },
119            "line_count": {
120                "type": "integer",
121                "description": "Total line count (file_details mode only)"
122            },
123            "next_cursor": {
124                "type": ["string", "null"],
125                "description": "Pagination cursor for next page of results"
126            }
127        },
128        "required": ["formatted"]
129    });
130
131    if let Value::Object(map) = schema {
132        std::sync::Arc::new(map)
133    } else {
134        std::sync::Arc::new(serde_json::Map::new())
135    }
136}
137
/// MCP server handler exposing the code-structure analysis tool.
#[derive(Clone)]
pub struct CodeAnalyzer {
    // Router that dispatches MCP tool calls to the #[tool] methods on this type.
    tool_router: ToolRouter<Self>,
    // Cache of per-file analysis results, keyed by path + mtime + analysis mode.
    cache: AnalysisCache,
    // Connected peer used to send server notifications (e.g. progress);
    // `None` until a peer is attached.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    // Current tracing log-level filter, shared behind a std Mutex.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    // Receiver for log events; wrapped in Option — presumably taken once by a
    // forwarding loop elsewhere (TODO confirm against the consumer).
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
}
146
147#[tool_router]
148impl CodeAnalyzer {
149    pub fn new(
150        peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
151        log_level_filter: Arc<Mutex<LevelFilter>>,
152        event_rx: mpsc::UnboundedReceiver<LogEvent>,
153    ) -> Self {
154        CodeAnalyzer {
155            tool_router: Self::tool_router(),
156            cache: AnalysisCache::new(100),
157            peer,
158            log_level_filter,
159            event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
160        }
161    }
162
163    #[instrument(skip(self))]
164    async fn emit_progress(
165        &self,
166        token: &ProgressToken,
167        progress: f64,
168        total: f64,
169        message: String,
170    ) {
171        let peer = self.peer.lock().await.clone();
172        if let Some(peer) = peer {
173            let notification = ServerNotification::ProgressNotification(Notification::new(
174                ProgressNotificationParam {
175                    progress_token: token.clone(),
176                    progress,
177                    total: Some(total),
178                    message: Some(message),
179                },
180            ));
181            if let Err(e) = peer.send_notification(notification).await {
182                warn!("Failed to send progress notification: {}", e);
183            }
184        }
185    }
186
187    #[instrument(skip(self, context))]
188    #[tool(
189        title = "Code Structure Analyzer",
190        description = "Analyze code structure in 3 modes: 1) Overview - directory tree with LOC/function/class counts (use max_depth to limit). 2) FileDetails - functions, classes, imports for one file. 3) SymbolFocus - call graph for a named symbol across a directory (requires focus, case-sensitive). Typical flow: directory overview -> file details -> symbol focus. For large overview output (>50K chars), use summary=true to get totals and top-level structure without per-file detail; output auto-summarizes at the 50K threshold. Use cursor/page_size to paginate files (overview) or functions (file_details) when next_cursor appears in the response. Functions called >3x show N.",
191        output_schema = create_analyze_output_schema(),
192        annotations(
193            read_only_hint = true,
194            destructive_hint = false,
195            idempotent_hint = true,
196            open_world_hint = false
197        )
198    )]
199    async fn analyze(
200        &self,
201        params: Parameters<AnalyzeParams>,
202        context: RequestContext<RoleServer>,
203    ) -> Result<CallToolResult, ErrorData> {
204        let params = params.0;
205        let ct = context.ct.clone();
206
207        // Determine mode if not provided
208        let mode = params
209            .mode
210            .unwrap_or_else(|| analyze::determine_mode(&params.path, params.focus.as_deref()));
211
212        // Dispatch based on mode and construct ModeResult
213        let mode_result = match mode {
214            AnalysisMode::Overview => {
215                let path = Path::new(&params.path);
216                let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
217                let counter_clone = counter.clone();
218                let path_owned = path.to_path_buf();
219                let max_depth = params.max_depth;
220                let ct_clone = ct.clone();
221
222                // Get total file count for progress reporting
223                let total_files = match walk_directory(path, max_depth) {
224                    Ok(entries) => entries.iter().filter(|e| !e.is_dir).count(),
225                    Err(_) => 0,
226                };
227
228                // Spawn blocking analysis with progress tracking
229                let handle = tokio::task::spawn_blocking(move || {
230                    analyze::analyze_directory_with_progress(
231                        &path_owned,
232                        max_depth,
233                        counter_clone,
234                        ct_clone,
235                    )
236                });
237
238                // Poll and emit progress every 100ms
239                let token = ProgressToken(NumberOrString::String(
240                    format!(
241                        "analyze-overview-{}",
242                        std::time::SystemTime::now()
243                            .duration_since(std::time::UNIX_EPOCH)
244                            .map(|d| d.as_nanos())
245                            .unwrap_or(0)
246                    )
247                    .into(),
248                ));
249                let mut last_progress = 0usize;
250                let mut cancelled = false;
251                loop {
252                    tokio::time::sleep(std::time::Duration::from_millis(100)).await;
253                    if ct.is_cancelled() {
254                        cancelled = true;
255                        break;
256                    }
257                    let current = counter.load(std::sync::atomic::Ordering::Relaxed);
258                    if current != last_progress && total_files > 0 {
259                        self.emit_progress(
260                            &token,
261                            current as f64,
262                            total_files as f64,
263                            format!("Analyzing {}/{} files", current, total_files),
264                        )
265                        .await;
266                        last_progress = current;
267                    }
268                    if handle.is_finished() {
269                        break;
270                    }
271                }
272
273                // Emit final 100% progress only if not cancelled
274                if !cancelled && total_files > 0 {
275                    self.emit_progress(
276                        &token,
277                        total_files as f64,
278                        total_files as f64,
279                        format!("Completed analyzing {} files", total_files),
280                    )
281                    .await;
282                }
283
284                match handle.await {
285                    Ok(Ok(output)) => types::ModeResult::Overview(output),
286                    Ok(Err(analyze::AnalyzeError::Cancelled)) => {
287                        let output = analyze::AnalysisOutput {
288                            formatted: "Analysis cancelled".to_string(),
289                            files: vec![],
290                            entries: vec![],
291                            next_cursor: None,
292                        };
293                        types::ModeResult::Overview(output)
294                    }
295                    Ok(Err(e)) => {
296                        let output = analyze::AnalysisOutput {
297                            formatted: format!("Error analyzing directory: {}", e),
298                            files: vec![],
299                            entries: vec![],
300                            next_cursor: None,
301                        };
302                        types::ModeResult::Overview(output)
303                    }
304                    Err(e) => {
305                        let output = analyze::AnalysisOutput {
306                            formatted: format!("Task join error: {}", e),
307                            files: vec![],
308                            entries: vec![],
309                            next_cursor: None,
310                        };
311                        types::ModeResult::Overview(output)
312                    }
313                }
314            }
315            AnalysisMode::FileDetails => {
316                // Build cache key from file metadata
317                let cache_key = std::fs::metadata(&params.path).ok().and_then(|meta| {
318                    meta.modified().ok().map(|mtime| cache::CacheKey {
319                        path: std::path::PathBuf::from(&params.path),
320                        modified: mtime,
321                        mode: AnalysisMode::FileDetails,
322                    })
323                });
324
325                // Check cache first
326                let output_result = if let Some(ref key) = cache_key {
327                    if let Some(cached) = self.cache.get(key) {
328                        Ok(cached)
329                    } else {
330                        // Cache miss, analyze and store
331                        match analyze::analyze_file(&params.path, params.ast_recursion_limit) {
332                            Ok(output) => {
333                                let arc_output = std::sync::Arc::new(output);
334                                self.cache.put(key.clone(), arc_output.clone());
335                                Ok(arc_output)
336                            }
337                            Err(e) => Err(format!("Error analyzing file: {}", e)),
338                        }
339                    }
340                } else {
341                    // No cache key available, analyze directly
342                    match analyze::analyze_file(&params.path, params.ast_recursion_limit) {
343                        Ok(output) => Ok(std::sync::Arc::new(output)),
344                        Err(e) => Err(format!("Error analyzing file: {}", e)),
345                    }
346                };
347
348                match output_result {
349                    Ok(output) => types::ModeResult::FileDetails((*output).clone()),
350                    Err(e) => {
351                        let output = analyze::FileAnalysisOutput {
352                            formatted: e,
353                            semantic: types::SemanticAnalysis {
354                                functions: vec![],
355                                classes: vec![],
356                                imports: vec![],
357                                references: vec![],
358                                call_frequency: std::collections::HashMap::new(),
359                                calls: vec![],
360                                assignments: vec![],
361                                field_accesses: vec![],
362                            },
363                            line_count: 0,
364                            next_cursor: None,
365                        };
366                        types::ModeResult::FileDetails(output)
367                    }
368                }
369            }
370            AnalysisMode::SymbolFocus => {
371                let focus = params.focus.as_deref().unwrap_or("");
372                let follow_depth = params.follow_depth.unwrap_or(1);
373                let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
374                let counter_clone = counter.clone();
375                let path = Path::new(&params.path);
376                let path_owned = path.to_path_buf();
377                let max_depth = params.max_depth;
378                let focus_owned = focus.to_string();
379                let ast_recursion_limit = params.ast_recursion_limit;
380                let ct_clone = ct.clone();
381
382                // Compute use_summary before spawning: explicit params only
383                // (auto-detect requires full output first, handled after task)
384                let use_summary_for_task =
385                    params.force != Some(true) && params.summary == Some(true);
386
387                // Get total file count for progress reporting
388                let total_files = match walk_directory(path, max_depth) {
389                    Ok(entries) => entries.iter().filter(|e| !e.is_dir).count(),
390                    Err(_) => 0,
391                };
392
393                // Spawn blocking analysis with progress tracking
394                let handle = tokio::task::spawn_blocking(move || {
395                    analyze::analyze_focused_with_progress(
396                        &path_owned,
397                        &focus_owned,
398                        follow_depth,
399                        max_depth,
400                        ast_recursion_limit,
401                        counter_clone,
402                        ct_clone,
403                        use_summary_for_task,
404                    )
405                });
406
407                // Poll and emit progress every 100ms
408                let token = ProgressToken(NumberOrString::String(
409                    format!(
410                        "analyze-symbol-{}",
411                        std::time::SystemTime::now()
412                            .duration_since(std::time::UNIX_EPOCH)
413                            .map(|d| d.as_nanos())
414                            .unwrap_or(0)
415                    )
416                    .into(),
417                ));
418                let mut last_progress = 0usize;
419                let mut cancelled = false;
420                loop {
421                    tokio::time::sleep(std::time::Duration::from_millis(100)).await;
422                    if ct.is_cancelled() {
423                        cancelled = true;
424                        break;
425                    }
426                    let current = counter.load(std::sync::atomic::Ordering::Relaxed);
427                    if current != last_progress && total_files > 0 {
428                        self.emit_progress(
429                            &token,
430                            current as f64,
431                            total_files as f64,
432                            format!(
433                                "Analyzing {}/{} files for symbol '{}'",
434                                current, total_files, focus
435                            ),
436                        )
437                        .await;
438                        last_progress = current;
439                    }
440                    if handle.is_finished() {
441                        break;
442                    }
443                }
444
445                // Emit final 100% progress only if not cancelled
446                if !cancelled && total_files > 0 {
447                    self.emit_progress(
448                        &token,
449                        total_files as f64,
450                        total_files as f64,
451                        format!(
452                            "Completed analyzing {} files for symbol '{}'",
453                            total_files, focus
454                        ),
455                    )
456                    .await;
457                }
458
459                match handle.await {
460                    Ok(Ok(output)) => types::ModeResult::SymbolFocus(output),
461                    Ok(Err(analyze::AnalyzeError::Cancelled)) => {
462                        let output = analyze::FocusedAnalysisOutput {
463                            formatted: "Analysis cancelled".to_string(),
464                            next_cursor: None,
465                            prod_chains: vec![],
466                            test_chains: vec![],
467                            outgoing_chains: vec![],
468                            def_count: 0,
469                        };
470                        types::ModeResult::SymbolFocus(output)
471                    }
472                    Ok(Err(e)) => {
473                        let output = analyze::FocusedAnalysisOutput {
474                            formatted: format!("Error analyzing symbol focus: {}", e),
475                            next_cursor: None,
476                            prod_chains: vec![],
477                            test_chains: vec![],
478                            outgoing_chains: vec![],
479                            def_count: 0,
480                        };
481                        types::ModeResult::SymbolFocus(output)
482                    }
483                    Err(e) => {
484                        let output = analyze::FocusedAnalysisOutput {
485                            formatted: format!("Task join error: {}", e),
486                            next_cursor: None,
487                            prod_chains: vec![],
488                            test_chains: vec![],
489                            outgoing_chains: vec![],
490                            def_count: 0,
491                        };
492                        types::ModeResult::SymbolFocus(output)
493                    }
494                }
495            }
496        };
497
498        // Decode pagination cursor if provided
499        let page_size = params.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
500        let offset = if let Some(ref cursor_str) = params.cursor {
501            let cursor_data = decode_cursor(cursor_str).map_err(|e| {
502                ErrorData::new(rmcp::model::ErrorCode::INVALID_PARAMS, e.to_string(), None)
503            })?;
504            cursor_data.offset
505        } else {
506            0
507        };
508
509        // Convert ModeResult to text-only content with pagination and capture structured JSON
510        let (formatted_text, next_cursor, structured_value) = match mode_result {
511            types::ModeResult::Overview(mut output) => {
512                // Apply summary/output size limiting logic
513                // Determine if we should use summary
514                let use_summary = if params.force == Some(true) {
515                    false
516                } else if params.summary == Some(true) {
517                    true
518                } else if params.summary == Some(false) {
519                    false
520                } else {
521                    output.formatted.len() > SIZE_LIMIT
522                };
523
524                if use_summary {
525                    output.formatted = format_summary(
526                        &output.entries,
527                        &output.files,
528                        params.max_depth,
529                        Some(Path::new(&params.path)),
530                    );
531                }
532
533                // Apply pagination to files
534                let paginated =
535                    paginate_slice(&output.files, offset, page_size, PaginationMode::Default)
536                        .map_err(|e| {
537                            ErrorData::new(
538                                rmcp::model::ErrorCode::INTERNAL_ERROR,
539                                e.to_string(),
540                                None,
541                            )
542                        })?;
543
544                if paginated.next_cursor.is_some() || offset > 0 {
545                    output.formatted = format_structure_paginated(
546                        &paginated.items,
547                        paginated.total,
548                        params.max_depth,
549                        Some(Path::new(&params.path)),
550                    );
551                }
552
553                // Update next_cursor in output after pagination
554                output.next_cursor = paginated.next_cursor.clone();
555
556                // Serialize after all mutations
557                let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
558                (output.formatted, paginated.next_cursor, structured)
559            }
560            types::ModeResult::FileDetails(mut output) => {
561                // Apply summary/output size limiting logic (same 3-way decision as Overview)
562                let use_summary = if params.force == Some(true) {
563                    false
564                } else if params.summary == Some(true) {
565                    true
566                } else if params.summary == Some(false) {
567                    false
568                } else {
569                    output.formatted.len() > SIZE_LIMIT
570                };
571
572                if use_summary {
573                    output.formatted = format_file_details_summary(
574                        &output.semantic,
575                        &params.path,
576                        output.line_count,
577                    );
578                } else if output.formatted.len() > SIZE_LIMIT && params.force != Some(true) {
579                    let estimated_tokens = output.formatted.len() / 4;
580                    let message = format!(
581                        "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
582                         - force=true to return full output\n\
583                         - Narrow your scope (smaller directory, specific file)\n\
584                         - Use symbol_focus mode for targeted analysis\n\
585                         - Reduce max_depth parameter",
586                        output.formatted.len(),
587                        estimated_tokens
588                    );
589                    return Err(ErrorData::new(
590                        rmcp::model::ErrorCode::INVALID_REQUEST,
591                        message,
592                        None,
593                    ));
594                }
595
596                // Paginate functions (typically the largest collection)
597                let paginated = paginate_slice(
598                    &output.semantic.functions,
599                    offset,
600                    page_size,
601                    PaginationMode::Default,
602                )
603                .map_err(|e| {
604                    ErrorData::new(rmcp::model::ErrorCode::INTERNAL_ERROR, e.to_string(), None)
605                })?;
606
607                // Regenerate formatted output from the paginated slice when pagination is active
608                if paginated.next_cursor.is_some() || offset > 0 {
609                    output.formatted = format_file_details_paginated(
610                        &paginated.items,
611                        paginated.total,
612                        &output.semantic,
613                        &params.path,
614                        output.line_count,
615                        offset,
616                    );
617                }
618
619                // Update next_cursor in output after pagination
620                output.next_cursor = paginated.next_cursor.clone();
621
622                // Serialize after all mutations
623                let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
624                (output.formatted, paginated.next_cursor, structured)
625            }
626            types::ModeResult::SymbolFocus(mut output) => {
627                // Auto-detect: if no explicit summary param and output exceeds limit,
628                // re-run analysis with use_summary=true (double-compute, acceptable)
629                if params.summary.is_none()
630                    && params.force != Some(true)
631                    && output.formatted.len() > SIZE_LIMIT
632                {
633                    let path_owned2 = Path::new(&params.path).to_path_buf();
634                    let focus_owned2 = params.focus.clone().unwrap_or_default();
635                    let follow_depth2 = params.follow_depth.unwrap_or(1);
636                    let max_depth2 = params.max_depth;
637                    let ast_recursion_limit2 = params.ast_recursion_limit;
638                    let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
639                    let ct2 = ct.clone();
640                    let summary_result = tokio::task::spawn_blocking(move || {
641                        analyze::analyze_focused_with_progress(
642                            &path_owned2,
643                            &focus_owned2,
644                            follow_depth2,
645                            max_depth2,
646                            ast_recursion_limit2,
647                            counter2,
648                            ct2,
649                            true, // use_summary=true
650                        )
651                    })
652                    .await;
653                    match summary_result {
654                        Ok(Ok(summary_output)) => {
655                            output.formatted = summary_output.formatted;
656                        }
657                        _ => {
658                            // Fallback: return error (summary generation failed)
659                            let estimated_tokens = output.formatted.len() / 4;
660                            let message = format!(
661                                "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
662                                output.formatted.len(),
663                                estimated_tokens
664                            );
665                            return Err(ErrorData::new(
666                                rmcp::model::ErrorCode::INVALID_REQUEST,
667                                message,
668                                None,
669                            ));
670                        }
671                    }
672                } else if output.formatted.len() > SIZE_LIMIT
673                    && params.force != Some(true)
674                    && params.summary == Some(false)
675                {
676                    // Explicit summary=false with large output: return error
677                    let estimated_tokens = output.formatted.len() / 4;
678                    let message = format!(
679                        "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
680                         - force=true to return full output\n\
681                         - summary=true to get compact summary\n\
682                         - Narrow your scope (smaller directory, specific file)",
683                        output.formatted.len(),
684                        estimated_tokens
685                    );
686                    return Err(ErrorData::new(
687                        rmcp::model::ErrorCode::INVALID_REQUEST,
688                        message,
689                        None,
690                    ));
691                }
692
693                // SymbolFocus pagination: decode cursor mode to determine callers vs callees
694                let cursor_mode = if let Some(ref cursor_str) = params.cursor {
695                    decode_cursor(cursor_str)
696                        .map(|c| c.mode)
697                        .unwrap_or(PaginationMode::Callers)
698                } else {
699                    PaginationMode::Callers
700                };
701
702                let paginated_next_cursor = match cursor_mode {
703                    PaginationMode::Callers => {
704                        let (paginated_items, paginated_next) = paginate_focus_chains(
705                            &output.prod_chains,
706                            PaginationMode::Callers,
707                            offset,
708                            page_size,
709                        )?;
710
711                        if paginated_next.is_some() || offset > 0 {
712                            let focus_symbol = params.focus.as_deref().unwrap_or("");
713                            let base_path = Path::new(&params.path);
714                            output.formatted = format_focused_paginated(
715                                &paginated_items,
716                                output.prod_chains.len(),
717                                PaginationMode::Callers,
718                                focus_symbol,
719                                &output.prod_chains,
720                                &output.test_chains,
721                                &output.outgoing_chains,
722                                output.def_count,
723                                offset,
724                                Some(base_path),
725                            );
726                            paginated_next
727                        } else {
728                            None
729                        }
730                    }
731                    PaginationMode::Callees => {
732                        let (paginated_items, paginated_next) = paginate_focus_chains(
733                            &output.outgoing_chains,
734                            PaginationMode::Callees,
735                            offset,
736                            page_size,
737                        )?;
738
739                        if paginated_next.is_some() || offset > 0 {
740                            let focus_symbol = params.focus.as_deref().unwrap_or("");
741                            let base_path = Path::new(&params.path);
742                            output.formatted = format_focused_paginated(
743                                &paginated_items,
744                                output.outgoing_chains.len(),
745                                PaginationMode::Callees,
746                                focus_symbol,
747                                &output.prod_chains,
748                                &output.test_chains,
749                                &output.outgoing_chains,
750                                output.def_count,
751                                offset,
752                                Some(base_path),
753                            );
754                            paginated_next
755                        } else {
756                            None
757                        }
758                    }
759                    PaginationMode::Default => {
760                        unreachable!("SymbolFocus should only use Callers or Callees modes")
761                    }
762                };
763
764                let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
765                (output.formatted, paginated_next_cursor, structured)
766            }
767        };
768
769        // Build final text output with pagination cursor if present
770        let mut final_text = formatted_text;
771        if let Some(cursor) = next_cursor {
772            final_text.push('\n');
773            final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
774        }
775
776        let mut result = CallToolResult::success(vec![Content::text(final_text)]);
777        result.structured_content = Some(structured_value);
778        Ok(result)
779    }
780}
781
782#[tool_handler]
783impl ServerHandler for CodeAnalyzer {
784    fn get_info(&self) -> InitializeResult {
785        let capabilities = ServerCapabilities::builder()
786            .enable_logging()
787            .enable_tools()
788            .enable_tool_list_changed()
789            .enable_completions()
790            .build();
791        let server_info = Implementation::new("code-analyze-mcp", "0.1.0")
792            .with_title("Code Analyze MCP")
793            .with_description("MCP server for code structure analysis using tree-sitter");
794        InitializeResult::new(capabilities)
795            .with_server_info(server_info)
796            .with_instructions("Use overview mode to map a codebase (pass a directory). Use file_details mode to extract functions, classes, and imports from a specific file (pass a file path). Use symbol_focus mode to trace call graphs for a named function or class (pass a directory and set focus to the symbol name, case-sensitive). Prefer summary=true on large directories to reduce output size. When the response includes next_cursor, pass it back as cursor to retrieve the next page.")
797    }
798
799    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
800        let mut peer_lock = self.peer.lock().await;
801        *peer_lock = Some(context.peer.clone());
802        drop(peer_lock);
803
804        // Spawn consumer task to drain log events from channel with batching.
805        let peer = self.peer.clone();
806        let event_rx = self.event_rx.clone();
807
808        tokio::spawn(async move {
809            let rx = {
810                let mut rx_lock = event_rx.lock().await;
811                rx_lock.take()
812            };
813
814            if let Some(mut receiver) = rx {
815                let mut buffer = Vec::with_capacity(64);
816                loop {
817                    // Drain up to 64 events from channel
818                    receiver.recv_many(&mut buffer, 64).await;
819
820                    if buffer.is_empty() {
821                        // Channel closed, exit consumer task
822                        break;
823                    }
824
825                    // Acquire peer lock once per batch
826                    let peer_lock = peer.lock().await;
827                    if let Some(peer) = peer_lock.as_ref() {
828                        for log_event in buffer.drain(..) {
829                            let notification = ServerNotification::LoggingMessageNotification(
830                                Notification::new(LoggingMessageNotificationParam {
831                                    level: log_event.level,
832                                    logger: Some(log_event.logger),
833                                    data: log_event.data,
834                                }),
835                            );
836                            if let Err(e) = peer.send_notification(notification).await {
837                                warn!("Failed to send logging notification: {}", e);
838                            }
839                        }
840                    }
841                }
842            }
843        });
844    }
845
846    #[instrument(skip(self, _context))]
847    async fn on_cancelled(
848        &self,
849        notification: CancelledNotificationParam,
850        _context: NotificationContext<RoleServer>,
851    ) {
852        tracing::info!(
853            request_id = ?notification.request_id,
854            reason = ?notification.reason,
855            "Received cancellation notification"
856        );
857    }
858
859    #[instrument(skip(self, _context))]
860    async fn complete(
861        &self,
862        request: CompleteRequestParams,
863        _context: RequestContext<RoleServer>,
864    ) -> Result<CompleteResult, ErrorData> {
865        // Dispatch on argument name: "path" or "focus"
866        let argument_name = &request.argument.name;
867        let argument_value = &request.argument.value;
868
869        let completions = match argument_name.as_str() {
870            "path" => {
871                // Path completions: use current directory as root
872                let root = Path::new(".");
873                completion::path_completions(root, argument_value)
874            }
875            "focus" => {
876                // Focus completions: need the path argument from context
877                let path_arg = request
878                    .context
879                    .as_ref()
880                    .and_then(|ctx| ctx.get_argument("path"));
881
882                match path_arg {
883                    Some(path_str) => {
884                        let path = Path::new(path_str);
885                        completion::symbol_completions(&self.cache, path, argument_value)
886                    }
887                    None => Vec::new(),
888                }
889            }
890            _ => Vec::new(),
891        };
892
893        // Create CompletionInfo with has_more flag if >100 results
894        let total_count = completions.len() as u32;
895        let (values, has_more) = if completions.len() > 100 {
896            (completions.into_iter().take(100).collect(), true)
897        } else {
898            (completions, false)
899        };
900
901        let completion_info =
902            match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
903                Ok(info) => info,
904                Err(_) => {
905                    // Graceful degradation: return empty on error
906                    CompletionInfo::with_all_values(Vec::new())
907                        .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
908                }
909            };
910
911        Ok(CompleteResult::new(completion_info))
912    }
913
914    async fn set_level(
915        &self,
916        params: SetLevelRequestParams,
917        _context: RequestContext<RoleServer>,
918    ) -> Result<(), ErrorData> {
919        let level_filter = match params.level {
920            LoggingLevel::Debug => LevelFilter::DEBUG,
921            LoggingLevel::Info => LevelFilter::INFO,
922            LoggingLevel::Notice => LevelFilter::INFO,
923            LoggingLevel::Warning => LevelFilter::WARN,
924            LoggingLevel::Error => LevelFilter::ERROR,
925            LoggingLevel::Critical => LevelFilter::ERROR,
926            LoggingLevel::Alert => LevelFilter::ERROR,
927            LoggingLevel::Emergency => LevelFilter::ERROR,
928        };
929
930        let mut filter_lock = self.log_level_filter.lock().unwrap();
931        *filter_lock = level_filter;
932        Ok(())
933    }
934}