1pub mod analyze;
17pub mod cache;
18pub mod completion;
19pub mod formatter;
20pub mod graph;
21pub mod lang;
22pub mod languages;
23pub mod logging;
24pub mod pagination;
25pub mod parser;
26pub mod test_detection;
27pub mod traversal;
28pub mod types;
29
30use cache::AnalysisCache;
31use formatter::{
32 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
33 format_structure_paginated, format_summary,
34};
35use logging::LogEvent;
36use pagination::{
37 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
38};
39use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
40use rmcp::handler::server::wrapper::Parameters;
41use rmcp::model::{
42 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
43 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
44 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
45 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
46};
47use rmcp::service::{NotificationContext, RequestContext};
48use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
49use serde_json::Value;
50use std::path::Path;
51use std::sync::{Arc, Mutex};
52use tokio::sync::{Mutex as TokioMutex, mpsc};
53use tracing::{instrument, warn};
54use tracing_subscriber::filter::LevelFilter;
55use traversal::walk_directory;
56use types::{
57 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
58 AnalyzeSymbolParams,
59};
60
/// Maximum formatted-output size in characters before auto-summarization or a
/// size-limit error kicks in (~12.5K tokens at the ~4 chars/token estimate
/// used throughout this file).
const SIZE_LIMIT: usize = 50_000;
62
63fn error_meta(
64 category: &'static str,
65 is_retryable: bool,
66 suggested_action: &'static str,
67) -> Option<serde_json::Value> {
68 Some(serde_json::json!({
69 "errorCategory": category,
70 "isRetryable": is_retryable,
71 "suggestedAction": suggested_action,
72 }))
73}
74
75fn no_cache_meta() -> Meta {
76 let mut m = serde_json::Map::new();
77 m.insert(
78 "cache_hint".to_string(),
79 serde_json::Value::String("no-cache".to_string()),
80 );
81 Meta(m)
82}
83
84fn paginate_focus_chains(
87 chains: &[graph::CallChain],
88 mode: PaginationMode,
89 offset: usize,
90 page_size: usize,
91) -> Result<(Vec<graph::CallChain>, Option<String>), ErrorData> {
92 let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
93 ErrorData::new(
94 rmcp::model::ErrorCode::INTERNAL_ERROR,
95 e.to_string(),
96 error_meta("transient", true, "retry the request"),
97 )
98 })?;
99
100 if paginated.next_cursor.is_none() && offset == 0 {
101 return Ok((paginated.items, None));
102 }
103
104 let next = if let Some(raw_cursor) = paginated.next_cursor {
105 let decoded = decode_cursor(&raw_cursor).map_err(|e| {
106 ErrorData::new(
107 rmcp::model::ErrorCode::INVALID_PARAMS,
108 e.to_string(),
109 error_meta("validation", false, "invalid cursor format"),
110 )
111 })?;
112 Some(
113 encode_cursor(&CursorData {
114 mode,
115 offset: decoded.offset,
116 })
117 .map_err(|e| {
118 ErrorData::new(
119 rmcp::model::ErrorCode::INVALID_PARAMS,
120 e.to_string(),
121 error_meta("validation", false, "invalid cursor format"),
122 )
123 })?,
124 )
125 } else {
126 None
127 };
128
129 Ok((paginated.items, next))
130}
131
/// MCP server handler exposing code-analysis tools backed by tree-sitter.
#[derive(Clone)]
pub struct CodeAnalyzer {
    /// Router generated by `#[tool_router]`; dispatches incoming tool calls.
    tool_router: ToolRouter<Self>,
    /// Cache of per-file analysis results keyed by path + mtime.
    cache: AnalysisCache,
    /// Peer handle used for notifications; populated in `on_initialized`.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    /// Current log level filter, shared with the tracing subscriber.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    /// Log-event receiver; taken exactly once by the forwarding task spawned
    /// in `on_initialized` (hence the `Option`).
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
}
140
141#[tool_router]
142impl CodeAnalyzer {
143 pub fn new(
144 peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
145 log_level_filter: Arc<Mutex<LevelFilter>>,
146 event_rx: mpsc::UnboundedReceiver<LogEvent>,
147 ) -> Self {
148 CodeAnalyzer {
149 tool_router: Self::tool_router(),
150 cache: AnalysisCache::new(100),
151 peer,
152 log_level_filter,
153 event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
154 }
155 }
156
157 #[instrument(skip(self))]
158 async fn emit_progress(
159 &self,
160 peer: Option<Peer<RoleServer>>,
161 token: &ProgressToken,
162 progress: f64,
163 total: f64,
164 message: String,
165 ) {
166 if let Some(peer) = peer {
167 let notification = ServerNotification::ProgressNotification(Notification::new(
168 ProgressNotificationParam {
169 progress_token: token.clone(),
170 progress,
171 total: Some(total),
172 message: Some(message),
173 },
174 ));
175 if let Err(e) = peer.send_notification(notification).await {
176 warn!("Failed to send progress notification: {}", e);
177 }
178 }
179 }
180
    /// Walk and analyze a directory tree, streaming progress notifications
    /// while a blocking worker task performs the actual analysis.
    ///
    /// Cancellation is cooperative: `ct` is also handed to the worker, and a
    /// cancelled run surfaces as an INTERNAL_ERROR with a "transient" category.
    #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::AnalysisOutput, ErrorData> {
        let path = Path::new(&params.path);
        // Shared counter the worker increments per analyzed file; polled
        // below to drive progress notifications.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        let entries = walk_directory(path, max_depth).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {}", e),
                error_meta("resource", false, "check path permissions and availability"),
            )
        })?;

        // Progress total counts files only; directory entries are skipped.
        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // CPU-bound analysis runs on the blocking pool, not the async runtime.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Unique-enough progress token derived from the current wall clock.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll every 100ms: emit a progress notification whenever the counter
        // moved, and stop once the worker finished or cancellation fired.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {}/{} files", current, total_files),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification (skipped when cancelled or nothing to do).
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {} files", total_files),
            )
            .await;
        }

        // Map worker outcomes: success, explicit cancellation, analysis
        // failure, or a join error (worker panicked / was aborted).
        match handle.await {
            Ok(Ok(output)) => Ok(output),
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                error_meta("transient", true, "analysis was cancelled"),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {}", e),
                error_meta("resource", false, "check path and file permissions"),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {}", e),
                error_meta("transient", true, "retry the request"),
            )),
        }
    }
282
283 #[instrument(skip(self, params))]
286 async fn handle_file_details_mode(
287 &self,
288 params: &AnalyzeFileParams,
289 ) -> Result<std::sync::Arc<analyze::FileAnalysisOutput>, ErrorData> {
290 let cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
292 meta.modified().ok().map(|mtime| cache::CacheKey {
293 path: std::path::PathBuf::from(¶ms.path),
294 modified: mtime,
295 mode: AnalysisMode::FileDetails,
296 })
297 });
298
299 if let Some(ref key) = cache_key
301 && let Some(cached) = self.cache.get(key)
302 {
303 return Ok(cached);
304 }
305
306 match analyze::analyze_file(¶ms.path, params.ast_recursion_limit) {
308 Ok(output) => {
309 let arc_output = std::sync::Arc::new(output);
310 if let Some(ref key) = cache_key {
311 self.cache.put(key.clone(), arc_output.clone());
312 }
313 Ok(arc_output)
314 }
315 Err(e) => Err(ErrorData::new(
316 rmcp::model::ErrorCode::INTERNAL_ERROR,
317 format!("Error analyzing file: {}", e),
318 error_meta("resource", false, "check file path and permissions"),
319 )),
320 }
321 }
322
    /// Core of `analyze_symbol`: build the call graph for `params.symbol`
    /// under `params.path`, streaming progress, with an automatic summary
    /// re-run when the full output would exceed `SIZE_LIMIT`.
    #[instrument(skip(self, params, ct))]
    async fn handle_focused_mode(
        &self,
        params: &AnalyzeSymbolParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        let follow_depth = params.follow_depth.unwrap_or(1);
        // Shared counter the worker bumps per file; polled to emit progress.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path = Path::new(&params.path);
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let symbol_owned = params.symbol.clone();
        let match_mode = params.match_mode.clone().unwrap_or_default();
        let ast_recursion_limit = params.ast_recursion_limit;
        let ct_clone = ct.clone();

        // Honor summary=true up front (unless force=true) so the worker can
        // produce compact output directly.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        // Best-effort progress total; a walk failure just disables progress.
        let total_files = match walk_directory(path, max_depth) {
            Ok(entries) => entries.iter().filter(|e| !e.is_dir).count(),
            Err(_) => 0,
        };

        // CPU-bound graph construction runs on the blocking pool.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_focused_with_progress(
                &path_owned,
                &symbol_owned,
                match_mode,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                counter_clone,
                ct_clone,
                use_summary_for_task,
            )
        });

        // Unique-enough progress token derived from the current wall clock.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll every 100ms, emitting progress whenever the counter advanced,
        // until the worker finishes or cancellation fires.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {}/{} files for symbol '{}'",
                        current, total_files, params.symbol
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification (skipped when cancelled or nothing to do).
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!(
                    "Completed analyzing {} files for symbol '{}'",
                    total_files, params.symbol
                ),
            )
            .await;
        }

        let mut output = match handle.await {
            Ok(Ok(output)) => output,
            Ok(Err(analyze::AnalyzeError::Cancelled)) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    "Analysis cancelled".to_string(),
                    error_meta("transient", true, "analysis was cancelled"),
                ));
            }
            Ok(Err(e)) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Error analyzing symbol: {}", e),
                    error_meta("resource", false, "check symbol name and file"),
                ));
            }
            Err(e) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Task join error: {}", e),
                    error_meta("transient", true, "retry the request"),
                ));
            }
        };

        // Auto-summarize: when the caller expressed no preference and the
        // full output is oversized, re-run the analysis in summary mode.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            let path_owned2 = Path::new(&params.path).to_path_buf();
            let symbol_owned2 = params.symbol.clone();
            let match_mode2 = params.match_mode.clone().unwrap_or_default();
            let follow_depth2 = params.follow_depth.unwrap_or(1);
            let max_depth2 = params.max_depth;
            let ast_recursion_limit2 = params.ast_recursion_limit;
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let ct2 = ct.clone();
            let summary_result = tokio::task::spawn_blocking(move || {
                analyze::analyze_focused_with_progress(
                    &path_owned2,
                    &symbol_owned2,
                    match_mode2,
                    follow_depth2,
                    max_depth2,
                    ast_recursion_limit2,
                    counter2,
                    ct2,
                    true, )
            })
            .await;
            match summary_result {
                Ok(Ok(summary_output)) => {
                    output.formatted = summary_output.formatted;
                }
                _ => {
                    // Summary re-run failed: fall back to a size-limit error.
                    let estimated_tokens = output.formatted.len() / 4;
                    let message = format!(
                        "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                        output.formatted.len(),
                        estimated_tokens
                    );
                    return Err(ErrorData::new(
                        rmcp::model::ErrorCode::INVALID_PARAMS,
                        message,
                        error_meta("validation", false, "use summary=true or force=true"),
                    ));
                }
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly declined a summary: refuse oversized output
            // instead of truncating silently.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                - force=true to return full output\n\
                - summary=true to get compact summary\n\
                - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                ),
            ));
        }

        Ok(output)
    }
522
    // Tool entry point: directory overview with output-size policy (force >
    // explicit summary choice > auto-summary over SIZE_LIMIT) and cursor
    // pagination over the file list.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_directory",
        description = "Analyze directory structure and code metrics for multi-file overview. Use this tool for directories; use analyze_file for a single file. Returns a tree with LOC, function count, class count, and test file markers. Respects .gitignore (results may differ from raw filesystem listing because .gitignore rules are applied). For repos with 1000+ files, use max_depth=2-3 and summary=true to stay within token budgets. Note: max_depth controls what is analyzed (traversal depth), while page_size controls how results are returned (chunking); these are independent. Strategy comparison: prefer pagination (page_size=50) over force=true to reduce per-call token overhead; use summary=true when counts and structure are sufficient and no pagination is needed; force=true is an escape hatch for exceptional cases. Empty directories return an empty tree with zero counts. Output auto-summarizes at 50K chars; use summary=true to force compact output. Paginate large results with cursor and page_size. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they?",
        output_schema = schema_for_type::<analyze::AnalysisOutput>(),
        annotations(
            title = "Analyze Directory",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_directory(
        &self,
        params: Parameters<AnalyzeDirectoryParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();

        let mut output = self.handle_overview_mode(&params, ct).await?;

        // Output-size policy: force wins, then an explicit summary choice,
        // otherwise auto-summarize when the output exceeds SIZE_LIMIT.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            output.formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            output.formatted = format_summary(
                &output.entries,
                &output.files,
                params.max_depth,
                Some(Path::new(&params.path)),
            );
        }

        // Resolve the requested page; the cursor (if any) carries the offset.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            })?;
            cursor_data.offset
        } else {
            0
        };

        let paginated = paginate_slice(&output.files, offset, page_size, PaginationMode::Default)
            .map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    e.to_string(),
                    error_meta("transient", true, "retry the request"),
                )
            })?;

        // Re-render as a paginated listing whenever pagination is active or
        // compact (non-verbose) output was requested.
        let verbose = params.output_control.verbose.unwrap_or(false);
        if paginated.next_cursor.is_some() || offset > 0 || !verbose {
            output.formatted = format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(Path::new(&params.path)),
                verbose,
            );
        }

        output.next_cursor = paginated.next_cursor.clone();

        // Append NEXT_CURSOR so text-only clients can continue pagination.
        let mut final_text = output.formatted.clone();
        if let Some(cursor) = paginated.next_cursor {
            final_text.push('\n');
            final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text)])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        Ok(result)
    }
619
    // Tool entry point: single-file semantic analysis with mtime-keyed
    // caching (via handle_file_details_mode), the shared output-size policy,
    // and cursor pagination over the function list.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_file",
        description = "Extract semantic structure from a single source file only; pass a directory to analyze_directory instead. Returns functions with signatures, types, and line ranges; class and method definitions with inheritance, fields, and imports. Supported languages: Rust, Go, Java, Python, TypeScript, TSX; unsupported file extensions return an error. Common mistake: passing a directory path returns an error; use analyze_directory for directories. Generated code with deeply nested ASTs may exceed 50K chars; use summary=true to get counts only. Supports pagination for large files via cursor/page_size. Use summary=true for compact output. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py",
        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
        annotations(
            title = "Analyze File",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let _ct = context.ct.clone();

        let arc_output = self.handle_file_details_mode(&params).await?;

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Output-size policy: force wins, then an explicit summary choice,
        // otherwise auto-summarize when the output exceeds SIZE_LIMIT.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // summary == Some(false) with oversized output: refuse rather
            // than truncate silently.
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                - force=true to return full output\n\
                - Narrow your scope (smaller directory, specific file)\n\
                - Use analyze_symbol mode for targeted analysis\n\
                - Reduce max_depth parameter",
                formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                error_meta("validation", false, "use force=true or narrow scope"),
            ));
        }

        // Resolve the requested page; the cursor (if any) carries the offset.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            })?;
            cursor_data.offset
        } else {
            0
        };

        let paginated = paginate_slice(
            &arc_output.semantic.functions,
            offset,
            page_size,
            PaginationMode::Default,
        )
        .map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                e.to_string(),
                error_meta("transient", true, "retry the request"),
            )
        })?;

        // Re-render as a paginated listing whenever pagination is active or
        // compact (non-verbose) output was requested.
        let verbose = params.output_control.verbose.unwrap_or(false);
        if paginated.next_cursor.is_some() || offset > 0 || !verbose {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
            );
        }

        let next_cursor = paginated.next_cursor.clone();

        // Append NEXT_CURSOR so text-only clients can continue pagination.
        let mut final_text = formatted.clone();
        if let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
        }

        // Structured response mirrors the (possibly re-rendered) text output.
        let response_output = analyze::FileAnalysisOutput {
            formatted,
            semantic: arc_output.semantic.clone(),
            line_count,
            next_cursor,
        };

        let mut result = CallToolResult::success(vec![Content::text(final_text)])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        Ok(result)
    }
749
    // Tool entry point: call-graph analysis for a named symbol, paging
    // either the caller chains or the callee chains depending on the mode
    // stored in the cursor (first page defaults to callers).
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_symbol",
        description = "Build call graph for a named function or method across all files in a directory to trace a specific function's usage. Returns direct callers and callees. Default symbol lookup is case-sensitive exact-match (match_mode=exact); myFunc and myfunc are different symbols. If exact match fails, retry with match_mode=insensitive for a case-insensitive search. To list candidates matching a prefix, use match_mode=prefix. To find symbols containing a substring, use match_mode=contains. When prefix or contains matches multiple symbols, an error is returned listing all candidates so you can refine to a single match. A symbol unknown to the graph (not defined and not referenced) returns an error; a symbol that is defined but has no callers or callees returns empty chains without error. follow_depth warning: each increment can multiply output size exponentially; use follow_depth=1 for production use; follow_depth=2+ only for targeted deep dives. Use cursor/page_size to paginate call chains when results exceed page_size. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep",
        output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
        annotations(
            title = "Analyze Symbol",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_symbol(
        &self,
        params: Parameters<AnalyzeSymbolParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();

        let mut output = self.handle_focused_mode(&params, ct).await?;

        // Resolve the requested page; the cursor (if any) carries the offset.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            })?;
            cursor_data.offset
        } else {
            0
        };

        // The cursor also remembers which direction is being paged. A decode
        // failure here cannot actually occur (the decode above already
        // succeeded), so the unwrap_or default is a safe fallback.
        let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
            decode_cursor(cursor_str)
                .map(|c| c.mode)
                .unwrap_or(PaginationMode::Callers)
        } else {
            PaginationMode::Callers
        };

        let paginated_next_cursor = match cursor_mode {
            PaginationMode::Callers => {
                let (paginated_items, paginated_next) = paginate_focus_chains(
                    &output.prod_chains,
                    PaginationMode::Callers,
                    offset,
                    page_size,
                )?;

                // Re-render when pagination is active or compact output was
                // requested; otherwise keep the worker's formatted text.
                let verbose = params.output_control.verbose.unwrap_or(false);
                if paginated_next.is_some() || offset > 0 || !verbose {
                    let base_path = Path::new(&params.path);
                    output.formatted = format_focused_paginated(
                        &paginated_items,
                        output.prod_chains.len(),
                        PaginationMode::Callers,
                        &params.symbol,
                        &output.prod_chains,
                        &output.test_chains,
                        &output.outgoing_chains,
                        output.def_count,
                        offset,
                        Some(base_path),
                        verbose,
                    );
                    paginated_next
                } else {
                    None
                }
            }
            PaginationMode::Callees => {
                let (paginated_items, paginated_next) = paginate_focus_chains(
                    &output.outgoing_chains,
                    PaginationMode::Callees,
                    offset,
                    page_size,
                )?;

                // Same re-render rule as the Callers branch.
                let verbose = params.output_control.verbose.unwrap_or(false);
                if paginated_next.is_some() || offset > 0 || !verbose {
                    let base_path = Path::new(&params.path);
                    output.formatted = format_focused_paginated(
                        &paginated_items,
                        output.outgoing_chains.len(),
                        PaginationMode::Callees,
                        &params.symbol,
                        &output.prod_chains,
                        &output.test_chains,
                        &output.outgoing_chains,
                        output.def_count,
                        offset,
                        Some(base_path),
                        verbose,
                    );
                    paginated_next
                } else {
                    None
                }
            }
            PaginationMode::Default => {
                unreachable!("SymbolFocus should only use Callers or Callees modes")
            }
        };

        // Append NEXT_CURSOR so text-only clients can continue pagination.
        let mut final_text = output.formatted.clone();
        if let Some(cursor) = paginated_next_cursor {
            final_text.push('\n');
            final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text)])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        Ok(result)
    }
875
876 #[instrument(skip(self))]
877 #[tool(
878 name = "analyze_module",
879 description = "Index functions and imports in a single source file with minimal token cost. Returns name, line_count, language, function names with line numbers, and import list only -- no signatures, no types, no call graphs, no references. ~75% smaller output than analyze_file. Use analyze_file when you need function signatures, types, or class details; use analyze_module when you only need a function/import index to orient in a file or survey many files in sequence. Use analyze_directory for multi-file overviews; use analyze_symbol to trace call graphs for a specific function. Supported languages: Rust, Go, Java, Python, TypeScript, TSX; unsupported extensions return an error. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs",
880 output_schema = schema_for_type::<types::ModuleInfo>(),
881 annotations(
882 title = "Analyze Module",
883 read_only_hint = true,
884 destructive_hint = false,
885 idempotent_hint = true,
886 open_world_hint = false
887 )
888 )]
889 async fn analyze_module(
890 &self,
891 params: Parameters<AnalyzeModuleParams>,
892 _context: RequestContext<RoleServer>,
893 ) -> Result<CallToolResult, ErrorData> {
894 let params = params.0;
895
896 let module_info = analyze::analyze_module_file(¶ms.path).map_err(|e| {
897 ErrorData::new(
898 rmcp::model::ErrorCode::INVALID_PARAMS,
899 format!("Failed to analyze module: {}", e),
900 error_meta(
901 "validation",
902 false,
903 "ensure file exists, is readable, and has a supported extension",
904 ),
905 )
906 })?;
907
908 let mut result = CallToolResult::success(vec![Content::text(
909 serde_json::to_string_pretty(&module_info).unwrap_or_default(),
910 )])
911 .with_meta(Some(no_cache_meta()));
912
913 let structured = serde_json::to_value(&module_info).unwrap_or(Value::Null);
914 result.structured_content = Some(structured);
915 Ok(result)
916 }
917}
918
919#[tool_handler]
920impl ServerHandler for CodeAnalyzer {
    // Advertise server identity, capabilities (logging, tools, completions),
    // and client-facing usage instructions during initialization.
    fn get_info(&self) -> InitializeResult {
        let capabilities = ServerCapabilities::builder()
            .enable_logging()
            .enable_tools()
            .enable_tool_list_changed()
            .enable_completions()
            .build();
        // Version is stamped at compile time from Cargo.toml.
        let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
            .with_title("Code Analyze MCP")
            .with_description("MCP server for code structure analysis using tree-sitter");
        InitializeResult::new(capabilities)
            .with_server_info(server_info)
            .with_instructions("Use analyze_directory to map a codebase (pass a directory). Use analyze_file to extract functions, classes, and imports from a specific file (pass a file path). Use analyze_module to extract a minimal fixed schema (name, line count, functions, imports) from a single file when token budget is critical. Use analyze_symbol to trace call graphs for a named function or class (pass a directory and set symbol to the function name, case-sensitive). Prefer summary=true on large directories to reduce output size. When the response includes a NEXT_CURSOR: line, pass that value back as cursor to retrieve the next page. For non-interactive single-session workflows (e.g. subagents), disable prompt caching to avoid redundant cache writes: DISABLE_PROMPT_CACHING=1.")
    }
935
    // Store the peer handle and spawn a background task that forwards
    // buffered log events to the client as MCP logging notifications.
    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
        let mut peer_lock = self.peer.lock().await;
        *peer_lock = Some(context.peer.clone());
        drop(peer_lock);

        let peer = self.peer.clone();
        let event_rx = self.event_rx.clone();

        tokio::spawn(async move {
            // The receiver is taken out of the shared slot exactly once; a
            // second initialization would find None and the task exits.
            let rx = {
                let mut rx_lock = event_rx.lock().await;
                rx_lock.take()
            };

            if let Some(mut receiver) = rx {
                let mut buffer = Vec::with_capacity(64);
                loop {
                    // Batch up to 64 events per wakeup.
                    receiver.recv_many(&mut buffer, 64).await;

                    // Channel closed with nothing pending: stop forwarding.
                    if buffer.is_empty() {
                        break;
                    }

                    // NOTE(review): if the peer slot were None here, the
                    // buffer would not be drained and events would carry over
                    // to the next iteration. The peer is set above before
                    // this task is spawned, so in practice it is Some —
                    // confirm nothing resets it to None elsewhere.
                    let peer_lock = peer.lock().await;
                    if let Some(peer) = peer_lock.as_ref() {
                        for log_event in buffer.drain(..) {
                            let notification = ServerNotification::LoggingMessageNotification(
                                Notification::new(LoggingMessageNotificationParam {
                                    level: log_event.level,
                                    logger: Some(log_event.logger),
                                    data: log_event.data,
                                }),
                            );
                            if let Err(e) = peer.send_notification(notification).await {
                                warn!("Failed to send logging notification: {}", e);
                            }
                        }
                    }
                }
            }
        });
    }
982
983 #[instrument(skip(self, _context))]
984 async fn on_cancelled(
985 &self,
986 notification: CancelledNotificationParam,
987 _context: NotificationContext<RoleServer>,
988 ) {
989 tracing::info!(
990 request_id = ?notification.request_id,
991 reason = ?notification.reason,
992 "Received cancellation notification"
993 );
994 }
995
996 #[instrument(skip(self, _context))]
997 async fn complete(
998 &self,
999 request: CompleteRequestParams,
1000 _context: RequestContext<RoleServer>,
1001 ) -> Result<CompleteResult, ErrorData> {
1002 let argument_name = &request.argument.name;
1004 let argument_value = &request.argument.value;
1005
1006 let completions = match argument_name.as_str() {
1007 "path" => {
1008 let root = Path::new(".");
1010 completion::path_completions(root, argument_value)
1011 }
1012 "symbol" => {
1013 let path_arg = request
1015 .context
1016 .as_ref()
1017 .and_then(|ctx| ctx.get_argument("path"));
1018
1019 match path_arg {
1020 Some(path_str) => {
1021 let path = Path::new(path_str);
1022 completion::symbol_completions(&self.cache, path, argument_value)
1023 }
1024 None => Vec::new(),
1025 }
1026 }
1027 _ => Vec::new(),
1028 };
1029
1030 let total_count = completions.len() as u32;
1032 let (values, has_more) = if completions.len() > 100 {
1033 (completions.into_iter().take(100).collect(), true)
1034 } else {
1035 (completions, false)
1036 };
1037
1038 let completion_info =
1039 match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
1040 Ok(info) => info,
1041 Err(_) => {
1042 CompletionInfo::with_all_values(Vec::new())
1044 .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
1045 }
1046 };
1047
1048 Ok(CompleteResult::new(completion_info))
1049 }
1050
1051 async fn set_level(
1052 &self,
1053 params: SetLevelRequestParams,
1054 _context: RequestContext<RoleServer>,
1055 ) -> Result<(), ErrorData> {
1056 let level_filter = match params.level {
1057 LoggingLevel::Debug => LevelFilter::DEBUG,
1058 LoggingLevel::Info => LevelFilter::INFO,
1059 LoggingLevel::Notice => LevelFilter::INFO,
1060 LoggingLevel::Warning => LevelFilter::WARN,
1061 LoggingLevel::Error => LevelFilter::ERROR,
1062 LoggingLevel::Critical => LevelFilter::ERROR,
1063 LoggingLevel::Alert => LevelFilter::ERROR,
1064 LoggingLevel::Emergency => LevelFilter::ERROR,
1065 };
1066
1067 let mut filter_lock = self.log_level_filter.lock().unwrap();
1068 *filter_lock = level_filter;
1069 Ok(())
1070 }
1071}
1072
#[cfg(test)]
mod tests {
    use super::*;

    // emit_progress with no connected peer must be a no-op: it should
    // complete without panicking or attempting to send anything.
    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(peer, log_level_filter, rx);
        let token = ProgressToken(NumberOrString::String("test".into()));
        analyzer
            .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
            .await;
    }
}