1pub mod analyze;
17pub mod cache;
18pub mod completion;
19pub mod formatter;
20pub mod graph;
21pub mod lang;
22pub mod languages;
23pub mod logging;
24pub mod metrics;
25pub mod pagination;
26pub mod parser;
27pub(crate) mod schema_helpers;
28pub mod test_detection;
29pub mod traversal;
30pub mod types;
31
32use cache::AnalysisCache;
33use formatter::{
34 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
35 format_module_info, format_structure_paginated, format_summary,
36};
37use logging::LogEvent;
38use pagination::{
39 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
40};
41use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
42use rmcp::handler::server::wrapper::Parameters;
43use rmcp::model::{
44 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
45 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
46 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
47 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
48};
49use rmcp::service::{NotificationContext, RequestContext};
50use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
51use serde_json::Value;
52use std::path::Path;
53use std::sync::{Arc, Mutex};
54use tokio::sync::{Mutex as TokioMutex, mpsc};
55use tracing::{instrument, warn};
56use tracing_subscriber::filter::LevelFilter;
57use traversal::walk_directory;
58use types::{
59 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
60 AnalyzeSymbolParams,
61};
62
/// Maximum output size in characters (~12.5K tokens at ~4 chars/token) before
/// a tool response is auto-summarized or rejected with a summary/force hint.
const SIZE_LIMIT: usize = 50_000;
64
/// Returns `true` when the caller asked for a compact summary while also
/// supplying a pagination cursor -- the two options are mutually exclusive
/// because summary output is never paginated.
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!((summary, cursor), (Some(true), Some(_)))
}
68
69fn error_meta(
70 category: &'static str,
71 is_retryable: bool,
72 suggested_action: &'static str,
73) -> Option<serde_json::Value> {
74 Some(serde_json::json!({
75 "errorCategory": category,
76 "isRetryable": is_retryable,
77 "suggestedAction": suggested_action,
78 }))
79}
80
/// Converts a protocol-level `ErrorData` into an in-band tool error result,
/// carrying over only the human-readable message (the structured meta built
/// by `error_meta` is not forwarded on this path).
fn err_to_tool_result(e: ErrorData) -> CallToolResult {
    CallToolResult::error(vec![Content::text(e.message)])
}
84
85fn no_cache_meta() -> Meta {
86 let mut m = serde_json::Map::new();
87 m.insert(
88 "cache_hint".to_string(),
89 serde_json::Value::String("no-cache".to_string()),
90 );
91 Meta(m)
92}
93
/// Paginates a slice of call chains and normalizes the continuation cursor.
///
/// `paginate_slice` encodes its own cursor; this helper decodes that cursor
/// and re-encodes it with the caller-supplied `mode` so the next request
/// resumes in the same callers/callees mode. Returns the page of chains plus
/// an optional cursor for the next page (`None` when the page is final).
fn paginate_focus_chains(
    chains: &[graph::CallChain],
    mode: PaginationMode,
    offset: usize,
    page_size: usize,
) -> Result<(Vec<graph::CallChain>, Option<String>), ErrorData> {
    let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
        ErrorData::new(
            rmcp::model::ErrorCode::INTERNAL_ERROR,
            e.to_string(),
            error_meta("transient", true, "retry the request"),
        )
    })?;

    // Fast path: first page with no continuation -- nothing to re-encode.
    if paginated.next_cursor.is_none() && offset == 0 {
        return Ok((paginated.items, None));
    }

    // Decode the raw cursor (validating it), then re-encode it with the
    // explicit `mode` while keeping the decoded resume offset.
    let next = if let Some(raw_cursor) = paginated.next_cursor {
        let decoded = decode_cursor(&raw_cursor).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                e.to_string(),
                error_meta("validation", false, "invalid cursor format"),
            )
        })?;
        Some(
            encode_cursor(&CursorData {
                mode,
                offset: decoded.offset,
            })
            .map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            })?,
        )
    } else {
        None
    };

    Ok((paginated.items, next))
}
141
/// MCP server state: dispatches `#[tool]` calls and holds shared resources.
#[derive(Clone)]
pub struct CodeAnalyzer {
    /// Router generated by `#[tool_router]` for the annotated tool methods.
    tool_router: ToolRouter<Self>,
    /// Per-file analysis cache (capacity 100, set in `new`).
    cache: AnalysisCache,
    /// Connected client peer, used for progress notifications; `None` until
    /// a client attaches.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    /// Shared logging level filter.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    /// Log event receiver wrapped in `Option` -- presumably taken once by a
    /// consumer elsewhere; not read in this file. TODO confirm.
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    /// Channel for emitting per-call usage metrics.
    metrics_tx: crate::metrics::MetricsSender,
}
151
152#[tool_router]
153impl CodeAnalyzer {
    /// Lists every tool this server exposes, as registered by the
    /// `#[tool_router]`-generated router.
    pub fn list_tools() -> Vec<rmcp::model::Tool> {
        Self::tool_router().list_all()
    }
157
158 pub fn new(
159 peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
160 log_level_filter: Arc<Mutex<LevelFilter>>,
161 event_rx: mpsc::UnboundedReceiver<LogEvent>,
162 metrics_tx: crate::metrics::MetricsSender,
163 ) -> Self {
164 CodeAnalyzer {
165 tool_router: Self::tool_router(),
166 cache: AnalysisCache::new(100),
167 peer,
168 log_level_filter,
169 event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
170 metrics_tx,
171 }
172 }
173
174 #[instrument(skip(self))]
175 async fn emit_progress(
176 &self,
177 peer: Option<Peer<RoleServer>>,
178 token: &ProgressToken,
179 progress: f64,
180 total: f64,
181 message: String,
182 ) {
183 if let Some(peer) = peer {
184 let notification = ServerNotification::ProgressNotification(Notification::new(
185 ProgressNotificationParam {
186 progress_token: token.clone(),
187 progress,
188 total: Some(total),
189 message: Some(message),
190 },
191 ));
192 if let Err(e) = peer.send_notification(notification).await {
193 warn!("Failed to send progress notification: {}", e);
194 }
195 }
196 }
197
    /// Runs the directory-overview analysis on a blocking task while the
    /// async side polls a shared counter to emit progress notifications.
    ///
    /// Errors when the directory walk fails, the analysis is cancelled, or
    /// the blocking task panics (join error).
    #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::AnalysisOutput, ErrorData> {
        let path = Path::new(&params.path);
        // Shared counter: the blocking task increments it per analyzed file;
        // the polling loop below reads it to report progress.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        let entries = walk_directory(path, max_depth).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {}", e),
                error_meta("resource", false, "check path permissions and availability"),
            )
        })?;

        // Files only -- directories do not count toward progress totals.
        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // CPU-bound analysis runs off the async runtime.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Progress token derived from the current time in nanoseconds.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll every 100ms: stop on cancellation, emit progress on change,
        // exit once the blocking task finishes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {}/{} files", current, total_files),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification, skipped when cancelled or nothing to do.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {} files", total_files),
            )
            .await;
        }

        // Outer Result is the task join; inner Result is the analysis itself.
        match handle.await {
            Ok(Ok(output)) => Ok(output),
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                error_meta("transient", true, "analysis was cancelled"),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {}", e),
                error_meta("resource", false, "check path and file permissions"),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {}", e),
                error_meta("transient", true, "retry the request"),
            )),
        }
    }
299
300 #[instrument(skip(self, params))]
303 async fn handle_file_details_mode(
304 &self,
305 params: &AnalyzeFileParams,
306 ) -> Result<std::sync::Arc<analyze::FileAnalysisOutput>, ErrorData> {
307 let cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
309 meta.modified().ok().map(|mtime| cache::CacheKey {
310 path: std::path::PathBuf::from(¶ms.path),
311 modified: mtime,
312 mode: AnalysisMode::FileDetails,
313 })
314 });
315
316 if let Some(ref key) = cache_key
318 && let Some(cached) = self.cache.get(key)
319 {
320 return Ok(cached);
321 }
322
323 match analyze::analyze_file(¶ms.path, params.ast_recursion_limit) {
325 Ok(output) => {
326 let arc_output = std::sync::Arc::new(output);
327 if let Some(ref key) = cache_key {
328 self.cache.put(key.clone(), arc_output.clone());
329 }
330 Ok(arc_output)
331 }
332 Err(e) => Err(ErrorData::new(
333 rmcp::model::ErrorCode::INTERNAL_ERROR,
334 format!("Error analyzing file: {}", e),
335 error_meta("resource", false, "check file path and permissions"),
336 )),
337 }
338 }
339
    /// Runs the focused (symbol call-graph) analysis on a blocking task while
    /// polling a shared counter to emit progress notifications.
    ///
    /// Size handling: when the caller expressed no summary preference and the
    /// result exceeds `SIZE_LIMIT`, the analysis is re-run in summary mode;
    /// with summary=false and no force, an oversized result is rejected.
    #[instrument(skip(self, params, ct))]
    async fn handle_focused_mode(
        &self,
        params: &AnalyzeSymbolParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        let follow_depth = params.follow_depth.unwrap_or(1);
        // Progress counter shared with the blocking analysis task.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path = Path::new(&params.path);
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let symbol_owned = params.symbol.clone();
        let match_mode = params.match_mode.clone().unwrap_or_default();
        let ast_recursion_limit = params.ast_recursion_limit;
        let ct_clone = ct.clone();

        // Ask the analysis for summary output only when the caller explicitly
        // requested summary and did not force full output.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        // Pre-count files so progress has a total; a failed walk disables
        // progress reporting (total 0) instead of aborting the analysis.
        let total_files = match walk_directory(path, max_depth) {
            Ok(entries) => entries.iter().filter(|e| !e.is_dir).count(),
            Err(_) => 0,
        };

        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_focused_with_progress(
                &path_owned,
                &symbol_owned,
                match_mode,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                counter_clone,
                ct_clone,
                use_summary_for_task,
            )
        });

        // Progress token derived from the current time in nanoseconds.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll every 100ms: stop on cancellation, emit progress on change,
        // exit once the blocking task finishes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {}/{} files for symbol '{}'",
                        current, total_files, params.symbol
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification, skipped when cancelled or nothing to do.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!(
                    "Completed analyzing {} files for symbol '{}'",
                    total_files, params.symbol
                ),
            )
            .await;
        }

        // Outer Result is the task join; inner Result is the analysis itself.
        let mut output = match handle.await {
            Ok(Ok(output)) => output,
            Ok(Err(analyze::AnalyzeError::Cancelled)) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    "Analysis cancelled".to_string(),
                    error_meta("transient", true, "analysis was cancelled"),
                ));
            }
            Ok(Err(e)) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Error analyzing symbol: {}", e),
                    error_meta("resource", false, "check symbol name and file"),
                ));
            }
            Err(e) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Task join error: {}", e),
                    error_meta("transient", true, "retry the request"),
                ));
            }
        };

        // Auto-summarize: no explicit summary preference, no force, and the
        // full output is oversized -- re-run the analysis in summary mode
        // (no progress reporting on this second pass).
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            let path_owned2 = Path::new(&params.path).to_path_buf();
            let symbol_owned2 = params.symbol.clone();
            let match_mode2 = params.match_mode.clone().unwrap_or_default();
            let follow_depth2 = params.follow_depth.unwrap_or(1);
            let max_depth2 = params.max_depth;
            let ast_recursion_limit2 = params.ast_recursion_limit;
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let ct2 = ct.clone();
            let summary_result = tokio::task::spawn_blocking(move || {
                analyze::analyze_focused_with_progress(
                    &path_owned2,
                    &symbol_owned2,
                    match_mode2,
                    follow_depth2,
                    max_depth2,
                    ast_recursion_limit2,
                    counter2,
                    ct2,
                    true,
                )
            })
            .await;
            match summary_result {
                Ok(Ok(summary_output)) => {
                    output.formatted = summary_output.formatted;
                }
                _ => {
                    // Summary fallback failed; report the oversize condition.
                    let estimated_tokens = output.formatted.len() / 4;
                    let message = format!(
                        "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                        output.formatted.len(),
                        estimated_tokens
                    );
                    return Err(ErrorData::new(
                        rmcp::model::ErrorCode::INVALID_PARAMS,
                        message,
                        error_meta("validation", false, "use summary=true or force=true"),
                    ));
                }
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly declined summary but the output is oversized.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                - force=true to return full output\n\
                - summary=true to get compact summary\n\
                - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                ),
            ));
        }

        Ok(output)
    }
539
540 #[instrument(skip(self, context))]
541 #[tool(
542 name = "analyze_directory",
543 description = "Analyze directory structure and code metrics for multi-file overview. Use this tool for directories; use analyze_file for a single file. Returns a tree with LOC, function count, class count, and test file markers. Respects .gitignore (results may differ from raw filesystem listing because .gitignore rules are applied). For repos with 1000+ files, use max_depth=2-3 and summary=true to stay within token budgets. Note: max_depth controls what is analyzed (traversal depth), while page_size controls how results are returned (chunking); these are independent. Strategy comparison: prefer pagination (page_size=50) over force=true to reduce per-call token overhead; use summary=true when counts and structure are sufficient and no pagination is needed; force=true is an escape hatch for exceptional cases. Empty directories return an empty tree with zero counts. Output auto-summarizes at 50K chars; use summary=true to force compact output. Paginate large results with cursor and page_size. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they? summary=true and cursor are mutually exclusive; passing both returns an error.",
544 output_schema = schema_for_type::<analyze::AnalysisOutput>(),
545 annotations(
546 title = "Analyze Directory",
547 read_only_hint = true,
548 destructive_hint = false,
549 idempotent_hint = true,
550 open_world_hint = false
551 )
552 )]
553 async fn analyze_directory(
554 &self,
555 params: Parameters<AnalyzeDirectoryParams>,
556 context: RequestContext<RoleServer>,
557 ) -> Result<CallToolResult, ErrorData> {
558 let params = params.0;
559 let ct = context.ct.clone();
560 let _t_start = std::time::Instant::now();
561 let _param_path = params.path.clone();
562 let _max_depth_val = params.max_depth;
563
564 let mut output = match self.handle_overview_mode(¶ms, ct).await {
566 Ok(v) => v,
567 Err(e) => return Ok(err_to_tool_result(e)),
568 };
569
570 if summary_cursor_conflict(
573 params.output_control.summary,
574 params.pagination.cursor.as_deref(),
575 ) {
576 return Ok(err_to_tool_result(ErrorData::new(
577 rmcp::model::ErrorCode::INVALID_PARAMS,
578 "summary=true is incompatible with a pagination cursor; use one or the other"
579 .to_string(),
580 error_meta("validation", false, "remove cursor or set summary=false"),
581 )));
582 }
583
584 let use_summary = if params.output_control.force == Some(true) {
586 false
587 } else if params.output_control.summary == Some(true) {
588 true
589 } else if params.output_control.summary == Some(false) {
590 false
591 } else {
592 output.formatted.len() > SIZE_LIMIT
593 };
594
595 if use_summary {
596 output.formatted = format_summary(
597 &output.entries,
598 &output.files,
599 params.max_depth,
600 Some(Path::new(¶ms.path)),
601 );
602 }
603
604 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
606 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
607 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
608 ErrorData::new(
609 rmcp::model::ErrorCode::INVALID_PARAMS,
610 e.to_string(),
611 error_meta("validation", false, "invalid cursor format"),
612 )
613 }) {
614 Ok(v) => v,
615 Err(e) => return Ok(err_to_tool_result(e)),
616 };
617 cursor_data.offset
618 } else {
619 0
620 };
621
622 let paginated =
624 match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
625 Ok(v) => v,
626 Err(e) => {
627 return Ok(err_to_tool_result(ErrorData::new(
628 rmcp::model::ErrorCode::INTERNAL_ERROR,
629 e.to_string(),
630 error_meta("transient", true, "retry the request"),
631 )));
632 }
633 };
634
635 let verbose = params.output_control.verbose.unwrap_or(false);
636 if !use_summary {
637 output.formatted = format_structure_paginated(
638 &paginated.items,
639 paginated.total,
640 params.max_depth,
641 Some(Path::new(¶ms.path)),
642 verbose,
643 );
644 }
645
646 if use_summary {
648 output.next_cursor = None;
649 } else {
650 output.next_cursor = paginated.next_cursor.clone();
651 }
652
653 let mut final_text = output.formatted.clone();
655 if !use_summary && let Some(cursor) = paginated.next_cursor {
656 final_text.push('\n');
657 final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
658 }
659
660 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
661 .with_meta(Some(no_cache_meta()));
662 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
663 result.structured_content = Some(structured);
664 let _dur = _t_start.elapsed().as_millis() as u64;
665 self.metrics_tx.send(crate::metrics::MetricEvent {
666 ts: crate::metrics::unix_ms(),
667 tool: "analyze_directory",
668 duration_ms: _dur,
669 output_chars: final_text.chars().count(),
670 param_path_depth: crate::metrics::path_component_count(&_param_path),
671 max_depth: _max_depth_val,
672 result: "ok",
673 error_type: None,
674 });
675 Ok(result)
676 }
677
    /// analyze_file tool: semantic structure for one source file, with
    /// caching (via handle_file_details_mode), summary shaping, and
    /// pagination over the file's function list.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_file",
        description = "Extract semantic structure from a single source file only; pass a directory to analyze_directory instead. Returns functions with signatures, types, and line ranges; class and method definitions with inheritance, fields, and imports. Supported languages: Rust, Go, Java, Python, TypeScript, TSX; unsupported file extensions return an error. Common mistake: passing a directory path returns an error; use analyze_directory for directories. Generated code with deeply nested ASTs may exceed 50K chars; use summary=true to get counts only. Supports pagination for large files via cursor/page_size. Use summary=true for compact output. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py",
        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
        annotations(
            title = "Analyze File",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let _ct = context.ct.clone();
        let _t_start = std::time::Instant::now();
        let _param_path = params.path.clone();

        // Cached (path, mtime)-keyed analysis; errors become in-band results.
        let arc_output = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Output shaping precedence: force wins, then an explicit summary
        // flag, then auto-summarize past SIZE_LIMIT.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // Only reachable with summary=Some(false): oversized output that
            // the caller explicitly declined to summarize.
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                - force=true to return full output\n\
                - Narrow your scope (smaller directory, specific file)\n\
                - Use analyze_symbol mode for targeted analysis\n\
                - Reduce max_depth parameter",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                error_meta("validation", false, "use force=true or narrow scope"),
            )));
        }

        // Resolve the pagination offset from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Pagination operates on the file's function list.
        let paginated = match paginate_slice(
            &arc_output.semantic.functions,
            offset,
            page_size,
            PaginationMode::Default,
        ) {
            Ok(v) => v,
            Err(e) => {
                return Ok(err_to_tool_result(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    e.to_string(),
                    error_meta("transient", true, "retry the request"),
                )));
            }
        };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
            );
        }

        // Summary output never paginates.
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        // Append the continuation cursor to the human-readable text too.
        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
        }

        // Structured content mirrors the (possibly summarized/paginated)
        // formatted text rather than the raw cached output.
        let response_output = analyze::FileAnalysisOutput {
            formatted,
            semantic: arc_output.semantic.clone(),
            line_count,
            next_cursor,
        };

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let _dur = _t_start.elapsed().as_millis() as u64;
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: _dur,
            output_chars: final_text.chars().count(),
            param_path_depth: crate::metrics::path_component_count(&_param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
        });
        Ok(result)
    }
832
    /// analyze_symbol tool: builds the call graph for a symbol and paginates
    /// either the caller chains or the callee chains. The active pagination
    /// mode travels inside the cursor; the first page defaults to Callers.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_symbol",
        description = "Build call graph for a named function or method across all files in a directory to trace a specific function's usage. Returns direct callers and callees. Default symbol lookup is case-sensitive exact-match (match_mode=exact); myFunc and myfunc are different symbols. If exact match fails, retry with match_mode=insensitive for a case-insensitive search. To list candidates matching a prefix, use match_mode=prefix. To find symbols containing a substring, use match_mode=contains. When prefix or contains matches multiple symbols, an error is returned listing all candidates so you can refine to a single match. A symbol unknown to the graph (not defined and not referenced) returns an error; a symbol that is defined but has no callers or callees returns empty chains without error. follow_depth warning: each increment can multiply output size exponentially; use follow_depth=1 for production use; follow_depth=2+ only for targeted deep dives. Use cursor/page_size to paginate call chains when results exceed page_size. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep",
        output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
        annotations(
            title = "Analyze Symbol",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_symbol(
        &self,
        params: Parameters<AnalyzeSymbolParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();
        let _t_start = std::time::Instant::now();
        let _param_path = params.path.clone();
        let _max_depth_val = params.follow_depth;

        // Runs the (possibly long) call-graph analysis with progress.
        let mut output = match self.handle_focused_mode(&params, ct).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        // Resolve the pagination offset; an undecodable cursor errors here.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // NOTE(review): the cursor is decoded a second time here just to read
        // its mode; the unwrap_or fallback is unreachable in practice because
        // an undecodable cursor already returned an error above.
        let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
            decode_cursor(cursor_str)
                .map(|c| c.mode)
                .unwrap_or(PaginationMode::Callers)
        } else {
            PaginationMode::Callers
        };

        // Paginate the chain list matching the cursor's mode, reformatting
        // the output for the selected page.
        let paginated_next_cursor = match cursor_mode {
            PaginationMode::Callers => {
                let (paginated_items, paginated_next) = match paginate_focus_chains(
                    &output.prod_chains,
                    PaginationMode::Callers,
                    offset,
                    page_size,
                ) {
                    Ok(v) => v,
                    Err(e) => return Ok(err_to_tool_result(e)),
                };

                let verbose = params.output_control.verbose.unwrap_or(false);
                if paginated_next.is_some() || offset > 0 || !verbose {
                    let base_path = Path::new(&params.path);
                    output.formatted = format_focused_paginated(
                        &paginated_items,
                        output.prod_chains.len(),
                        PaginationMode::Callers,
                        &params.symbol,
                        &output.prod_chains,
                        &output.test_chains,
                        &output.outgoing_chains,
                        output.def_count,
                        offset,
                        Some(base_path),
                        verbose,
                    );
                    paginated_next
                } else {
                    None
                }
            }
            PaginationMode::Callees => {
                let (paginated_items, paginated_next) = match paginate_focus_chains(
                    &output.outgoing_chains,
                    PaginationMode::Callees,
                    offset,
                    page_size,
                ) {
                    Ok(v) => v,
                    Err(e) => return Ok(err_to_tool_result(e)),
                };

                let verbose = params.output_control.verbose.unwrap_or(false);
                if paginated_next.is_some() || offset > 0 || !verbose {
                    let base_path = Path::new(&params.path);
                    output.formatted = format_focused_paginated(
                        &paginated_items,
                        output.outgoing_chains.len(),
                        PaginationMode::Callees,
                        &params.symbol,
                        &output.prod_chains,
                        &output.test_chains,
                        &output.outgoing_chains,
                        output.def_count,
                        offset,
                        Some(base_path),
                        verbose,
                    );
                    paginated_next
                } else {
                    None
                }
            }
            PaginationMode::Default => {
                unreachable!("SymbolFocus should only use Callers or Callees modes")
            }
        };

        // Append the continuation cursor to the human-readable text too.
        let mut final_text = output.formatted.clone();
        if let Some(cursor) = paginated_next_cursor {
            final_text.push('\n');
            final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let _dur = _t_start.elapsed().as_millis() as u64;
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_symbol",
            duration_ms: _dur,
            output_chars: final_text.chars().count(),
            param_path_depth: crate::metrics::path_component_count(&_param_path),
            max_depth: _max_depth_val,
            result: "ok",
            error_type: None,
        });
        Ok(result)
    }
984
985 #[instrument(skip(self))]
986 #[tool(
987 name = "analyze_module",
988 description = "Index functions and imports in a single source file with minimal token cost. Returns name, line_count, language, function names with line numbers, and import list only -- no signatures, no types, no call graphs, no references. ~75% smaller output than analyze_file. Use analyze_file when you need function signatures, types, or class details; use analyze_module when you only need a function/import index to orient in a file or survey many files in sequence. Use analyze_directory for multi-file overviews; use analyze_symbol to trace call graphs for a specific function. Supported languages: Rust, Go, Java, Python, TypeScript, TSX; unsupported extensions return an error. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs. Pagination, summary, force, and verbose parameters are not supported by this tool.",
989 output_schema = schema_for_type::<types::ModuleInfo>(),
990 annotations(
991 title = "Analyze Module",
992 read_only_hint = true,
993 destructive_hint = false,
994 idempotent_hint = true,
995 open_world_hint = false
996 )
997 )]
998 async fn analyze_module(
999 &self,
1000 params: Parameters<AnalyzeModuleParams>,
1001 _context: RequestContext<RoleServer>,
1002 ) -> Result<CallToolResult, ErrorData> {
1003 let params = params.0;
1004 let _t_start = std::time::Instant::now();
1005 let _param_path = params.path.clone();
1006
1007 if std::fs::metadata(¶ms.path)
1009 .map(|m| m.is_dir())
1010 .unwrap_or(false)
1011 {
1012 let _dur = _t_start.elapsed().as_millis() as u64;
1013 self.metrics_tx.send(crate::metrics::MetricEvent {
1014 ts: crate::metrics::unix_ms(),
1015 tool: "analyze_module",
1016 duration_ms: _dur,
1017 output_chars: 0,
1018 param_path_depth: crate::metrics::path_component_count(&_param_path),
1019 max_depth: None,
1020 result: "error",
1021 error_type: Some("invalid_params".to_string()),
1022 });
1023 return Ok(err_to_tool_result(ErrorData::new(
1024 rmcp::model::ErrorCode::INVALID_PARAMS,
1025 format!(
1026 "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1027 params.path
1028 ),
1029 error_meta("validation", false, "use analyze_directory for directories"),
1030 )));
1031 }
1032
1033 let module_info = match analyze::analyze_module_file(¶ms.path).map_err(|e| {
1034 ErrorData::new(
1035 rmcp::model::ErrorCode::INVALID_PARAMS,
1036 format!("Failed to analyze module: {}", e),
1037 error_meta(
1038 "validation",
1039 false,
1040 "ensure file exists, is readable, and has a supported extension",
1041 ),
1042 )
1043 }) {
1044 Ok(v) => v,
1045 Err(e) => return Ok(err_to_tool_result(e)),
1046 };
1047
1048 let text = format_module_info(&module_info);
1049 let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1050 .with_meta(Some(no_cache_meta()));
1051 let structured = match serde_json::to_value(&module_info).map_err(|e| {
1052 ErrorData::new(
1053 rmcp::model::ErrorCode::INTERNAL_ERROR,
1054 format!("serialization failed: {}", e),
1055 error_meta("internal", false, "report this as a bug"),
1056 )
1057 }) {
1058 Ok(v) => v,
1059 Err(e) => return Ok(err_to_tool_result(e)),
1060 };
1061 result.structured_content = Some(structured);
1062 let _dur = _t_start.elapsed().as_millis() as u64;
1063 self.metrics_tx.send(crate::metrics::MetricEvent {
1064 ts: crate::metrics::unix_ms(),
1065 tool: "analyze_module",
1066 duration_ms: _dur,
1067 output_chars: text.chars().count(),
1068 param_path_depth: crate::metrics::path_component_count(&_param_path),
1069 max_depth: None,
1070 result: "ok",
1071 error_type: None,
1072 });
1073 Ok(result)
1074 }
1075}
1076
// MCP protocol surface for the server. Tool dispatch (tools/list, tools/call)
// is generated by #[tool_handler] from the #[tool_router] impl above; the
// methods below cover the remaining lifecycle, completion, and logging hooks.
#[tool_handler]
impl ServerHandler for CodeAnalyzer {
    /// Build the `initialize` response: capabilities, server identity, and the
    /// recommended-workflow instructions shown to clients.
    fn get_info(&self) -> InitializeResult {
        let excluded = crate::formatter::EXCLUDED_DIRS.join(", ");
        let instructions = format!(
            "Recommended workflow for unknown repositories:\n\
            1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify the source package directory \
            (typically the largest directory by file count; exclude {excluded}).\n\
            2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for a module map with per-package class and function counts.\n\
            3. Use analyze_file on key files identified in step 2 (prefer files with high class counts for framework entry points).\n\
            4. Use analyze_symbol to trace call graphs for specific functions found in step 3.\n\
            Use analyze_module for a minimal schema (name, line count, functions, imports) when token budget is critical. \
            Prefer summary=true on large directories (1000+ files). Set max_depth=2 for the first call; increase only if packages are too large to differentiate. \
            Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
        );
        let capabilities = ServerCapabilities::builder()
            .enable_logging()
            .enable_tools()
            .enable_tool_list_changed()
            .enable_completions()
            .build();
        let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
            .with_title("Code Analyze MCP")
            .with_description("MCP server for code structure analysis using tree-sitter");
        InitializeResult::new(capabilities)
            .with_server_info(server_info)
            .with_instructions(&instructions)
    }

    /// Store the connected peer and spawn a background task that forwards
    /// buffered `LogEvent`s to the client as logging notifications.
    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
        let mut peer_lock = self.peer.lock().await;
        *peer_lock = Some(context.peer.clone());
        // Release the peer lock before spawning so the forwarder task can
        // acquire it without contention.
        drop(peer_lock);

        let peer = self.peer.clone();
        let event_rx = self.event_rx.clone();

        tokio::spawn(async move {
            // The receiver is stored behind Option so exactly one forwarder
            // task can take ownership of it.
            let rx = {
                let mut rx_lock = event_rx.lock().await;
                rx_lock.take()
            };

            if let Some(mut receiver) = rx {
                let mut buffer = Vec::with_capacity(64);
                loop {
                    // Batch up to 64 events per wakeup; per tokio docs,
                    // recv_many returns 0 (appending nothing) once the channel
                    // is closed and drained.
                    receiver.recv_many(&mut buffer, 64).await;

                    // NOTE(review): this exit test checks the buffer, not
                    // recv_many's return count. If the channel closes while
                    // `buffer` still holds events that could not be delivered
                    // (peer was None below), the loop would spin without ever
                    // breaking — confirm the peer is always set before events
                    // arrive (on_initialized sets it before spawning).
                    if buffer.is_empty() {
                        break;
                    }

                    let peer_lock = peer.lock().await;
                    if let Some(peer) = peer_lock.as_ref() {
                        // Drain the batch; a failed send is logged and the
                        // remaining events are still attempted.
                        for log_event in buffer.drain(..) {
                            let notification = ServerNotification::LoggingMessageNotification(
                                Notification::new(LoggingMessageNotificationParam {
                                    level: log_event.level,
                                    logger: Some(log_event.logger),
                                    data: log_event.data,
                                }),
                            );
                            if let Err(e) = peer.send_notification(notification).await {
                                warn!("Failed to send logging notification: {}", e);
                            }
                        }
                    }
                }
            }
        });
    }

    /// Log client-side cancellations; in-flight work is cancelled elsewhere
    /// (this handler only records the request id and reason).
    #[instrument(skip(self, _context))]
    async fn on_cancelled(
        &self,
        notification: CancelledNotificationParam,
        _context: NotificationContext<RoleServer>,
    ) {
        tracing::info!(
            request_id = ?notification.request_id,
            reason = ?notification.reason,
            "Received cancellation notification"
        );
    }

    /// Serve `completion/complete`: path completions for "path" arguments and
    /// (given a "path" context argument) symbol completions for "symbol".
    /// Unknown argument names yield an empty completion list.
    #[instrument(skip(self, _context))]
    async fn complete(
        &self,
        request: CompleteRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<CompleteResult, ErrorData> {
        let argument_name = &request.argument.name;
        let argument_value = &request.argument.value;

        let completions = match argument_name.as_str() {
            "path" => {
                // Completions are resolved relative to the server's CWD.
                let root = Path::new(".");
                completion::path_completions(root, argument_value)
            }
            "symbol" => {
                // Symbol completion needs to know which file to look in; the
                // client supplies it via the "path" context argument.
                let path_arg = request
                    .context
                    .as_ref()
                    .and_then(|ctx| ctx.get_argument("path"));

                match path_arg {
                    Some(path_str) => {
                        let path = Path::new(path_str);
                        completion::symbol_completions(&self.cache, path, argument_value)
                    }
                    None => Vec::new(),
                }
            }
            _ => Vec::new(),
        };

        // Cap the response at 100 values; total_count/has_more tell the client
        // the list was truncated.
        let total_count = completions.len() as u32;
        let (values, has_more) = if completions.len() > 100 {
            (completions.into_iter().take(100).collect(), true)
        } else {
            (completions, false)
        };

        let completion_info =
            match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
                Ok(info) => info,
                Err(_) => {
                    // Fall back to an empty completion rather than failing the
                    // request if pagination metadata is rejected.
                    CompletionInfo::with_all_values(Vec::new())
                        .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
                }
            };

        Ok(CompleteResult::new(completion_info))
    }

    /// Serve `logging/setLevel`: map MCP's eight logging levels onto tracing's
    /// five (Notice folds into INFO; Critical/Alert/Emergency into ERROR) and
    /// install the result as the active filter.
    async fn set_level(
        &self,
        params: SetLevelRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<(), ErrorData> {
        let level_filter = match params.level {
            LoggingLevel::Debug => LevelFilter::DEBUG,
            LoggingLevel::Info => LevelFilter::INFO,
            LoggingLevel::Notice => LevelFilter::INFO,
            LoggingLevel::Warning => LevelFilter::WARN,
            LoggingLevel::Error => LevelFilter::ERROR,
            LoggingLevel::Critical => LevelFilter::ERROR,
            LoggingLevel::Alert => LevelFilter::ERROR,
            LoggingLevel::Emergency => LevelFilter::ERROR,
        };

        // std Mutex (not tokio) is fine here: the lock is held only for the
        // assignment, never across an await.
        let mut filter_lock = self.log_level_filter.lock().unwrap();
        *filter_lock = level_filter;
        Ok(())
    }
}
1242
#[cfg(test)]
mod tests {
    use super::*;

    /// Emitting progress with no connected peer must be a silent no-op.
    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        let shared_peer = Arc::new(TokioMutex::new(None));
        let filter = Arc::new(Mutex::new(LevelFilter::INFO));
        // Keep the unused channel ends alive for the duration of the test.
        let (_log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            shared_peer,
            filter,
            log_rx,
            crate::metrics::MetricsSender(metrics_tx),
        );
        let token = ProgressToken(NumberOrString::String("test".into()));
        // Must complete without panicking even though no peer is attached.
        analyzer
            .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
            .await;
    }

    /// With verbose=true, the paginated structure output must carry the
    /// PAGINATED/FILES headers and never the SUMMARY block.
    #[tokio::test]
    async fn test_handle_overview_mode_verbose_no_summary_block() {
        use crate::pagination::{PaginationMode, paginate_slice};
        use crate::types::{AnalyzeDirectoryParams, OutputControlParams, PaginationParams};
        use tempfile::TempDir;

        let workdir = TempDir::new().unwrap();
        std::fs::write(workdir.path().join("main.rs"), "fn main() {}").unwrap();

        let shared_peer = Arc::new(TokioMutex::new(None));
        let filter = Arc::new(Mutex::new(LevelFilter::INFO));
        // Keep the unused channel ends alive for the duration of the test.
        let (_log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            shared_peer,
            filter,
            log_rx,
            crate::metrics::MetricsSender(metrics_tx),
        );

        let params = AnalyzeDirectoryParams {
            path: workdir.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: Some(true),
            },
        };

        let cancel = tokio_util::sync::CancellationToken::new();
        let output = analyzer.handle_overview_mode(&params, cancel).await.unwrap();

        // Mirror the handler's size-based fallback: oversized output keeps the
        // preformatted text, otherwise we format the first page verbosely.
        let oversized = output.formatted.len() > SIZE_LIMIT;
        let page =
            paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
        let formatted = if oversized {
            output.formatted.clone()
        } else {
            format_structure_paginated(
                &page.items,
                page.total,
                params.max_depth,
                Some(std::path::Path::new(&params.path)),
                true, // verbose
            )
        };

        assert!(
            !formatted.contains("SUMMARY:"),
            "verbose=true must not emit SUMMARY: block; got: {}",
            &formatted[..formatted.len().min(300)]
        );
        assert!(
            formatted.contains("PAGINATED:"),
            "verbose=true must emit PAGINATED: header"
        );
        assert!(
            formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
            "verbose=true must emit FILES section header"
        );
    }
}