1pub mod logging;
20pub mod metrics;
21
22pub use code_analyze_core::analyze;
23use code_analyze_core::{cache, completion, graph, traversal, types};
24
/// Directory names excluded from analysis.
///
/// NOTE(review): not referenced within this chunk — presumably consumed by the
/// directory-walking code elsewhere in the crate; confirm against callers.
pub(crate) const EXCLUDED_DIRS: &[&str] = &[
    "node_modules",
    "vendor",
    ".git",
    "__pycache__",
    "target",
    "dist",
    "build",
    ".venv",
];
35
36use code_analyze_core::cache::AnalysisCache;
37use code_analyze_core::formatter::{
38 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
39 format_module_info, format_structure_paginated, format_summary,
40};
41use code_analyze_core::pagination::{
42 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
43};
44use code_analyze_core::traversal::{WalkEntry, walk_directory};
45use code_analyze_core::types::{
46 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
47 AnalyzeSymbolParams, SymbolMatchMode,
48};
49use logging::LogEvent;
50use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
51use rmcp::handler::server::wrapper::Parameters;
52use rmcp::model::{
53 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
54 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
55 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
56 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
57};
58use rmcp::service::{NotificationContext, RequestContext};
59use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
60use serde_json::Value;
61use std::path::Path;
62use std::sync::{Arc, Mutex};
63use tokio::sync::{Mutex as TokioMutex, mpsc};
64use tracing::{instrument, warn};
65use tracing_subscriber::filter::LevelFilter;
66
/// Process-wide session counter.
/// NOTE(review): not read or incremented in this chunk — presumably used to
/// assign session ids elsewhere; confirm before relying on it.
static GLOBAL_SESSION_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);

/// Character-count threshold above which tool output is auto-summarized or
/// rejected (error messages refer to this as the "50K chars" limit).
const SIZE_LIMIT: usize = 50_000;
70
/// Returns `true` when `summary=true` is combined with a pagination cursor —
/// a parameter combination the tools reject as mutually exclusive.
#[must_use]
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!((summary, cursor), (Some(true), Some(_)))
}
77
78#[must_use]
79fn error_meta(
80 category: &'static str,
81 is_retryable: bool,
82 suggested_action: &'static str,
83) -> serde_json::Value {
84 serde_json::json!({
85 "errorCategory": category,
86 "isRetryable": is_retryable,
87 "suggestedAction": suggested_action,
88 })
89}
90
91#[must_use]
92fn err_to_tool_result(e: ErrorData) -> CallToolResult {
93 CallToolResult::error(vec![Content::text(e.message)])
94}
95
96fn no_cache_meta() -> Meta {
97 let mut m = serde_json::Map::new();
98 m.insert(
99 "cache_hint".to_string(),
100 serde_json::Value::String("no-cache".to_string()),
101 );
102 Meta(m)
103}
104
/// Paginates a slice of call chains and normalizes the continuation cursor.
///
/// # Errors
/// Returns an internal error when `paginate_slice` fails and an
/// invalid-params error when the continuation cursor cannot be decoded or
/// re-encoded.
fn paginate_focus_chains(
    chains: &[graph::InternalCallChain],
    mode: PaginationMode,
    offset: usize,
    page_size: usize,
) -> Result<(Vec<graph::InternalCallChain>, Option<String>), ErrorData> {
    let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
        ErrorData::new(
            rmcp::model::ErrorCode::INTERNAL_ERROR,
            e.to_string(),
            Some(error_meta("transient", true, "retry the request")),
        )
    })?;

    // First page with no continuation: nothing to re-encode.
    if paginated.next_cursor.is_none() && offset == 0 {
        return Ok((paginated.items, None));
    }

    // Re-encode the continuation cursor so it carries the caller-supplied
    // `mode` while preserving the offset that `paginate_slice` produced.
    let next = if let Some(raw_cursor) = paginated.next_cursor {
        let decoded = decode_cursor(&raw_cursor).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                e.to_string(),
                Some(error_meta("validation", false, "invalid cursor format")),
            )
        })?;
        Some(
            encode_cursor(&CursorData {
                mode,
                offset: decoded.offset,
            })
            .map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            })?,
        )
    } else {
        None
    };

    Ok((paginated.items, next))
}
152
/// MCP server handler exposing the code-analysis tools.
///
/// Cloning is cheap: all shared state lives behind `Arc`s.
#[derive(Clone)]
pub struct CodeAnalyzer {
    /// Router dispatching tool calls to the `#[tool]` methods below.
    tool_router: ToolRouter<Self>,
    /// Cache of analysis results keyed by path/mtime/mode (see handlers below).
    cache: AnalysisCache,
    /// Connected client peer, if any; used to send notifications.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    /// Current logging level filter shared with the logging layer.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    /// Receiver for log events, wrapped in `Option` so it can be moved out
    /// once. NOTE(review): the consumer is not visible in this chunk.
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    /// Channel for emitting per-call metric events.
    metrics_tx: crate::metrics::MetricsSender,
    /// Per-session sequence number, incremented on each tool invocation.
    session_call_seq: Arc<std::sync::atomic::AtomicU32>,
    /// Session identifier attached to metric events, once known.
    session_id: Arc<TokioMutex<Option<String>>>,
}
168
169#[tool_router]
170impl CodeAnalyzer {
    /// Returns every tool registered on a fresh router, for tool discovery
    /// without a constructed server instance.
    #[must_use]
    pub fn list_tools() -> Vec<rmcp::model::Tool> {
        Self::tool_router().list_all()
    }
175
176 pub fn new(
177 peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
178 log_level_filter: Arc<Mutex<LevelFilter>>,
179 event_rx: mpsc::UnboundedReceiver<LogEvent>,
180 metrics_tx: crate::metrics::MetricsSender,
181 ) -> Self {
182 CodeAnalyzer {
183 tool_router: Self::tool_router(),
184 cache: AnalysisCache::new(100),
185 peer,
186 log_level_filter,
187 event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
188 metrics_tx,
189 session_call_seq: Arc::new(std::sync::atomic::AtomicU32::new(0)),
190 session_id: Arc::new(TokioMutex::new(None)),
191 }
192 }
193
    /// Sends a `ProgressNotification` to the client, if a peer is connected.
    ///
    /// Send failures are logged as warnings and otherwise ignored — progress
    /// reporting is best-effort.
    #[instrument(skip(self))]
    async fn emit_progress(
        &self,
        peer: Option<Peer<RoleServer>>,
        token: &ProgressToken,
        progress: f64,
        total: f64,
        message: String,
    ) {
        if let Some(peer) = peer {
            let notification = ServerNotification::ProgressNotification(Notification::new(
                ProgressNotificationParam {
                    progress_token: token.clone(),
                    progress,
                    total: Some(total),
                    message: Some(message),
                },
            ));
            if let Err(e) = peer.send_notification(notification).await {
                warn!("Failed to send progress notification: {}", e);
            }
        }
    }
217
    /// Runs the `analyze_directory` overview analysis for `params.path`.
    ///
    /// Walks the directory, serves from the directory cache when possible,
    /// runs the analysis on a blocking task while forwarding progress
    /// notifications every 100ms, and caches the result on success.
    ///
    /// # Errors
    /// Returns an internal error when the walk fails, the analysis fails or
    /// is cancelled, or the blocking task panics.
    #[allow(clippy::too_many_lines)] #[allow(clippy::cast_precision_loss)] #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<std::sync::Arc<analyze::AnalysisOutput>, ErrorData> {
        let path = Path::new(&params.path);
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        // Walk without a depth limit: the full listing is needed for the
        // cache key and (optionally) subtree counts; depth is applied below.
        let all_entries = walk_directory(path, None).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path permissions and availability",
                )),
            )
        })?;

        // Treat max_depth=0 like "no limit" so both map to the same cache key.
        let canonical_max_depth = max_depth.and_then(|d| if d == 0 { None } else { Some(d) });

        let cache_key = cache::DirectoryCacheKey::from_entries(
            &all_entries,
            canonical_max_depth,
            AnalysisMode::Overview,
        );

        if let Some(cached) = self.cache.get_directory(&cache_key) {
            return Ok(cached);
        }

        // With a positive depth limit, precompute per-subtree counts from the
        // full (unfiltered) walk so truncated levels can still be summarized.
        let subtree_counts = if max_depth.is_some_and(|d| d > 0) {
            Some(traversal::subtree_counts_from_entries(path, &all_entries))
        } else {
            None
        };

        // Now apply the depth limit to what actually gets analyzed.
        let entries: Vec<traversal::WalkEntry> = if let Some(depth) = max_depth
            && depth > 0
        {
            all_entries
                .into_iter()
                .filter(|e| e.depth <= depth as usize)
                .collect()
        } else {
            all_entries
        };

        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // CPU-heavy analysis runs off the async executor.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Progress token derived from the current wall-clock time.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll the shared counter every 100ms, forwarding progress to the
        // client until the task finishes or cancellation is requested.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {current}/{total_files} files"),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification (skipped when cancelled).
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(mut output)) => {
                // Attach the precomputed counts, then cache and return.
                output.subtree_counts = subtree_counts;
                let arc_output = std::sync::Arc::new(output);
                self.cache.put_directory(cache_key, arc_output.clone());
                Ok(arc_output)
            }
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path and file permissions",
                )),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
368
    /// Runs single-file analysis for `params.path`, caching by (path, mtime).
    ///
    /// The cache is only consulted when the file's metadata and modification
    /// time are readable; otherwise the analysis runs uncached.
    ///
    /// # Errors
    /// Returns an internal error when `analyze_file` fails.
    #[instrument(skip(self, params))]
    async fn handle_file_details_mode(
        &self,
        params: &AnalyzeFileParams,
    ) -> Result<std::sync::Arc<analyze::FileAnalysisOutput>, ErrorData> {
        // Key the cache on path + mtime so on-disk edits invalidate entries.
        let cache_key = std::fs::metadata(&params.path).ok().and_then(|meta| {
            meta.modified().ok().map(|mtime| cache::CacheKey {
                path: std::path::PathBuf::from(&params.path),
                modified: mtime,
                mode: AnalysisMode::FileDetails,
            })
        });

        if let Some(ref key) = cache_key
            && let Some(cached) = self.cache.get(key)
        {
            return Ok(cached);
        }

        match analyze::analyze_file(&params.path, params.ast_recursion_limit) {
            Ok(output) => {
                let arc_output = std::sync::Arc::new(output);
                if let Some(key) = cache_key {
                    self.cache.put(key, arc_output.clone());
                }
                Ok(arc_output)
            }
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing file: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check file path and permissions",
                )),
            )),
        }
    }
412
413 fn validate_impl_only(entries: &[WalkEntry]) -> Result<(), ErrorData> {
415 let has_rust = entries.iter().any(|e| {
416 !e.is_dir
417 && e.path
418 .extension()
419 .and_then(|x: &std::ffi::OsStr| x.to_str())
420 == Some("rs")
421 });
422
423 if !has_rust {
424 return Err(ErrorData::new(
425 rmcp::model::ErrorCode::INVALID_PARAMS,
426 "impl_only=true requires Rust source files. No .rs files found in the given path. Use analyze_symbol without impl_only for cross-language analysis.".to_string(),
427 Some(error_meta(
428 "validation",
429 false,
430 "remove impl_only or point to a directory containing .rs files",
431 )),
432 ));
433 }
434 Ok(())
435 }
436
    /// Spawns the focused (symbol) analysis on a blocking task and polls a
    /// shared counter every 100ms, forwarding progress notifications until
    /// the task completes or `ct` is cancelled.
    ///
    /// # Errors
    /// Returns an internal error on cancellation, analysis failure, or when
    /// the blocking task panics.
    #[allow(clippy::cast_precision_loss)] async fn poll_progress_until_done(
        &self,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
        symbol_display: &str,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // Clone everything the 'static move closure below needs.
        let counter_clone = counter.clone();
        let ct_clone = ct.clone();
        let entries_clone = std::sync::Arc::clone(&entries);
        let path_owned = analysis_params.path.clone();
        let symbol_owned = analysis_params.symbol.clone();
        let match_mode_owned = analysis_params.match_mode.clone();
        let follow_depth = analysis_params.follow_depth;
        let max_depth = analysis_params.max_depth;
        let ast_recursion_limit = analysis_params.ast_recursion_limit;
        let use_summary = analysis_params.use_summary;
        let impl_only = analysis_params.impl_only;
        // CPU-heavy analysis runs off the async executor.
        let handle = tokio::task::spawn_blocking(move || {
            let params = analyze::FocusedAnalysisConfig {
                focus: symbol_owned,
                match_mode: match_mode_owned,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                use_summary,
                impl_only,
            };
            analyze::analyze_focused_with_progress_with_entries(
                &path_owned,
                &params,
                &counter_clone,
                &ct_clone,
                &entries_clone,
            )
        });

        // Progress token derived from the current wall-clock time.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;

        // Poll until the task finishes or cancellation is requested.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {current}/{total_files} files for symbol '{symbol_display}'"
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification (skipped when cancelled).
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files for symbol '{symbol_display}'"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(output)) => Ok(output),
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing symbol: {e}"),
                Some(error_meta("resource", false, "check symbol name and file")),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
547
    /// Runs focused analysis, automatically retrying in summary mode when
    /// the first pass exceeds `SIZE_LIMIT` and the caller expressed no
    /// explicit summary/force preference.
    ///
    /// # Errors
    /// Returns `INVALID_PARAMS` when output is oversized and cannot (or may
    /// not) be summarized; propagates analysis errors.
    async fn run_focused_with_auto_summary(
        &self,
        params: &AnalyzeSymbolParams,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // summary=true is honored on the first pass unless force=true overrides it.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        let analysis_params_initial = FocusedAnalysisParams {
            use_summary: use_summary_for_task,
            ..analysis_params.clone()
        };

        let mut output = self
            .poll_progress_until_done(
                &analysis_params_initial,
                counter.clone(),
                ct.clone(),
                entries.clone(),
                total_files,
                &params.symbol,
            )
            .await?;

        // Auto-summarize: no explicit summary choice, no force, and the
        // first pass blew past the size limit — rerun in summary mode.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let analysis_params_retry = FocusedAnalysisParams {
                use_summary: true,
                ..analysis_params.clone()
            };
            let summary_result = self
                .poll_progress_until_done(
                    &analysis_params_retry,
                    counter2,
                    ct,
                    entries,
                    total_files,
                    &params.symbol,
                )
                .await;

            if let Ok(summary_output) = summary_result {
                // Keep the first pass's data; swap in the compact text only.
                output.formatted = summary_output.formatted;
            } else {
                // Summary retry failed: surface the size problem to the caller.
                let estimated_tokens = output.formatted.len() / 4;
                let message = format!(
                    "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                    output.formatted.len(),
                    estimated_tokens
                );
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    message,
                    Some(error_meta(
                        "validation",
                        false,
                        "use summary=true or force=true",
                    )),
                ));
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly refused summary without forcing: reject oversized output.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - summary=true to get compact summary\n\
                 - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                )),
            ));
        }

        Ok(output)
    }
642
    /// Entry point for `analyze_symbol`: walks the directory, validates
    /// `impl_only`, runs focused analysis (with auto-summary), and prepends
    /// the `FILTER` header when `impl_only=true`.
    ///
    /// # Errors
    /// Returns an internal error when the walk fails and propagates errors
    /// from validation and analysis.
    #[instrument(skip(self, params, ct))]
    async fn handle_focused_mode(
        &self,
        params: &AnalyzeSymbolParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        let path = Path::new(&params.path);
        let entries = match walk_directory(path, params.max_depth) {
            Ok(e) => e,
            Err(e) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Failed to walk directory: {e}"),
                    Some(error_meta(
                        "resource",
                        false,
                        "check path permissions and availability",
                    )),
                ));
            }
        };
        // Shared with the blocking analysis task, hence the Arc.
        let entries = std::sync::Arc::new(entries);

        // impl_only is Rust-specific; reject early when no .rs files exist.
        if params.impl_only == Some(true) {
            Self::validate_impl_only(&entries)?;
        }

        let total_files = entries.iter().filter(|e| !e.is_dir).count();
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));

        let analysis_params = FocusedAnalysisParams {
            path: path.to_path_buf(),
            symbol: params.symbol.clone(),
            match_mode: params.match_mode.clone().unwrap_or_default(),
            follow_depth: params.follow_depth.unwrap_or(1),
            max_depth: params.max_depth,
            ast_recursion_limit: params.ast_recursion_limit,
            use_summary: false,
            impl_only: params.impl_only,
        };

        let mut output = self
            .run_focused_with_auto_summary(
                params,
                &analysis_params,
                counter,
                ct,
                entries,
                total_files,
            )
            .await?;

        // Surface the impl_only filtering in the text output.
        if params.impl_only == Some(true) {
            let filter_line = format!(
                "FILTER: impl_only=true ({} of {} callers shown)\n",
                output.impl_trait_caller_count, output.unfiltered_caller_count
            );
            output.formatted = format!("{}{}", filter_line, output.formatted);

            if output.impl_trait_caller_count == 0 {
                output.formatted.push_str(
                    "\nNOTE: No impl-trait callers found. The symbol may be a plain function or struct, not a trait method. Remove impl_only to see all callers.\n"
                );
            }
        }

        Ok(output)
    }
714
715 #[instrument(skip(self, context))]
716 #[tool(
717 name = "analyze_directory",
718 description = "Analyze directory structure and code metrics for multi-file overview. Use this tool for directories; use analyze_file for a single file. Returns a tree with LOC, function count, class count, and test file markers. Respects .gitignore (results may differ from raw filesystem listing because .gitignore rules are applied). For repos with 1000+ files, use max_depth=2-3 and summary=true to stay within token budgets. Note: max_depth controls what is analyzed (traversal depth), while page_size controls how results are returned (chunking); these are independent. Strategy comparison: prefer pagination (page_size=50) over force=true to reduce per-call token overhead; use summary=true when counts and structure are sufficient and no pagination is needed; force=true is an escape hatch for exceptional cases. Empty directories return an empty tree with zero counts. Output auto-summarizes at 50K chars; use summary=true to force compact output. Paginate large results with cursor and page_size. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they? summary=true and cursor are mutually exclusive; passing both returns an error.",
719 output_schema = schema_for_type::<analyze::AnalysisOutput>(),
720 annotations(
721 title = "Analyze Directory",
722 read_only_hint = true,
723 destructive_hint = false,
724 idempotent_hint = true,
725 open_world_hint = false
726 )
727 )]
    // Tool handler: runs the overview analysis, applies the summary/force
    // output policy, paginates the file list, and emits a metric event.
    async fn analyze_directory(
        &self,
        params: Parameters<AnalyzeDirectoryParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let max_depth_val = params.max_depth;
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        // Analysis errors are reported as tool-result errors, not protocol errors.
        let arc_output = match self.handle_overview_mode(&params, ct).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };
        // Take ownership of the (possibly cached) output, cloning only when shared.
        let mut output = match std::sync::Arc::try_unwrap(arc_output) {
            Ok(owned) => owned,
            Err(arc) => (*arc).clone(),
        };

        // summary=true and cursor are mutually exclusive (see tool description).
        if summary_cursor_conflict(
            params.output_control.summary,
            params.pagination.cursor.as_deref(),
        ) {
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                "summary=true is incompatible with a pagination cursor; use one or the other"
                    .to_string(),
                Some(error_meta(
                    "validation",
                    false,
                    "remove cursor or set summary=false",
                )),
            )));
        }

        // Output policy: force > explicit summary choice > auto-summarize on size.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            output.formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            output.formatted = format_summary(
                &output.entries,
                &output.files,
                params.max_depth,
                output.subtree_counts.as_deref(),
            );
        }

        // Resolve pagination offset from the cursor (0 when absent).
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        let paginated =
            match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            output.formatted = format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(Path::new(&params.path)),
                verbose,
            );
        }

        // Summary mode never paginates, so it carries no continuation cursor.
        if use_summary {
            output.next_cursor = None;
        } else {
            output.next_cursor.clone_from(&paginated.next_cursor);
        }

        // Also append the cursor to the text payload for clients that only read text.
        let mut final_text = output.formatted.clone();
        if !use_summary && let Some(cursor) = paginated.next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(&cursor);
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        // Fire-and-forget metrics for this call.
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_directory",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: max_depth_val,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
        });
        Ok(result)
    }
869
870 #[instrument(skip(self, _context))]
871 #[tool(
872 name = "analyze_file",
873 description = "Extract semantic structure from a single source file only; pass a directory to analyze_directory instead. Returns functions with signatures, types, and line ranges; class and method definitions with inheritance, fields, and imports. Supported languages: Rust, Go, Java, Python, TypeScript, TSX, Fortran; unsupported file extensions return an error. Common mistake: passing a directory path returns an error; use analyze_directory for directories. Generated code with deeply nested ASTs may exceed 50K chars; use summary=true to get counts only. Supports pagination for large files via cursor/page_size. Use summary=true for compact output. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py. The fields parameter limits output to specific sections. Valid values: \"functions\", \"classes\", \"imports\". The FILE header (path, line count, section counts) is always emitted. Omit fields to return all sections. When summary=true, fields is ignored. When fields explicitly lists \"imports\", the imports section is rendered regardless of the verbose flag; in all other cases imports require verbose=true.",
874 output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
875 annotations(
876 title = "Analyze File",
877 read_only_hint = true,
878 destructive_hint = false,
879 idempotent_hint = true,
880 open_world_hint = false
881 )
882 )]
    // Tool handler: runs single-file analysis, applies the summary/force
    // output policy, paginates top-level functions, and emits a metric event.
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        _context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        // Analysis errors are reported as tool-result errors, not protocol errors.
        let arc_output = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Output policy: force > explicit summary choice > auto-summarize on size.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // Reachable only when summary was explicitly false (the auto path
            // above would have set use_summary): reject oversized output.
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - Narrow your scope (smaller directory, specific file)\n\
                 - Use analyze_symbol mode for targeted analysis\n\
                 - Reduce max_depth parameter",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true or narrow scope",
                )),
            )));
        }

        // Resolve pagination offset from the cursor (0 when absent).
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Pagination covers only top-level functions: those whose line range
        // is not enclosed by any class's line range.
        let top_level_fns: Vec<crate::types::FunctionInfo> = arc_output
            .semantic
            .functions
            .iter()
            .filter(|func| {
                !arc_output
                    .semantic
                    .classes
                    .iter()
                    .any(|class| func.line >= class.line && func.end_line <= class.end_line)
            })
            .cloned()
            .collect();

        let paginated =
            match paginate_slice(&top_level_fns, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
                params.fields.as_deref(),
            );
        }

        // Summary mode never paginates, so it carries no continuation cursor.
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        // Also append the cursor to the text payload for clients that only read text.
        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(cursor);
        }

        let response_output = analyze::FileAnalysisOutput::new(
            formatted,
            arc_output.semantic.clone(),
            line_count,
            next_cursor,
        );

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        // Fire-and-forget metrics for this call.
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
        });
        Ok(result)
    }
1047
1048 #[instrument(skip(self, context))]
1049 #[tool(
1050 name = "analyze_symbol",
1051 description = "Build call graph for a named function or method across all files in a directory to trace a specific function's usage. Returns direct callers and callees. Default symbol lookup is case-sensitive exact-match (match_mode=exact); myFunc and myfunc are different symbols. If exact match fails, retry with match_mode=insensitive for a case-insensitive search. To list candidates matching a prefix, use match_mode=prefix. To find symbols containing a substring, use match_mode=contains. When prefix or contains matches multiple symbols, an error is returned listing all candidates so you can refine to a single match. A symbol unknown to the graph (not defined and not referenced) returns an error; a symbol that is defined but has no callers or callees returns empty chains without error. follow_depth warning: each increment can multiply output size exponentially; use follow_depth=1 for production use; follow_depth=2+ only for targeted deep dives. Use cursor/page_size to paginate call chains when results exceed page_size. impl_only=true: restrict callers to only those from 'impl Trait for Type' blocks (Rust only); returns INVALID_PARAMS for non-Rust directories; emits a FILTER header showing how many callers were retained. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep; Show only trait impl callers of the write method",
1052 output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
1053 annotations(
1054 title = "Analyze Symbol",
1055 read_only_hint = true,
1056 destructive_hint = false,
1057 idempotent_hint = true,
1058 open_world_hint = false
1059 )
1060 )]
1061 async fn analyze_symbol(
1062 &self,
1063 params: Parameters<AnalyzeSymbolParams>,
1064 context: RequestContext<RoleServer>,
1065 ) -> Result<CallToolResult, ErrorData> {
1066 let params = params.0;
1067 let ct = context.ct.clone();
1068 let t_start = std::time::Instant::now();
1069 let param_path = params.path.clone();
1070 let max_depth_val = params.follow_depth;
1071 let seq = self
1072 .session_call_seq
1073 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1074 let sid = self.session_id.lock().await.clone();
1075
1076 let mut output = match self.handle_focused_mode(¶ms, ct).await {
1078 Ok(v) => v,
1079 Err(e) => return Ok(err_to_tool_result(e)),
1080 };
1081
1082 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
1084 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
1085 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
1086 ErrorData::new(
1087 rmcp::model::ErrorCode::INVALID_PARAMS,
1088 e.to_string(),
1089 Some(error_meta("validation", false, "invalid cursor format")),
1090 )
1091 }) {
1092 Ok(v) => v,
1093 Err(e) => return Ok(err_to_tool_result(e)),
1094 };
1095 cursor_data.offset
1096 } else {
1097 0
1098 };
1099
1100 let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
1102 decode_cursor(cursor_str)
1103 .map(|c| c.mode)
1104 .unwrap_or(PaginationMode::Callers)
1105 } else {
1106 PaginationMode::Callers
1107 };
1108
1109 let use_summary = params.output_control.summary == Some(true);
1110 let verbose = params.output_control.verbose.unwrap_or(false);
1111
1112 let mut callee_cursor = match cursor_mode {
1113 PaginationMode::Callers => {
1114 let (paginated_items, paginated_next) = match paginate_focus_chains(
1115 &output.prod_chains,
1116 PaginationMode::Callers,
1117 offset,
1118 page_size,
1119 ) {
1120 Ok(v) => v,
1121 Err(e) => return Ok(err_to_tool_result(e)),
1122 };
1123
1124 if !use_summary
1125 && (paginated_next.is_some()
1126 || offset > 0
1127 || !verbose
1128 || !output.outgoing_chains.is_empty())
1129 {
1130 let base_path = Path::new(¶ms.path);
1131 output.formatted = format_focused_paginated(
1132 &paginated_items,
1133 output.prod_chains.len(),
1134 PaginationMode::Callers,
1135 ¶ms.symbol,
1136 &output.prod_chains,
1137 &output.test_chains,
1138 &output.outgoing_chains,
1139 output.def_count,
1140 offset,
1141 Some(base_path),
1142 verbose,
1143 );
1144 paginated_next
1145 } else {
1146 None
1147 }
1148 }
1149 PaginationMode::Callees => {
1150 let (paginated_items, paginated_next) = match paginate_focus_chains(
1151 &output.outgoing_chains,
1152 PaginationMode::Callees,
1153 offset,
1154 page_size,
1155 ) {
1156 Ok(v) => v,
1157 Err(e) => return Ok(err_to_tool_result(e)),
1158 };
1159
1160 if paginated_next.is_some() || offset > 0 || !verbose {
1161 let base_path = Path::new(¶ms.path);
1162 output.formatted = format_focused_paginated(
1163 &paginated_items,
1164 output.outgoing_chains.len(),
1165 PaginationMode::Callees,
1166 ¶ms.symbol,
1167 &output.prod_chains,
1168 &output.test_chains,
1169 &output.outgoing_chains,
1170 output.def_count,
1171 offset,
1172 Some(base_path),
1173 verbose,
1174 );
1175 paginated_next
1176 } else {
1177 None
1178 }
1179 }
1180 PaginationMode::Default => {
1181 unreachable!("SymbolFocus should only use Callers or Callees modes")
1182 }
1183 };
1184
1185 if callee_cursor.is_none()
1190 && cursor_mode == PaginationMode::Callers
1191 && !output.outgoing_chains.is_empty()
1192 && !use_summary
1193 && let Ok(cursor) = encode_cursor(&CursorData {
1194 mode: PaginationMode::Callees,
1195 offset: 0,
1196 })
1197 {
1198 callee_cursor = Some(cursor);
1199 }
1200
1201 output.next_cursor.clone_from(&callee_cursor);
1203
1204 let mut final_text = output.formatted.clone();
1206 if let Some(cursor) = callee_cursor {
1207 final_text.push('\n');
1208 final_text.push_str("NEXT_CURSOR: ");
1209 final_text.push_str(&cursor);
1210 }
1211
1212 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1213 .with_meta(Some(no_cache_meta()));
1214 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1215 result.structured_content = Some(structured);
1216 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1217 self.metrics_tx.send(crate::metrics::MetricEvent {
1218 ts: crate::metrics::unix_ms(),
1219 tool: "analyze_symbol",
1220 duration_ms: dur,
1221 output_chars: final_text.len(),
1222 param_path_depth: crate::metrics::path_component_count(¶m_path),
1223 max_depth: max_depth_val,
1224 result: "ok",
1225 error_type: None,
1226 session_id: sid,
1227 seq: Some(seq),
1228 });
1229 Ok(result)
1230 }
1231
1232 #[instrument(skip(self, _context))]
1233 #[tool(
1234 name = "analyze_module",
1235 description = "Index functions and imports in a single source file with minimal token cost. Returns name, line_count, language, function names with line numbers, and import list only -- no signatures, no types, no call graphs, no references. ~75% smaller output than analyze_file. Use analyze_file when you need function signatures, types, or class details; use analyze_module when you only need a function/import index to orient in a file or survey many files in sequence. Use analyze_directory for multi-file overviews; use analyze_symbol to trace call graphs for a specific function. Supported languages: Rust, Go, Java, Python, TypeScript, TSX, Fortran; unsupported extensions return an error. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs. Pagination, summary, force, and verbose parameters are not supported by this tool.",
1236 output_schema = schema_for_type::<types::ModuleInfo>(),
1237 annotations(
1238 title = "Analyze Module",
1239 read_only_hint = true,
1240 destructive_hint = false,
1241 idempotent_hint = true,
1242 open_world_hint = false
1243 )
1244 )]
1245 async fn analyze_module(
1246 &self,
1247 params: Parameters<AnalyzeModuleParams>,
1248 _context: RequestContext<RoleServer>,
1249 ) -> Result<CallToolResult, ErrorData> {
1250 let params = params.0;
1251 let t_start = std::time::Instant::now();
1252 let param_path = params.path.clone();
1253 let seq = self
1254 .session_call_seq
1255 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1256 let sid = self.session_id.lock().await.clone();
1257
1258 if std::fs::metadata(¶ms.path)
1260 .map(|m| m.is_dir())
1261 .unwrap_or(false)
1262 {
1263 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1264 self.metrics_tx.send(crate::metrics::MetricEvent {
1265 ts: crate::metrics::unix_ms(),
1266 tool: "analyze_module",
1267 duration_ms: dur,
1268 output_chars: 0,
1269 param_path_depth: crate::metrics::path_component_count(¶m_path),
1270 max_depth: None,
1271 result: "error",
1272 error_type: Some("invalid_params".to_string()),
1273 session_id: sid.clone(),
1274 seq: Some(seq),
1275 });
1276 return Ok(err_to_tool_result(ErrorData::new(
1277 rmcp::model::ErrorCode::INVALID_PARAMS,
1278 format!(
1279 "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1280 params.path
1281 ),
1282 Some(error_meta(
1283 "validation",
1284 false,
1285 "use analyze_directory for directories",
1286 )),
1287 )));
1288 }
1289
1290 let module_info = match analyze::analyze_module_file(¶ms.path).map_err(|e| {
1291 ErrorData::new(
1292 rmcp::model::ErrorCode::INVALID_PARAMS,
1293 format!("Failed to analyze module: {e}"),
1294 Some(error_meta(
1295 "validation",
1296 false,
1297 "ensure file exists, is readable, and has a supported extension",
1298 )),
1299 )
1300 }) {
1301 Ok(v) => v,
1302 Err(e) => return Ok(err_to_tool_result(e)),
1303 };
1304
1305 let text = format_module_info(&module_info);
1306 let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1307 .with_meta(Some(no_cache_meta()));
1308 let structured = match serde_json::to_value(&module_info).map_err(|e| {
1309 ErrorData::new(
1310 rmcp::model::ErrorCode::INTERNAL_ERROR,
1311 format!("serialization failed: {e}"),
1312 Some(error_meta("internal", false, "report this as a bug")),
1313 )
1314 }) {
1315 Ok(v) => v,
1316 Err(e) => return Ok(err_to_tool_result(e)),
1317 };
1318 result.structured_content = Some(structured);
1319 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1320 self.metrics_tx.send(crate::metrics::MetricEvent {
1321 ts: crate::metrics::unix_ms(),
1322 tool: "analyze_module",
1323 duration_ms: dur,
1324 output_chars: text.len(),
1325 param_path_depth: crate::metrics::path_component_count(¶m_path),
1326 max_depth: None,
1327 result: "ok",
1328 error_type: None,
1329 session_id: sid,
1330 seq: Some(seq),
1331 });
1332 Ok(result)
1333 }
1334}
1335
/// Argument bundle for the focused (single-symbol) analysis path,
/// presumably consumed by internal helpers such as `handle_focused_mode`
/// — confirm at the call sites.
#[derive(Clone)]
struct FocusedAnalysisParams {
    // Directory to analyze for definitions/references of `symbol`.
    path: std::path::PathBuf,
    // Symbol name to look up; interpretation depends on `match_mode`.
    symbol: String,
    // Exact / case-insensitive / prefix / contains matching strategy.
    match_mode: SymbolMatchMode,
    // Call-chain expansion depth; per the tool description, each increment
    // can grow output size sharply.
    follow_depth: u32,
    // Optional depth limit — assumed to bound the directory traversal,
    // TODO confirm against the walker.
    max_depth: Option<u32>,
    // Optional cap on parser/AST recursion — assumed; verify in core crate.
    ast_recursion_limit: Option<usize>,
    // Whether the caller requested summary output.
    use_summary: bool,
    // Rust-only filter: restrict callers to `impl Trait for Type` blocks.
    impl_only: Option<bool>,
}
1348
#[tool_handler]
impl ServerHandler for CodeAnalyzer {
    /// Advertises server identity, capabilities, and the recommended
    /// multi-step analysis workflow in the MCP initialize response.
    fn get_info(&self) -> InitializeResult {
        let excluded = crate::EXCLUDED_DIRS.join(", ");
        let instructions = format!(
            "Recommended workflow for unknown repositories:\n\
             1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify the source package directory \
             (typically the largest directory by file count; exclude {excluded}).\n\
             2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for a module map with per-package class and function counts. Include test directories (e.g., tests/, testutil/, files matching *_test.go, test_*.py, test_*.rs, *_test.rs, *.spec.ts, *.spec.js) in the module map; test files are valid analysis targets and must not be skipped.\n\
             3. For key files identified in step 2, prefer analyze_module to get a lightweight function/import index (~75% smaller output) when you only need function names and imports; call analyze_file when you need signatures, types, or class structure.\n\
             4. Use analyze_symbol to trace call graphs for specific functions found in step 3.\n\
             Prefer summary=true on large directories (1000+ files). Set max_depth=2 for the first call; increase only if packages are too large to differentiate. \
             Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
        );
        let capabilities = ServerCapabilities::builder()
            .enable_logging()
            .enable_tools()
            .enable_tool_list_changed()
            .enable_completions()
            .build();
        let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
            .with_title("Code Analyze MCP")
            .with_description("MCP server for code structure analysis using tree-sitter");
        InitializeResult::new(capabilities)
            .with_server_info(server_info)
            .with_instructions(&instructions)
    }

    /// Post-initialize hook: store the peer handle, mint a fresh session id,
    /// reset the per-session call counter, and spawn a background task that
    /// forwards buffered log events to the client as logging notifications.
    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
        // Store the peer first, so the forwarding task spawned below will
        // find it populated when it takes the lock.
        let mut peer_lock = self.peer.lock().await;
        *peer_lock = Some(context.peer.clone());
        drop(peer_lock);

        // Session id = epoch-millis + global monotonic counter, unique even
        // for sessions created within the same millisecond.
        let millis = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis()
            .try_into()
            .unwrap_or(u64::MAX);
        let counter = GLOBAL_SESSION_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        let sid = format!("{millis}-{counter}");
        {
            let mut session_id_lock = self.session_id.lock().await;
            *session_id_lock = Some(sid);
        }
        self.session_call_seq
            .store(0, std::sync::atomic::Ordering::Relaxed);

        let peer = self.peer.clone();
        let event_rx = self.event_rx.clone();

        tokio::spawn(async move {
            // Take exclusive ownership of the receiver; a second
            // on_initialized would find None here and the task would exit.
            let rx = {
                let mut rx_lock = event_rx.lock().await;
                rx_lock.take()
            };

            if let Some(mut receiver) = rx {
                let mut buffer = Vec::with_capacity(64);
                loop {
                    // Batch up to 64 events per wakeup. When the channel is
                    // closed and drained, recv_many appends nothing.
                    receiver.recv_many(&mut buffer, 64).await;

                    // Empty buffer after recv_many => channel closed: shut
                    // the forwarding task down.
                    if buffer.is_empty() {
                        break;
                    }

                    // NOTE(review): if the peer were ever None here the
                    // buffer would not be drained; the peer is stored above
                    // before this task is spawned, so that path should not
                    // occur in practice — confirm if on_initialized ordering
                    // changes.
                    let peer_lock = peer.lock().await;
                    if let Some(peer) = peer_lock.as_ref() {
                        for log_event in buffer.drain(..) {
                            let notification = ServerNotification::LoggingMessageNotification(
                                Notification::new(LoggingMessageNotificationParam {
                                    level: log_event.level,
                                    logger: Some(log_event.logger),
                                    data: log_event.data,
                                }),
                            );
                            // Best-effort delivery: a failed send is logged
                            // locally and does not stop the loop.
                            if let Err(e) = peer.send_notification(notification).await {
                                warn!("Failed to send logging notification: {}", e);
                            }
                        }
                    }
                }
            }
        });
    }

    /// Record client-side cancellations; actual cancellation is propagated
    /// via the request's CancellationToken elsewhere.
    #[instrument(skip(self, _context))]
    async fn on_cancelled(
        &self,
        notification: CancelledNotificationParam,
        _context: NotificationContext<RoleServer>,
    ) {
        tracing::info!(
            request_id = ?notification.request_id,
            reason = ?notification.reason,
            "Received cancellation notification"
        );
    }

    /// Argument auto-completion: filesystem suggestions for "path" (rooted
    /// at the server's CWD), cached-symbol suggestions for "symbol" (which
    /// require a "path" value in the completion context); anything else
    /// completes to nothing.
    #[instrument(skip(self, _context))]
    async fn complete(
        &self,
        request: CompleteRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<CompleteResult, ErrorData> {
        let argument_name = &request.argument.name;
        let argument_value = &request.argument.value;

        let completions = match argument_name.as_str() {
            "path" => {
                let root = Path::new(".");
                completion::path_completions(root, argument_value)
            }
            "symbol" => {
                let path_arg = request
                    .context
                    .as_ref()
                    .and_then(|ctx| ctx.get_argument("path"));

                match path_arg {
                    Some(path_str) => {
                        let path = Path::new(path_str);
                        completion::symbol_completions(&self.cache, path, argument_value)
                    }
                    None => Vec::new(),
                }
            }
            _ => Vec::new(),
        };

        // Cap the response at 100 values, reporting the true total and
        // whether more are available.
        let total_count = u32::try_from(completions.len()).unwrap_or(u32::MAX);
        let (values, has_more) = if completions.len() > 100 {
            (completions.into_iter().take(100).collect(), true)
        } else {
            (completions, false)
        };

        let completion_info =
            match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
                Ok(info) => info,
                Err(_) => {
                    // Fall back to an empty completion list rather than
                    // failing the request.
                    CompletionInfo::with_all_values(Vec::new())
                        .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
                }
            };

        Ok(CompleteResult::new(completion_info))
    }

    /// Map the client-requested MCP logging level onto a tracing
    /// LevelFilter and store it for the logging layer to consult.
    async fn set_level(
        &self,
        params: SetLevelRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<(), ErrorData> {
        // MCP has more levels than tracing: Notice folds into INFO, and
        // everything at Error severity or above folds into ERROR.
        let level_filter = match params.level {
            LoggingLevel::Debug => LevelFilter::DEBUG,
            LoggingLevel::Info | LoggingLevel::Notice => LevelFilter::INFO,
            LoggingLevel::Warning => LevelFilter::WARN,
            LoggingLevel::Error
            | LoggingLevel::Critical
            | LoggingLevel::Alert
            | LoggingLevel::Emergency => LevelFilter::ERROR,
        };

        let mut filter_lock = self.log_level_filter.lock().unwrap();
        *filter_lock = level_filter;
        Ok(())
    }
}
1528
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a `CodeAnalyzer` wired to throwaway channels. The unused
    /// sender/receiver halves are dropped immediately, which is fine for
    /// tests that never poll them (existing tests already rely on this).
    fn make_analyzer() -> CodeAnalyzer {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        )
    }

    /// emit_progress with no connected peer must be a silent no-op.
    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        let analyzer = make_analyzer();
        let token = ProgressToken(NumberOrString::String("test".into()));
        analyzer
            .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
            .await;
    }

    /// Only summary=true combined with a cursor counts as a conflict.
    #[test]
    fn test_summary_cursor_conflict() {
        assert!(summary_cursor_conflict(Some(true), Some("cursor")));
        assert!(!summary_cursor_conflict(Some(true), None));
        assert!(!summary_cursor_conflict(None, Some("x")));
        assert!(!summary_cursor_conflict(None, None));
    }

    /// impl_only over a non-Rust tree must be rejected with INVALID_PARAMS.
    #[tokio::test]
    async fn test_validate_impl_only_non_rust_returns_invalid_params() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.py"), "def foo(): pass").unwrap();

        let entries: Vec<WalkEntry> = walk_directory(dir.path(), None).unwrap_or_default();
        let result = CodeAnalyzer::validate_impl_only(&entries);
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert_eq!(err.code, rmcp::model::ErrorCode::INVALID_PARAMS);
    }

    /// Successful directory analysis pairs with the no-cache meta hint.
    #[tokio::test]
    async fn test_no_cache_meta_on_analyze_directory_result() {
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.rs"), "fn main() {}").unwrap();

        let analyzer = make_analyzer();
        let params = AnalyzeDirectoryParams {
            path: dir.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: None,
            },
        };
        let ct = tokio_util::sync::CancellationToken::new();
        // The analysis must succeed; only the meta constant is inspected.
        analyzer.handle_overview_mode(&params, ct).await.unwrap();
        let meta = no_cache_meta();
        assert_eq!(
            meta.0.get("cache_hint").and_then(|v| v.as_str()),
            Some("no-cache"),
        );
    }

    /// path_completions should find the workspace's code-* crate dirs.
    #[test]
    fn test_complete_path_completions_returns_suggestions() {
        let manifest_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR"));
        let workspace_root = manifest_dir.parent().expect("manifest dir has parent");
        let suggestions = completion::path_completions(workspace_root, "code-");
        assert!(
            !suggestions.is_empty(),
            "expected completions for prefix 'code-' in workspace root"
        );
    }

    /// verbose=true output uses the PAGINATED/FILES headers and must never
    /// fall back to the SUMMARY block.
    #[tokio::test]
    async fn test_handle_overview_mode_verbose_no_summary_block() {
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let tmp = TempDir::new().unwrap();
        std::fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();

        let analyzer = make_analyzer();

        let params = AnalyzeDirectoryParams {
            path: tmp.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: Some(true),
            },
        };

        let ct = tokio_util::sync::CancellationToken::new();
        let output = analyzer.handle_overview_mode(&params, ct).await.unwrap();

        // Mirror the tool's own formatting decision for a small directory.
        let use_summary = output.formatted.len() > SIZE_LIMIT;
        let paginated =
            paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
        let verbose = true;
        let formatted = if !use_summary {
            format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(std::path::Path::new(&params.path)),
                verbose,
            )
        } else {
            output.formatted.clone()
        };

        assert!(
            !formatted.contains("SUMMARY:"),
            "verbose=true must not emit SUMMARY: block; got: {}",
            &formatted[..formatted.len().min(300)]
        );
        assert!(
            formatted.contains("PAGINATED:"),
            "verbose=true must emit PAGINATED: header"
        );
        assert!(
            formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
            "verbose=true must emit FILES section header"
        );
    }
}