1pub mod analyze;
18pub mod cache;
19pub mod completion;
20pub mod formatter;
21pub mod graph;
22pub mod lang;
23pub mod languages;
24pub mod logging;
25pub mod metrics;
26pub mod pagination;
27pub mod parser;
28pub(crate) mod schema_helpers;
29pub mod test_detection;
30pub mod traversal;
31pub mod types;
32
/// Directory names skipped during traversal: dependency caches,
/// VCS metadata, build artifacts, and virtual environments.
// NOTE(review): consumers of this list are not visible in this chunk —
// presumably the traversal module matches these by exact name at any depth.
pub(crate) const EXCLUDED_DIRS: &[&str] = &[
    "node_modules",
    "vendor",
    ".git",
    "__pycache__",
    "target",
    "dist",
    "build",
    ".venv",
];
43
44use cache::AnalysisCache;
45use formatter::{
46 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
47 format_module_info, format_structure_paginated, format_summary,
48};
49use logging::LogEvent;
50use pagination::{
51 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
52};
53use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
54use rmcp::handler::server::wrapper::Parameters;
55use rmcp::model::{
56 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
57 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
58 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
59 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
60};
61use rmcp::service::{NotificationContext, RequestContext};
62use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
63use serde_json::Value;
64use std::path::Path;
65use std::sync::{Arc, Mutex};
66use tokio::sync::{Mutex as TokioMutex, mpsc};
67use tracing::{instrument, warn};
68use tracing_subscriber::filter::LevelFilter;
69use traversal::{WalkEntry, walk_directory};
70use types::{
71 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
72 AnalyzeSymbolParams, SymbolMatchMode,
73};
74
/// Process-wide monotonically increasing counter.
// NOTE(review): presumably used to mint unique session identifiers —
// its readers are not visible in this chunk; confirm at call sites.
static GLOBAL_SESSION_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);

/// Output-size threshold in characters (~12.5K tokens at 4 chars/token);
/// tool handlers auto-summarize or reject output larger than this.
const SIZE_LIMIT: usize = 50_000;
78
/// Reports whether the caller requested `summary=true` while also
/// supplying a pagination cursor — the two options are mutually
/// exclusive for paginated tools.
#[must_use]
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!((summary, cursor), (Some(true), Some(_)))
}
85
86#[must_use]
87fn error_meta(
88 category: &'static str,
89 is_retryable: bool,
90 suggested_action: &'static str,
91) -> serde_json::Value {
92 serde_json::json!({
93 "errorCategory": category,
94 "isRetryable": is_retryable,
95 "suggestedAction": suggested_action,
96 })
97}
98
99#[must_use]
100fn err_to_tool_result(e: ErrorData) -> CallToolResult {
101 CallToolResult::error(vec![Content::text(e.message)])
102}
103
104fn no_cache_meta() -> Meta {
105 let mut m = serde_json::Map::new();
106 m.insert(
107 "cache_hint".to_string(),
108 serde_json::Value::String("no-cache".to_string()),
109 );
110 Meta(m)
111}
112
/// Paginates a slice of call chains and re-stamps the requested
/// pagination `mode` into the next-page cursor, so a follow-up request
/// resumes in the same mode.
///
/// Returns the page of chains plus an opaque cursor for the next page
/// (`None` when the listing is exhausted).
///
/// # Errors
/// `INTERNAL_ERROR` when slicing fails; `INVALID_PARAMS` when a cursor
/// emitted by `paginate_slice` cannot be decoded or re-encoded.
fn paginate_focus_chains(
    chains: &[graph::InternalCallChain],
    mode: PaginationMode,
    offset: usize,
    page_size: usize,
) -> Result<(Vec<graph::InternalCallChain>, Option<String>), ErrorData> {
    let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
        ErrorData::new(
            rmcp::model::ErrorCode::INTERNAL_ERROR,
            e.to_string(),
            Some(error_meta("transient", true, "retry the request")),
        )
    })?;

    // Fast path: first page with no follow-up — nothing to re-encode.
    if paginated.next_cursor.is_none() && offset == 0 {
        return Ok((paginated.items, None));
    }

    // Decode the cursor produced by paginate_slice, then re-encode it
    // with the caller-requested mode (only the offset is preserved).
    let next = if let Some(raw_cursor) = paginated.next_cursor {
        let decoded = decode_cursor(&raw_cursor).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                e.to_string(),
                Some(error_meta("validation", false, "invalid cursor format")),
            )
        })?;
        Some(
            encode_cursor(&CursorData {
                mode,
                offset: decoded.offset,
            })
            .map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            })?,
        )
    } else {
        None
    };

    Ok((paginated.items, next))
}
160
/// MCP server handler exposing the code-analysis tools.
///
/// Cloning is cheap: all shared state lives behind `Arc`s.
#[derive(Clone)]
pub struct CodeAnalyzer {
    /// Router generated by `#[tool_router]`; dispatches tool calls.
    tool_router: ToolRouter<Self>,
    /// Cache of prior analysis results (capacity set in `new`).
    cache: AnalysisCache,
    /// Connected client peer; `None` until a session is established.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    /// Shared minimum level for forwarded log messages.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    /// Receiver for internal log events; stored as `Option` so it can
    /// be taken exactly once by whichever task pumps the log stream.
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    /// Channel for emitting per-call metric events.
    metrics_tx: crate::metrics::MetricsSender,
    /// Sequence number of tool calls within the current session.
    session_call_seq: Arc<std::sync::atomic::AtomicU32>,
    /// Identifier of the active session, if one has been assigned.
    session_id: Arc<TokioMutex<Option<String>>>,
}
176
177#[tool_router]
178impl CodeAnalyzer {
    /// Lists every tool registered on the generated router, used to
    /// advertise server capabilities.
    #[must_use]
    pub fn list_tools() -> Vec<rmcp::model::Tool> {
        Self::tool_router().list_all()
    }
183
    /// Creates a new analyzer.
    ///
    /// * `peer` — shared slot later filled with the connected client peer.
    /// * `log_level_filter` — shared minimum level for forwarded logs.
    /// * `event_rx` — log-event receiver, stored for one-time pickup.
    /// * `metrics_tx` — sink for per-call metric events.
    pub fn new(
        peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
        log_level_filter: Arc<Mutex<LevelFilter>>,
        event_rx: mpsc::UnboundedReceiver<LogEvent>,
        metrics_tx: crate::metrics::MetricsSender,
    ) -> Self {
        CodeAnalyzer {
            tool_router: Self::tool_router(),
            // Cache up to 100 analysis results.
            cache: AnalysisCache::new(100),
            peer,
            log_level_filter,
            event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
            metrics_tx,
            // Per-session call sequence starts at zero.
            session_call_seq: Arc::new(std::sync::atomic::AtomicU32::new(0)),
            // No session id until the client initializes one.
            session_id: Arc::new(TokioMutex::new(None)),
        }
    }
201
202 #[instrument(skip(self))]
203 async fn emit_progress(
204 &self,
205 peer: Option<Peer<RoleServer>>,
206 token: &ProgressToken,
207 progress: f64,
208 total: f64,
209 message: String,
210 ) {
211 if let Some(peer) = peer {
212 let notification = ServerNotification::ProgressNotification(Notification::new(
213 ProgressNotificationParam {
214 progress_token: token.clone(),
215 progress,
216 total: Some(total),
217 message: Some(message),
218 },
219 ));
220 if let Err(e) = peer.send_notification(notification).await {
221 warn!("Failed to send progress notification: {}", e);
222 }
223 }
224 }
225
    /// Runs the directory-overview analysis: walks the tree, consults
    /// the directory cache, spawns the blocking analysis task, and
    /// emits progress notifications while it runs.
    ///
    /// Returns the (possibly cached) analysis output.
    ///
    /// # Errors
    /// Walk failures and analysis errors are mapped to `INTERNAL_ERROR`
    /// with category metadata; cancellation surfaces as a "transient"
    /// error.
    #[allow(clippy::too_many_lines)] #[allow(clippy::cast_precision_loss)] #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<std::sync::Arc<analyze::AnalysisOutput>, ErrorData> {
        let path = Path::new(&params.path);
        // Shared counter the blocking task increments per analyzed file.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        // Walk the full tree (no depth limit) up front; depth filtering
        // happens below, after the cache key is derived from all entries.
        let all_entries = walk_directory(path, None).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path permissions and availability",
                )),
            )
        })?;

        // max_depth=0 is treated as "unlimited" so it shares a cache
        // slot with max_depth=None.
        let canonical_max_depth = max_depth.and_then(|d| if d == 0 { None } else { Some(d) });

        let cache_key = cache::DirectoryCacheKey::from_entries(
            &all_entries,
            canonical_max_depth,
            AnalysisMode::Overview,
        );

        // Cache hit: skip the analysis entirely.
        if let Some(cached) = self.cache.get_directory(&cache_key) {
            return Ok(cached);
        }

        // Per-subtree counts are only meaningful when results are
        // depth-truncated.
        let subtree_counts = if max_depth.is_some_and(|d| d > 0) {
            Some(traversal::subtree_counts_from_entries(path, &all_entries))
        } else {
            None
        };

        // Apply the depth limit (if any) to the entries handed to the
        // analyzer.
        let entries: Vec<traversal::WalkEntry> = if let Some(depth) = max_depth
            && depth > 0
        {
            all_entries
                .into_iter()
                .filter(|e| e.depth <= depth as usize)
                .collect()
        } else {
            all_entries
        };

        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // Run the CPU-bound analysis off the async runtime.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Unique-ish progress token derived from the current time.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll every 100ms, forwarding progress until the task finishes
        // or the request is cancelled. On cancellation we only stop
        // polling here; the blocking task observes the same token and
        // exits with AnalyzeError::Cancelled.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {current}/{total_files} files"),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Emit a final 100% notification on normal completion.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(mut output)) => {
                output.subtree_counts = subtree_counts;
                let arc_output = std::sync::Arc::new(output);
                // Populate the cache for subsequent identical requests.
                self.cache.put_directory(cache_key, arc_output.clone());
                Ok(arc_output)
            }
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path and file permissions",
                )),
            )),
            // The blocking task panicked or was aborted.
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
376
377 #[instrument(skip(self, params))]
380 async fn handle_file_details_mode(
381 &self,
382 params: &AnalyzeFileParams,
383 ) -> Result<std::sync::Arc<analyze::FileAnalysisOutput>, ErrorData> {
384 let cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
386 meta.modified().ok().map(|mtime| cache::CacheKey {
387 path: std::path::PathBuf::from(¶ms.path),
388 modified: mtime,
389 mode: AnalysisMode::FileDetails,
390 })
391 });
392
393 if let Some(ref key) = cache_key
395 && let Some(cached) = self.cache.get(key)
396 {
397 return Ok(cached);
398 }
399
400 match analyze::analyze_file(¶ms.path, params.ast_recursion_limit) {
402 Ok(output) => {
403 let arc_output = std::sync::Arc::new(output);
404 if let Some(key) = cache_key {
405 self.cache.put(key, arc_output.clone());
406 }
407 Ok(arc_output)
408 }
409 Err(e) => Err(ErrorData::new(
410 rmcp::model::ErrorCode::INTERNAL_ERROR,
411 format!("Error analyzing file: {e}"),
412 Some(error_meta(
413 "resource",
414 false,
415 "check file path and permissions",
416 )),
417 )),
418 }
419 }
420
421 fn validate_impl_only(entries: &[WalkEntry]) -> Result<(), ErrorData> {
423 let has_rust = entries.iter().any(|e| {
424 !e.is_dir
425 && e.path
426 .extension()
427 .and_then(|x: &std::ffi::OsStr| x.to_str())
428 == Some("rs")
429 });
430
431 if !has_rust {
432 return Err(ErrorData::new(
433 rmcp::model::ErrorCode::INVALID_PARAMS,
434 "impl_only=true requires Rust source files. No .rs files found in the given path. Use analyze_symbol without impl_only for cross-language analysis.".to_string(),
435 Some(error_meta(
436 "validation",
437 false,
438 "remove impl_only or point to a directory containing .rs files",
439 )),
440 ));
441 }
442 Ok(())
443 }
444
    /// Spawns the blocking focused-symbol analysis and polls it,
    /// forwarding progress notifications until it finishes or the
    /// request is cancelled.
    ///
    /// `counter` is incremented by the blocking task per analyzed file;
    /// `symbol_display` is only used in progress-message text.
    ///
    /// # Errors
    /// Cancellation and join failures map to "transient" errors;
    /// analysis failures map to "resource" errors.
    #[allow(clippy::cast_precision_loss)] async fn poll_progress_until_done(
        &self,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
        symbol_display: &str,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // Clone everything the 'static blocking closure needs to own.
        let counter_clone = counter.clone();
        let ct_clone = ct.clone();
        let entries_clone = std::sync::Arc::clone(&entries);
        let path_owned = analysis_params.path.clone();
        let symbol_owned = analysis_params.symbol.clone();
        let match_mode_owned = analysis_params.match_mode.clone();
        let follow_depth = analysis_params.follow_depth;
        let max_depth = analysis_params.max_depth;
        let ast_recursion_limit = analysis_params.ast_recursion_limit;
        let use_summary = analysis_params.use_summary;
        let impl_only = analysis_params.impl_only;
        // Run the CPU-bound symbol analysis off the async runtime.
        let handle = tokio::task::spawn_blocking(move || {
            let params = analyze::AnalyzeSymbolParams {
                focus: symbol_owned,
                match_mode: match_mode_owned,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                use_summary,
                impl_only,
            };
            analyze::analyze_focused_with_progress_with_entries(
                &path_owned,
                &params,
                &counter_clone,
                &ct_clone,
                &entries_clone,
            )
        });

        // Unique-ish progress token derived from the current time.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;

        // Poll every 100ms; the blocking task observes the same
        // cancellation token and exits with AnalyzeError::Cancelled.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {current}/{total_files} files for symbol '{symbol_display}'"
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Emit a final 100% notification on normal completion.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files for symbol '{symbol_display}'"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(output)) => Ok(output),
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing symbol: {e}"),
                Some(error_meta("resource", false, "check symbol name and file")),
            )),
            // The blocking task panicked or was aborted.
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
555
    /// Runs the focused analysis, automatically retrying in summary
    /// mode when the caller expressed no preference and the full output
    /// exceeds `SIZE_LIMIT`.
    ///
    /// Policy, in order:
    /// 1. First pass honors `summary=true` unless `force=true`.
    /// 2. No explicit `summary`, no `force`, oversized output: retry in
    ///    summary mode; if the retry also fails, return a size error.
    /// 3. Explicit `summary=false` without `force` and oversized
    ///    output: return a size error immediately (no retry).
    ///
    /// # Errors
    /// Propagates analysis errors, plus `INVALID_PARAMS` for the
    /// oversized-output cases above.
    async fn run_focused_with_auto_summary(
        &self,
        params: &AnalyzeSymbolParams,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // force=true always requests full output on the first pass.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        let analysis_params_initial = FocusedAnalysisParams {
            use_summary: use_summary_for_task,
            ..analysis_params.clone()
        };

        let mut output = self
            .poll_progress_until_done(
                &analysis_params_initial,
                counter.clone(),
                ct.clone(),
                entries.clone(),
                total_files,
                &params.symbol,
            )
            .await?;

        // Auto-summarize: the caller expressed no preference and the
        // full output is oversized, so re-run in summary mode.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            // Fresh counter so retry progress starts from zero.
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let analysis_params_retry = FocusedAnalysisParams {
                use_summary: true,
                ..analysis_params.clone()
            };
            let summary_result = self
                .poll_progress_until_done(
                    &analysis_params_retry,
                    counter2,
                    ct,
                    entries,
                    total_files,
                    &params.symbol,
                )
                .await;

            if let Ok(summary_output) = summary_result {
                // Keep the original output's other fields; only the
                // formatted text is replaced by the summary.
                output.formatted = summary_output.formatted;
            } else {
                // Summary retry failed: report the size problem.
                // Rough heuristic: ~4 chars per token.
                let estimated_tokens = output.formatted.len() / 4;
                let message = format!(
                    "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                    output.formatted.len(),
                    estimated_tokens
                );
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    message,
                    Some(error_meta(
                        "validation",
                        false,
                        "use summary=true or force=true",
                    )),
                ));
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly declined a summary but the output is
            // oversized and not forced: reject with guidance.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - summary=true to get compact summary\n\
                 - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                )),
            ));
        }

        Ok(output)
    }
650
651 #[instrument(skip(self, params, ct))]
655 async fn handle_focused_mode(
656 &self,
657 params: &AnalyzeSymbolParams,
658 ct: tokio_util::sync::CancellationToken,
659 ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
660 let path = Path::new(¶ms.path);
661 let entries = match walk_directory(path, params.max_depth) {
662 Ok(e) => e,
663 Err(e) => {
664 return Err(ErrorData::new(
665 rmcp::model::ErrorCode::INTERNAL_ERROR,
666 format!("Failed to walk directory: {e}"),
667 Some(error_meta(
668 "resource",
669 false,
670 "check path permissions and availability",
671 )),
672 ));
673 }
674 };
675 let entries = std::sync::Arc::new(entries);
676
677 if params.impl_only == Some(true) {
678 Self::validate_impl_only(&entries)?;
679 }
680
681 let total_files = entries.iter().filter(|e| !e.is_dir).count();
682 let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
683
684 let analysis_params = FocusedAnalysisParams {
685 path: path.to_path_buf(),
686 symbol: params.symbol.clone(),
687 match_mode: params.match_mode.clone().unwrap_or_default(),
688 follow_depth: params.follow_depth.unwrap_or(1),
689 max_depth: params.max_depth,
690 ast_recursion_limit: params.ast_recursion_limit,
691 use_summary: false,
692 impl_only: params.impl_only,
693 };
694
695 let mut output = self
696 .run_focused_with_auto_summary(
697 params,
698 &analysis_params,
699 counter,
700 ct,
701 entries,
702 total_files,
703 )
704 .await?;
705
706 if params.impl_only == Some(true) {
707 let filter_line = format!(
708 "FILTER: impl_only=true ({} of {} callers shown)\n",
709 output.impl_trait_caller_count, output.unfiltered_caller_count
710 );
711 output.formatted = format!("{}{}", filter_line, output.formatted);
712
713 if output.impl_trait_caller_count == 0 {
714 output.formatted.push_str(
715 "\nNOTE: No impl-trait callers found. The symbol may be a plain function or struct, not a trait method. Remove impl_only to see all callers.\n"
716 );
717 }
718 }
719
720 Ok(output)
721 }
722
723 #[instrument(skip(self, context))]
724 #[tool(
725 name = "analyze_directory",
726 description = "Analyze directory structure and code metrics for multi-file overview. Use this tool for directories; use analyze_file for a single file. Returns a tree with LOC, function count, class count, and test file markers. Respects .gitignore (results may differ from raw filesystem listing because .gitignore rules are applied). For repos with 1000+ files, use max_depth=2-3 and summary=true to stay within token budgets. Note: max_depth controls what is analyzed (traversal depth), while page_size controls how results are returned (chunking); these are independent. Strategy comparison: prefer pagination (page_size=50) over force=true to reduce per-call token overhead; use summary=true when counts and structure are sufficient and no pagination is needed; force=true is an escape hatch for exceptional cases. Empty directories return an empty tree with zero counts. Output auto-summarizes at 50K chars; use summary=true to force compact output. Paginate large results with cursor and page_size. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they? summary=true and cursor are mutually exclusive; passing both returns an error.",
727 output_schema = schema_for_type::<analyze::AnalysisOutput>(),
728 annotations(
729 title = "Analyze Directory",
730 read_only_hint = true,
731 destructive_hint = false,
732 idempotent_hint = true,
733 open_world_hint = false
734 )
735 )]
736 async fn analyze_directory(
737 &self,
738 params: Parameters<AnalyzeDirectoryParams>,
739 context: RequestContext<RoleServer>,
740 ) -> Result<CallToolResult, ErrorData> {
741 let params = params.0;
742 let ct = context.ct.clone();
743 let t_start = std::time::Instant::now();
744 let param_path = params.path.clone();
745 let max_depth_val = params.max_depth;
746 let seq = self
747 .session_call_seq
748 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
749 let sid = self.session_id.lock().await.clone();
750
751 let arc_output = match self.handle_overview_mode(¶ms, ct).await {
753 Ok(v) => v,
754 Err(e) => return Ok(err_to_tool_result(e)),
755 };
756 let mut output = match std::sync::Arc::try_unwrap(arc_output) {
759 Ok(owned) => owned,
760 Err(arc) => (*arc).clone(),
761 };
762
763 if summary_cursor_conflict(
766 params.output_control.summary,
767 params.pagination.cursor.as_deref(),
768 ) {
769 return Ok(err_to_tool_result(ErrorData::new(
770 rmcp::model::ErrorCode::INVALID_PARAMS,
771 "summary=true is incompatible with a pagination cursor; use one or the other"
772 .to_string(),
773 Some(error_meta(
774 "validation",
775 false,
776 "remove cursor or set summary=false",
777 )),
778 )));
779 }
780
781 let use_summary = if params.output_control.force == Some(true) {
783 false
784 } else if params.output_control.summary == Some(true) {
785 true
786 } else if params.output_control.summary == Some(false) {
787 false
788 } else {
789 output.formatted.len() > SIZE_LIMIT
790 };
791
792 if use_summary {
793 output.formatted = format_summary(
794 &output.entries,
795 &output.files,
796 params.max_depth,
797 output.subtree_counts.as_deref(),
798 );
799 }
800
801 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
803 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
804 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
805 ErrorData::new(
806 rmcp::model::ErrorCode::INVALID_PARAMS,
807 e.to_string(),
808 Some(error_meta("validation", false, "invalid cursor format")),
809 )
810 }) {
811 Ok(v) => v,
812 Err(e) => return Ok(err_to_tool_result(e)),
813 };
814 cursor_data.offset
815 } else {
816 0
817 };
818
819 let paginated =
821 match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
822 Ok(v) => v,
823 Err(e) => {
824 return Ok(err_to_tool_result(ErrorData::new(
825 rmcp::model::ErrorCode::INTERNAL_ERROR,
826 e.to_string(),
827 Some(error_meta("transient", true, "retry the request")),
828 )));
829 }
830 };
831
832 let verbose = params.output_control.verbose.unwrap_or(false);
833 if !use_summary {
834 output.formatted = format_structure_paginated(
835 &paginated.items,
836 paginated.total,
837 params.max_depth,
838 Some(Path::new(¶ms.path)),
839 verbose,
840 );
841 }
842
843 if use_summary {
845 output.next_cursor = None;
846 } else {
847 output.next_cursor.clone_from(&paginated.next_cursor);
848 }
849
850 let mut final_text = output.formatted.clone();
852 if !use_summary && let Some(cursor) = paginated.next_cursor {
853 final_text.push('\n');
854 final_text.push_str("NEXT_CURSOR: ");
855 final_text.push_str(&cursor);
856 }
857
858 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
859 .with_meta(Some(no_cache_meta()));
860 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
861 result.structured_content = Some(structured);
862 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
863 self.metrics_tx.send(crate::metrics::MetricEvent {
864 ts: crate::metrics::unix_ms(),
865 tool: "analyze_directory",
866 duration_ms: dur,
867 output_chars: final_text.len(),
868 param_path_depth: crate::metrics::path_component_count(¶m_path),
869 max_depth: max_depth_val,
870 result: "ok",
871 error_type: None,
872 session_id: sid,
873 seq: Some(seq),
874 });
875 Ok(result)
876 }
877
    /// Tool: `analyze_file` — extracts semantic structure (functions,
    /// classes, imports) from a single source file, with summary mode
    /// and pagination over top-level functions.
    ///
    /// Analysis/validation failures are returned as *error tool results*
    /// (not `Err`) so clients receive a structured message.
    ///
    /// # Errors
    /// Reserved for protocol-level failures; this body always returns `Ok`.
    #[instrument(skip(self, _context))]
    #[tool(
        name = "analyze_file",
        description = "Extract semantic structure from a single source file only; pass a directory to analyze_directory instead. Returns functions with signatures, types, and line ranges; class and method definitions with inheritance, fields, and imports. Supported languages: Rust, Go, Java, Python, TypeScript, TSX, Fortran; unsupported file extensions return an error. Common mistake: passing a directory path returns an error; use analyze_directory for directories. Generated code with deeply nested ASTs may exceed 50K chars; use summary=true to get counts only. Supports pagination for large files via cursor/page_size. Use summary=true for compact output. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py. The fields parameter limits output to specific sections. Valid values: \"functions\", \"classes\", \"imports\". The FILE header (path, line count, section counts) is always emitted. Omit fields to return all sections. When summary=true, fields is ignored. When fields explicitly lists \"imports\", the imports section is rendered regardless of the verbose flag; in all other cases imports require verbose=true.",
        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
        annotations(
            title = "Analyze File",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        _context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        let arc_output = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Summary decision: force wins, then the explicit summary flag,
        // otherwise auto-summarize oversized output.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // summary=false was explicit, output is oversized, and
            // force was not given: reject with guidance.
            // Rough heuristic: ~4 chars per token.
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - Narrow your scope (smaller directory, specific file)\n\
                 - Use analyze_symbol mode for targeted analysis\n\
                 - Reduce max_depth parameter",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true or narrow scope",
                )),
            )));
        }

        // Resolve the pagination offset from the cursor, if supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Pagination covers only top-level functions: those whose line
        // range is not enclosed by any class's line range.
        let top_level_fns: Vec<crate::types::FunctionInfo> = arc_output
            .semantic
            .functions
            .iter()
            .filter(|func| {
                !arc_output
                    .semantic
                    .classes
                    .iter()
                    .any(|class| func.line >= class.line && func.end_line <= class.end_line)
            })
            .cloned()
            .collect();

        let paginated =
            match paginate_slice(&top_level_fns, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
                params.fields.as_deref(),
            );
        }

        // Summary mode returns everything at once: no follow-up cursor.
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(cursor);
        }

        // Structured content mirrors the text output.
        let response_output = analyze::FileAnalysisOutput {
            formatted,
            semantic: arc_output.semantic.clone(),
            line_count,
            next_cursor,
        };

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        // Record per-call metrics (success path only).
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
        });
        Ok(result)
    }
1055
1056 #[instrument(skip(self, context))]
1057 #[tool(
1058 name = "analyze_symbol",
1059 description = "Build call graph for a named function or method across all files in a directory to trace a specific function's usage. Returns direct callers and callees. Default symbol lookup is case-sensitive exact-match (match_mode=exact); myFunc and myfunc are different symbols. If exact match fails, retry with match_mode=insensitive for a case-insensitive search. To list candidates matching a prefix, use match_mode=prefix. To find symbols containing a substring, use match_mode=contains. When prefix or contains matches multiple symbols, an error is returned listing all candidates so you can refine to a single match. A symbol unknown to the graph (not defined and not referenced) returns an error; a symbol that is defined but has no callers or callees returns empty chains without error. follow_depth warning: each increment can multiply output size exponentially; use follow_depth=1 for production use; follow_depth=2+ only for targeted deep dives. Use cursor/page_size to paginate call chains when results exceed page_size. impl_only=true: restrict callers to only those from 'impl Trait for Type' blocks (Rust only); returns INVALID_PARAMS for non-Rust directories; emits a FILTER header showing how many callers were retained. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep; Show only trait impl callers of the write method",
1060 output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
1061 annotations(
1062 title = "Analyze Symbol",
1063 read_only_hint = true,
1064 destructive_hint = false,
1065 idempotent_hint = true,
1066 open_world_hint = false
1067 )
1068 )]
1069 async fn analyze_symbol(
1070 &self,
1071 params: Parameters<AnalyzeSymbolParams>,
1072 context: RequestContext<RoleServer>,
1073 ) -> Result<CallToolResult, ErrorData> {
1074 let params = params.0;
1075 let ct = context.ct.clone();
1076 let t_start = std::time::Instant::now();
1077 let param_path = params.path.clone();
1078 let max_depth_val = params.follow_depth;
1079 let seq = self
1080 .session_call_seq
1081 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1082 let sid = self.session_id.lock().await.clone();
1083
1084 let mut output = match self.handle_focused_mode(¶ms, ct).await {
1086 Ok(v) => v,
1087 Err(e) => return Ok(err_to_tool_result(e)),
1088 };
1089
1090 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
1092 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
1093 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
1094 ErrorData::new(
1095 rmcp::model::ErrorCode::INVALID_PARAMS,
1096 e.to_string(),
1097 Some(error_meta("validation", false, "invalid cursor format")),
1098 )
1099 }) {
1100 Ok(v) => v,
1101 Err(e) => return Ok(err_to_tool_result(e)),
1102 };
1103 cursor_data.offset
1104 } else {
1105 0
1106 };
1107
1108 let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
1110 decode_cursor(cursor_str)
1111 .map(|c| c.mode)
1112 .unwrap_or(PaginationMode::Callers)
1113 } else {
1114 PaginationMode::Callers
1115 };
1116
1117 let use_summary = params.output_control.summary == Some(true);
1118 let verbose = params.output_control.verbose.unwrap_or(false);
1119
1120 let mut callee_cursor = match cursor_mode {
1121 PaginationMode::Callers => {
1122 let (paginated_items, paginated_next) = match paginate_focus_chains(
1123 &output.prod_chains,
1124 PaginationMode::Callers,
1125 offset,
1126 page_size,
1127 ) {
1128 Ok(v) => v,
1129 Err(e) => return Ok(err_to_tool_result(e)),
1130 };
1131
1132 if !use_summary
1133 && (paginated_next.is_some()
1134 || offset > 0
1135 || !verbose
1136 || !output.outgoing_chains.is_empty())
1137 {
1138 let base_path = Path::new(¶ms.path);
1139 output.formatted = format_focused_paginated(
1140 &paginated_items,
1141 output.prod_chains.len(),
1142 PaginationMode::Callers,
1143 ¶ms.symbol,
1144 &output.prod_chains,
1145 &output.test_chains,
1146 &output.outgoing_chains,
1147 output.def_count,
1148 offset,
1149 Some(base_path),
1150 verbose,
1151 );
1152 paginated_next
1153 } else {
1154 None
1155 }
1156 }
1157 PaginationMode::Callees => {
1158 let (paginated_items, paginated_next) = match paginate_focus_chains(
1159 &output.outgoing_chains,
1160 PaginationMode::Callees,
1161 offset,
1162 page_size,
1163 ) {
1164 Ok(v) => v,
1165 Err(e) => return Ok(err_to_tool_result(e)),
1166 };
1167
1168 if paginated_next.is_some() || offset > 0 || !verbose {
1169 let base_path = Path::new(¶ms.path);
1170 output.formatted = format_focused_paginated(
1171 &paginated_items,
1172 output.outgoing_chains.len(),
1173 PaginationMode::Callees,
1174 ¶ms.symbol,
1175 &output.prod_chains,
1176 &output.test_chains,
1177 &output.outgoing_chains,
1178 output.def_count,
1179 offset,
1180 Some(base_path),
1181 verbose,
1182 );
1183 paginated_next
1184 } else {
1185 None
1186 }
1187 }
1188 PaginationMode::Default => {
1189 unreachable!("SymbolFocus should only use Callers or Callees modes")
1190 }
1191 };
1192
1193 if callee_cursor.is_none()
1198 && cursor_mode == PaginationMode::Callers
1199 && !output.outgoing_chains.is_empty()
1200 && !use_summary
1201 && let Ok(cursor) = encode_cursor(&CursorData {
1202 mode: PaginationMode::Callees,
1203 offset: 0,
1204 })
1205 {
1206 callee_cursor = Some(cursor);
1207 }
1208
1209 output.next_cursor.clone_from(&callee_cursor);
1211
1212 let mut final_text = output.formatted.clone();
1214 if let Some(cursor) = callee_cursor {
1215 final_text.push('\n');
1216 final_text.push_str("NEXT_CURSOR: ");
1217 final_text.push_str(&cursor);
1218 }
1219
1220 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1221 .with_meta(Some(no_cache_meta()));
1222 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1223 result.structured_content = Some(structured);
1224 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1225 self.metrics_tx.send(crate::metrics::MetricEvent {
1226 ts: crate::metrics::unix_ms(),
1227 tool: "analyze_symbol",
1228 duration_ms: dur,
1229 output_chars: final_text.len(),
1230 param_path_depth: crate::metrics::path_component_count(¶m_path),
1231 max_depth: max_depth_val,
1232 result: "ok",
1233 error_type: None,
1234 session_id: sid,
1235 seq: Some(seq),
1236 });
1237 Ok(result)
1238 }
1239
1240 #[instrument(skip(self, _context))]
1241 #[tool(
1242 name = "analyze_module",
1243 description = "Index functions and imports in a single source file with minimal token cost. Returns name, line_count, language, function names with line numbers, and import list only -- no signatures, no types, no call graphs, no references. ~75% smaller output than analyze_file. Use analyze_file when you need function signatures, types, or class details; use analyze_module when you only need a function/import index to orient in a file or survey many files in sequence. Use analyze_directory for multi-file overviews; use analyze_symbol to trace call graphs for a specific function. Supported languages: Rust, Go, Java, Python, TypeScript, TSX, Fortran; unsupported extensions return an error. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs. Pagination, summary, force, and verbose parameters are not supported by this tool.",
1244 output_schema = schema_for_type::<types::ModuleInfo>(),
1245 annotations(
1246 title = "Analyze Module",
1247 read_only_hint = true,
1248 destructive_hint = false,
1249 idempotent_hint = true,
1250 open_world_hint = false
1251 )
1252 )]
1253 async fn analyze_module(
1254 &self,
1255 params: Parameters<AnalyzeModuleParams>,
1256 _context: RequestContext<RoleServer>,
1257 ) -> Result<CallToolResult, ErrorData> {
1258 let params = params.0;
1259 let t_start = std::time::Instant::now();
1260 let param_path = params.path.clone();
1261 let seq = self
1262 .session_call_seq
1263 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1264 let sid = self.session_id.lock().await.clone();
1265
1266 if std::fs::metadata(¶ms.path)
1268 .map(|m| m.is_dir())
1269 .unwrap_or(false)
1270 {
1271 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1272 self.metrics_tx.send(crate::metrics::MetricEvent {
1273 ts: crate::metrics::unix_ms(),
1274 tool: "analyze_module",
1275 duration_ms: dur,
1276 output_chars: 0,
1277 param_path_depth: crate::metrics::path_component_count(¶m_path),
1278 max_depth: None,
1279 result: "error",
1280 error_type: Some("invalid_params".to_string()),
1281 session_id: sid.clone(),
1282 seq: Some(seq),
1283 });
1284 return Ok(err_to_tool_result(ErrorData::new(
1285 rmcp::model::ErrorCode::INVALID_PARAMS,
1286 format!(
1287 "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1288 params.path
1289 ),
1290 Some(error_meta(
1291 "validation",
1292 false,
1293 "use analyze_directory for directories",
1294 )),
1295 )));
1296 }
1297
1298 let module_info = match analyze::analyze_module_file(¶ms.path).map_err(|e| {
1299 ErrorData::new(
1300 rmcp::model::ErrorCode::INVALID_PARAMS,
1301 format!("Failed to analyze module: {e}"),
1302 Some(error_meta(
1303 "validation",
1304 false,
1305 "ensure file exists, is readable, and has a supported extension",
1306 )),
1307 )
1308 }) {
1309 Ok(v) => v,
1310 Err(e) => return Ok(err_to_tool_result(e)),
1311 };
1312
1313 let text = format_module_info(&module_info);
1314 let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1315 .with_meta(Some(no_cache_meta()));
1316 let structured = match serde_json::to_value(&module_info).map_err(|e| {
1317 ErrorData::new(
1318 rmcp::model::ErrorCode::INTERNAL_ERROR,
1319 format!("serialization failed: {e}"),
1320 Some(error_meta("internal", false, "report this as a bug")),
1321 )
1322 }) {
1323 Ok(v) => v,
1324 Err(e) => return Ok(err_to_tool_result(e)),
1325 };
1326 result.structured_content = Some(structured);
1327 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1328 self.metrics_tx.send(crate::metrics::MetricEvent {
1329 ts: crate::metrics::unix_ms(),
1330 tool: "analyze_module",
1331 duration_ms: dur,
1332 output_chars: text.len(),
1333 param_path_depth: crate::metrics::path_component_count(¶m_path),
1334 max_depth: None,
1335 result: "ok",
1336 error_type: None,
1337 session_id: sid,
1338 seq: Some(seq),
1339 });
1340 Ok(result)
1341 }
1342}
1343
/// Internal, normalized parameter bundle for focused (single-symbol) analysis.
///
/// NOTE(review): the construction site is outside this chunk — presumably it
/// is built from the user-facing `AnalyzeSymbolParams`; confirm which fields
/// receive defaults there.
#[derive(Clone)]
struct FocusedAnalysisParams {
    // Directory to scan for symbol definitions and references.
    path: std::path::PathBuf,
    // Symbol name to resolve, interpreted according to `match_mode`.
    symbol: String,
    // exact / insensitive / prefix / contains lookup strategy.
    match_mode: SymbolMatchMode,
    // Call-chain expansion depth; per the tool description, each increment
    // can grow output exponentially.
    follow_depth: u32,
    // Optional depth limit — presumably for the directory walk; confirm.
    max_depth: Option<u32>,
    // Optional cap on parser AST recursion depth.
    ast_recursion_limit: Option<usize>,
    // Emit summary formatting instead of full chains.
    use_summary: bool,
    // Rust-only: restrict callers to `impl Trait for Type` blocks.
    impl_only: Option<bool>,
}
1356
// MCP server handler implementation. `#[tool_handler]` wires the `#[tool]`
// methods defined on the inherent impl into the MCP tool-call dispatch.
#[tool_handler]
impl ServerHandler for CodeAnalyzer {
    // Advertise server capabilities, identity, and workflow instructions to
    // the client at initialize time.
    fn get_info(&self) -> InitializeResult {
        let excluded = crate::EXCLUDED_DIRS.join(", ");
        let instructions = format!(
            "Recommended workflow for unknown repositories:\n\
            1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify the source package directory \
            (typically the largest directory by file count; exclude {excluded}).\n\
            2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for a module map with per-package class and function counts. Include test directories (e.g., tests/, testutil/, files matching *_test.go, test_*.py, test_*.rs, *_test.rs, *.spec.ts, *.spec.js) in the module map; test files are valid analysis targets and must not be skipped.\n\
            3. For key files identified in step 2, prefer analyze_module to get a lightweight function/import index (~75% smaller output) when you only need function names and imports; call analyze_file when you need signatures, types, or class structure.\n\
            4. Use analyze_symbol to trace call graphs for specific functions found in step 3.\n\
            Prefer summary=true on large directories (1000+ files). Set max_depth=2 for the first call; increase only if packages are too large to differentiate. \
            Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
        );
        let capabilities = ServerCapabilities::builder()
            .enable_logging()
            .enable_tools()
            .enable_tool_list_changed()
            .enable_completions()
            .build();
        let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
            .with_title("Code Analyze MCP")
            .with_description("MCP server for code structure analysis using tree-sitter");
        InitializeResult::new(capabilities)
            .with_server_info(server_info)
            .with_instructions(&instructions)
    }

    // Post-initialize setup: store the peer handle, mint a fresh session id,
    // reset the per-session call counter, and spawn the log-forwarding task.
    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
        // Install the peer first so the forwarder task (spawned below) can
        // send notifications as soon as events arrive.
        let mut peer_lock = self.peer.lock().await;
        *peer_lock = Some(context.peer.clone());
        drop(peer_lock);

        // Session id = wall-clock millis + a process-wide counter, so ids stay
        // unique even if two sessions initialize within the same millisecond.
        let millis = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis()
            .try_into()
            .unwrap_or(u64::MAX);
        let counter = GLOBAL_SESSION_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        let sid = format!("{millis}-{counter}");
        {
            let mut session_id_lock = self.session_id.lock().await;
            *session_id_lock = Some(sid);
        }
        self.session_call_seq
            .store(0, std::sync::atomic::Ordering::Relaxed);

        let peer = self.peer.clone();
        let event_rx = self.event_rx.clone();

        // Forward buffered log events to the client as MCP logging
        // notifications. The receiver is take()n so only the first
        // initialization spawns a live forwarder; later calls get None.
        tokio::spawn(async move {
            let rx = {
                let mut rx_lock = event_rx.lock().await;
                rx_lock.take()
            };

            if let Some(mut receiver) = rx {
                let mut buffer = Vec::with_capacity(64);
                loop {
                    // Batch up to 64 events per wakeup.
                    receiver.recv_many(&mut buffer, 64).await;

                    // recv_many returns with an empty buffer only when the
                    // channel is closed and drained: exit the task.
                    if buffer.is_empty() {
                        break;
                    }

                    // NOTE(review): if the peer is None here, the buffer is
                    // not drained and events accumulate until a peer appears
                    // (or the channel closes) — confirm this backlog growth
                    // is intended.
                    let peer_lock = peer.lock().await;
                    if let Some(peer) = peer_lock.as_ref() {
                        for log_event in buffer.drain(..) {
                            let notification = ServerNotification::LoggingMessageNotification(
                                Notification::new(LoggingMessageNotificationParam {
                                    level: log_event.level,
                                    logger: Some(log_event.logger),
                                    data: log_event.data,
                                }),
                            );
                            // Delivery failures are logged, not fatal.
                            if let Err(e) = peer.send_notification(notification).await {
                                warn!("Failed to send logging notification: {}", e);
                            }
                        }
                    }
                }
            }
        });
    }

    // Cancellation notifications are only logged; request-level cancellation
    // is handled via the CancellationToken passed into each tool call.
    #[instrument(skip(self, _context))]
    async fn on_cancelled(
        &self,
        notification: CancelledNotificationParam,
        _context: NotificationContext<RoleServer>,
    ) {
        tracing::info!(
            request_id = ?notification.request_id,
            reason = ?notification.reason,
            "Received cancellation notification"
        );
    }

    // Argument completion: "path" completes filesystem paths relative to the
    // current working directory; "symbol" completes cached symbols for the
    // "path" argument supplied in the completion context. Anything else
    // yields no candidates.
    #[instrument(skip(self, _context))]
    async fn complete(
        &self,
        request: CompleteRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<CompleteResult, ErrorData> {
        let argument_name = &request.argument.name;
        let argument_value = &request.argument.value;

        let completions = match argument_name.as_str() {
            "path" => {
                let root = Path::new(".");
                completion::path_completions(root, argument_value)
            }
            "symbol" => {
                let path_arg = request
                    .context
                    .as_ref()
                    .and_then(|ctx| ctx.get_argument("path"));

                match path_arg {
                    Some(path_str) => {
                        let path = Path::new(path_str);
                        completion::symbol_completions(&self.cache, path, argument_value)
                    }
                    None => Vec::new(),
                }
            }
            _ => Vec::new(),
        };

        // Cap the response at 100 values but report the true total so the
        // client knows more exist.
        let total_count = u32::try_from(completions.len()).unwrap_or(u32::MAX);
        let (values, has_more) = if completions.len() > 100 {
            (completions.into_iter().take(100).collect(), true)
        } else {
            (completions, false)
        };

        // Fall back to an empty completion set if pagination metadata is
        // rejected by the rmcp constructor.
        let completion_info =
            match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
                Ok(info) => info,
                Err(_) => {
                    CompletionInfo::with_all_values(Vec::new())
                        .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
                }
            };

        Ok(CompleteResult::new(completion_info))
    }

    // Map the MCP logging level onto tracing's coarser filter levels and
    // store it for the subscriber to pick up.
    async fn set_level(
        &self,
        params: SetLevelRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<(), ErrorData> {
        let level_filter = match params.level {
            LoggingLevel::Debug => LevelFilter::DEBUG,
            LoggingLevel::Info | LoggingLevel::Notice => LevelFilter::INFO,
            LoggingLevel::Warning => LevelFilter::WARN,
            LoggingLevel::Error
            | LoggingLevel::Critical
            | LoggingLevel::Alert
            | LoggingLevel::Emergency => LevelFilter::ERROR,
        };

        // NOTE(review): unwrap() panics if the std Mutex was poisoned by a
        // panicking holder — consider unwrap_or_else(|e| e.into_inner()).
        let mut filter_lock = self.log_level_filter.lock().unwrap();
        *filter_lock = level_filter;
        Ok(())
    }
}
1536
#[cfg(test)]
mod tests {
    use super::*;

    /// `emit_progress` with no connected peer must complete as a silent no-op
    /// rather than panicking or blocking.
    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        );
        let token = ProgressToken(NumberOrString::String("test".into()));
        // Reaching the end of the test without a panic is the assertion.
        analyzer
            .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
            .await;
    }

    /// verbose=true must produce the full FILES listing with a PAGINATED
    /// header and never the compact SUMMARY block.
    #[tokio::test]
    async fn test_handle_overview_mode_verbose_no_summary_block() {
        use crate::pagination::{PaginationMode, paginate_slice};
        use crate::types::{AnalyzeDirectoryParams, OutputControlParams, PaginationParams};
        use tempfile::TempDir;

        // One trivial Rust file is enough to exercise the formatter path.
        let tmp = TempDir::new().unwrap();
        std::fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();

        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        );

        let params = AnalyzeDirectoryParams {
            path: tmp.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: Some(true),
            },
        };

        let ct = tokio_util::sync::CancellationToken::new();
        let output = analyzer.handle_overview_mode(&params, ct).await.unwrap();

        // Mirror the handler's size-threshold decision, then re-format the
        // first page exactly as the tool would.
        let use_summary = output.formatted.len() > SIZE_LIMIT;
        let paginated =
            paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
        let verbose = true;
        let formatted = if !use_summary {
            format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(std::path::Path::new(&params.path)),
                verbose,
            )
        } else {
            output.formatted.clone()
        };

        assert!(
            !formatted.contains("SUMMARY:"),
            "verbose=true must not emit SUMMARY: block; got: {}",
            &formatted[..formatted.len().min(300)]
        );
        assert!(
            formatted.contains("PAGINATED:"),
            "verbose=true must emit PAGINATED: header"
        );
        assert!(
            formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
            "verbose=true must emit FILES section header"
        );
    }
}
1629}