1pub mod logging;
19pub mod metrics;
20
21pub use code_analyze_core::analyze;
22use code_analyze_core::{cache, completion, graph, traversal, types};
23
/// Directory names skipped during traversal: dependency caches, VCS metadata,
/// build artifacts, and Python virtual environments.
pub(crate) const EXCLUDED_DIRS: &[&str] = &[
    "node_modules",
    "vendor",
    ".git",
    "__pycache__",
    "target",
    "dist",
    "build",
    ".venv",
];
34
35use code_analyze_core::cache::AnalysisCache;
36use code_analyze_core::formatter::{
37 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
38 format_module_info, format_structure_paginated, format_summary,
39};
40use code_analyze_core::pagination::{
41 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
42};
43use code_analyze_core::traversal::{WalkEntry, walk_directory};
44use code_analyze_core::types::{
45 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
46 AnalyzeSymbolParams, SymbolMatchMode,
47};
48use logging::LogEvent;
49use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
50use rmcp::handler::server::wrapper::Parameters;
51use rmcp::model::{
52 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
53 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
54 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
55 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
56};
57use rmcp::service::{NotificationContext, RequestContext};
58use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
59use serde_json::Value;
60use std::path::Path;
61use std::sync::{Arc, Mutex};
62use tokio::sync::{Mutex as TokioMutex, mpsc};
63use tracing::{instrument, warn};
64use tracing_subscriber::filter::LevelFilter;
65
// Process-wide counter; presumably used to mint unique session ids — it is not
// read anywhere in this chunk. TODO(review): confirm usage elsewhere in the file.
static GLOBAL_SESSION_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);

/// Soft cap on formatted tool output, in characters. Outputs above this are
/// summarized or rejected unless the caller passes `force=true`.
const SIZE_LIMIT: usize = 50_000;
69
/// Returns `true` when the caller requested a summary while also supplying a
/// pagination cursor — a combination the tools reject, because summaries are
/// never paginated.
#[must_use]
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!((summary, cursor), (Some(true), Some(_)))
}
76
77#[must_use]
78fn error_meta(
79 category: &'static str,
80 is_retryable: bool,
81 suggested_action: &'static str,
82) -> serde_json::Value {
83 serde_json::json!({
84 "errorCategory": category,
85 "isRetryable": is_retryable,
86 "suggestedAction": suggested_action,
87 })
88}
89
/// Converts a protocol-level `ErrorData` into an error `CallToolResult`, so a
/// failure is reported as a tool result instead of a JSON-RPC error. Note that
/// only the message text is carried over; the structured meta is dropped here.
#[must_use]
fn err_to_tool_result(e: ErrorData) -> CallToolResult {
    CallToolResult::error(vec![Content::text(e.message)])
}
94
95fn no_cache_meta() -> Meta {
96 let mut m = serde_json::Map::new();
97 m.insert(
98 "cache_hint".to_string(),
99 serde_json::Value::String("no-cache".to_string()),
100 );
101 Meta(m)
102}
103
/// Paginates a slice of call chains and normalizes the continuation cursor.
///
/// `paginate_slice` emits a raw cursor for the next page; this helper decodes
/// it and re-encodes it with the caller-supplied `mode` stamped in, so that
/// `analyze_symbol` can distinguish caller-pages from callee-pages when the
/// cursor is sent back. Returns the page of items plus the re-encoded cursor
/// (`None` when a fresh offset-0 request has no further pages).
fn paginate_focus_chains(
    chains: &[graph::InternalCallChain],
    mode: PaginationMode,
    offset: usize,
    page_size: usize,
) -> Result<(Vec<graph::InternalCallChain>, Option<String>), ErrorData> {
    let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
        ErrorData::new(
            rmcp::model::ErrorCode::INTERNAL_ERROR,
            e.to_string(),
            Some(error_meta("transient", true, "retry the request")),
        )
    })?;

    // Fast path: first page with nothing after it — no cursor bookkeeping.
    if paginated.next_cursor.is_none() && offset == 0 {
        return Ok((paginated.items, None));
    }

    let next = if let Some(raw_cursor) = paginated.next_cursor {
        // Round-trip the cursor so that `mode` is stamped into it while the
        // offset computed by paginate_slice is preserved.
        let decoded = decode_cursor(&raw_cursor).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                e.to_string(),
                Some(error_meta("validation", false, "invalid cursor format")),
            )
        })?;
        Some(
            encode_cursor(&CursorData {
                mode,
                offset: decoded.offset,
            })
            .map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            })?,
        )
    } else {
        None
    };

    Ok((paginated.items, next))
}
151
/// Shared state of the MCP code-analysis server. Cloned per handler; every
/// field is a cheaply cloneable handle.
#[derive(Clone)]
pub struct CodeAnalyzer {
    // Kept to satisfy the #[tool_router]/#[tool_handler] macro wiring even
    // though it is not read directly in this chunk.
    #[allow(dead_code)]
    tool_router: ToolRouter<Self>,
    // Bounded cache of directory/file analysis results (capacity set in `new`).
    cache: AnalysisCache,
    // Connected client peer; `None` until a session is established.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    // Shared log-level filter for the tracing subscriber.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    // Receiver for internal log events; Option so it can be taken once by a
    // consumer task — presumably a log-forwarding loop (not visible here).
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    // Channel on which per-call metric events are emitted.
    metrics_tx: crate::metrics::MetricsSender,
    // Monotonic sequence number of tool calls within the current session.
    session_call_seq: Arc<std::sync::atomic::AtomicU32>,
    // Identifier of the current session, if one has been assigned.
    session_id: Arc<TokioMutex<Option<String>>>,
}
170
171#[tool_router]
172impl CodeAnalyzer {
    /// Returns the definitions of every tool this server exposes, without
    /// requiring a constructed `CodeAnalyzer` instance.
    #[must_use]
    pub fn list_tools() -> Vec<rmcp::model::Tool> {
        Self::tool_router().list_all()
    }
177
178 pub fn new(
179 peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
180 log_level_filter: Arc<Mutex<LevelFilter>>,
181 event_rx: mpsc::UnboundedReceiver<LogEvent>,
182 metrics_tx: crate::metrics::MetricsSender,
183 ) -> Self {
184 CodeAnalyzer {
185 tool_router: Self::tool_router(),
186 cache: AnalysisCache::new(100),
187 peer,
188 log_level_filter,
189 event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
190 metrics_tx,
191 session_call_seq: Arc::new(std::sync::atomic::AtomicU32::new(0)),
192 session_id: Arc::new(TokioMutex::new(None)),
193 }
194 }
195
196 #[instrument(skip(self))]
197 async fn emit_progress(
198 &self,
199 peer: Option<Peer<RoleServer>>,
200 token: &ProgressToken,
201 progress: f64,
202 total: f64,
203 message: String,
204 ) {
205 if let Some(peer) = peer {
206 let notification = ServerNotification::ProgressNotification(Notification::new(
207 ProgressNotificationParam {
208 progress_token: token.clone(),
209 progress,
210 total: Some(total),
211 message: Some(message),
212 },
213 ));
214 if let Err(e) = peer.send_notification(notification).await {
215 warn!("Failed to send progress notification: {}", e);
216 }
217 }
218 }
219
    /// Implements `analyze_directory`: walks the tree, consults the directory
    /// cache, runs the blocking analysis on a worker thread, and forwards
    /// progress notifications while it runs.
    ///
    /// Returns the (possibly cached) analysis output, or an `ErrorData`
    /// describing walk failures, cancellation, analysis errors, or task join
    /// errors.
    #[allow(clippy::too_many_lines)] #[allow(clippy::cast_precision_loss)] #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<std::sync::Arc<analyze::AnalysisOutput>, ErrorData> {
        let path = Path::new(&params.path);
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        // Walk with no depth cap so the cache key and subtree counts see the
        // full tree; depth filtering is applied further down.
        let all_entries = walk_directory(path, None).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path permissions and availability",
                )),
            )
        })?;

        // Normalize max_depth=0 to "no limit" so both spellings share one
        // cache entry.
        let canonical_max_depth = max_depth.and_then(|d| if d == 0 { None } else { Some(d) });

        let cache_key = cache::DirectoryCacheKey::from_entries(
            &all_entries,
            canonical_max_depth,
            AnalysisMode::Overview,
        );

        if let Some(cached) = self.cache.get_directory(&cache_key) {
            return Ok(cached);
        }

        // Per-subtree counts are only needed when output will be
        // depth-truncated.
        let subtree_counts = if max_depth.is_some_and(|d| d > 0) {
            Some(traversal::subtree_counts_from_entries(path, &all_entries))
        } else {
            None
        };

        // Apply the depth limit (a positive max_depth) to the entries that
        // are actually analyzed.
        let entries: Vec<traversal::WalkEntry> = if let Some(depth) = max_depth
            && depth > 0
        {
            all_entries
                .into_iter()
                .filter(|e| e.depth <= depth as usize)
                .collect()
        } else {
            all_entries
        };

        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // CPU-bound work goes off the async runtime; progress is reported
        // through the shared atomic counter.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Progress token made unique with a nanosecond timestamp.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll every 100ms: bail out on cancellation, forward progress deltas,
        // and exit once the blocking task reports finished.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {current}/{total_files} files"),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Emit the final 100% notification unless the request was cancelled.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(mut output)) => {
                output.subtree_counts = subtree_counts;
                let arc_output = std::sync::Arc::new(output);
                self.cache.put_directory(cache_key, arc_output.clone());
                Ok(arc_output)
            }
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path and file permissions",
                )),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
370
371 #[instrument(skip(self, params))]
374 async fn handle_file_details_mode(
375 &self,
376 params: &AnalyzeFileParams,
377 ) -> Result<std::sync::Arc<analyze::FileAnalysisOutput>, ErrorData> {
378 let cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
380 meta.modified().ok().map(|mtime| cache::CacheKey {
381 path: std::path::PathBuf::from(¶ms.path),
382 modified: mtime,
383 mode: AnalysisMode::FileDetails,
384 })
385 });
386
387 if let Some(ref key) = cache_key
389 && let Some(cached) = self.cache.get(key)
390 {
391 return Ok(cached);
392 }
393
394 match analyze::analyze_file(¶ms.path, params.ast_recursion_limit) {
396 Ok(output) => {
397 let arc_output = std::sync::Arc::new(output);
398 if let Some(key) = cache_key {
399 self.cache.put(key, arc_output.clone());
400 }
401 Ok(arc_output)
402 }
403 Err(e) => Err(ErrorData::new(
404 rmcp::model::ErrorCode::INTERNAL_ERROR,
405 format!("Error analyzing file: {e}"),
406 Some(error_meta(
407 "resource",
408 false,
409 "check file path and permissions",
410 )),
411 )),
412 }
413 }
414
415 fn validate_impl_only(entries: &[WalkEntry]) -> Result<(), ErrorData> {
417 let has_rust = entries.iter().any(|e| {
418 !e.is_dir
419 && e.path
420 .extension()
421 .and_then(|x: &std::ffi::OsStr| x.to_str())
422 == Some("rs")
423 });
424
425 if !has_rust {
426 return Err(ErrorData::new(
427 rmcp::model::ErrorCode::INVALID_PARAMS,
428 "impl_only=true requires Rust source files. No .rs files found in the given path. Use analyze_symbol without impl_only for cross-language analysis.".to_string(),
429 Some(error_meta(
430 "validation",
431 false,
432 "remove impl_only or point to a directory containing .rs files",
433 )),
434 ));
435 }
436 Ok(())
437 }
438
    /// Spawns the blocking focused (symbol) analysis and polls it every 100ms,
    /// forwarding progress notifications until the task finishes or `ct` is
    /// cancelled. `symbol_display` is only used in progress message text.
    ///
    /// Returns the analysis output, or an `ErrorData` for cancellation,
    /// analysis failure, or a task join error.
    #[allow(clippy::cast_precision_loss)] async fn poll_progress_until_done(
        &self,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
        symbol_display: &str,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // Clone/move everything the blocking task needs; it cannot borrow
        // from `self` or the request.
        let counter_clone = counter.clone();
        let ct_clone = ct.clone();
        let entries_clone = std::sync::Arc::clone(&entries);
        let path_owned = analysis_params.path.clone();
        let symbol_owned = analysis_params.symbol.clone();
        let match_mode_owned = analysis_params.match_mode.clone();
        let follow_depth = analysis_params.follow_depth;
        let max_depth = analysis_params.max_depth;
        let ast_recursion_limit = analysis_params.ast_recursion_limit;
        let use_summary = analysis_params.use_summary;
        let impl_only = analysis_params.impl_only;
        let handle = tokio::task::spawn_blocking(move || {
            let params = analyze::FocusedAnalysisConfig {
                focus: symbol_owned,
                match_mode: match_mode_owned,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                use_summary,
                impl_only,
            };
            analyze::analyze_focused_with_progress_with_entries(
                &path_owned,
                &params,
                &counter_clone,
                &ct_clone,
                &entries_clone,
            )
        });

        // Progress token made unique with a nanosecond timestamp.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;

        // Poll loop: sleep, check cancellation, forward any progress delta,
        // then exit once the blocking task reports finished.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {current}/{total_files} files for symbol '{symbol_display}'"
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Emit the final 100% notification unless the request was cancelled.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files for symbol '{symbol_display}'"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(output)) => Ok(output),
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing symbol: {e}"),
                Some(error_meta("resource", false, "check symbol name and file")),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
549
    /// Runs the focused analysis, automatically retrying in summary mode when
    /// the caller expressed no preference and the full output would exceed
    /// `SIZE_LIMIT`. When the caller explicitly set `summary=false` and the
    /// output is oversize (without `force=true`), the call is rejected with
    /// guidance instead of truncating.
    async fn run_focused_with_auto_summary(
        &self,
        params: &AnalyzeSymbolParams,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // force=true overrides summary=true for the initial pass.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        let analysis_params_initial = FocusedAnalysisParams {
            use_summary: use_summary_for_task,
            ..analysis_params.clone()
        };

        let mut output = self
            .poll_progress_until_done(
                &analysis_params_initial,
                counter.clone(),
                ct.clone(),
                entries.clone(),
                total_files,
                &params.symbol,
            )
            .await?;

        // No explicit preference and the output is too large: transparently
        // re-run in summary mode with a fresh progress counter.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let analysis_params_retry = FocusedAnalysisParams {
                use_summary: true,
                ..analysis_params.clone()
            };
            let summary_result = self
                .poll_progress_until_done(
                    &analysis_params_retry,
                    counter2,
                    ct,
                    entries,
                    total_files,
                    &params.symbol,
                )
                .await;

            if let Ok(summary_output) = summary_result {
                output.formatted = summary_output.formatted;
            } else {
                // Summary retry failed: reject the oversize output, quoting a
                // rough token estimate (~4 chars per token).
                let estimated_tokens = output.formatted.len() / 4;
                let message = format!(
                    "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                    output.formatted.len(),
                    estimated_tokens
                );
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    message,
                    Some(error_meta(
                        "validation",
                        false,
                        "use summary=true or force=true",
                    )),
                ));
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly opted out of the summary: reject rather than
            // silently truncate.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                - force=true to return full output\n\
                - summary=true to get compact summary\n\
                - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                )),
            ));
        }

        Ok(output)
    }
644
    /// Implements `analyze_symbol`: walks the directory, validates the
    /// Rust-only `impl_only` filter, runs the focused call-graph analysis with
    /// auto-summary, and prepends impl-only filter diagnostics to the output.
    #[instrument(skip(self, params, ct))]
    async fn handle_focused_mode(
        &self,
        params: &AnalyzeSymbolParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        let path = Path::new(&params.path);
        let entries = match walk_directory(path, params.max_depth) {
            Ok(e) => e,
            Err(e) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Failed to walk directory: {e}"),
                    Some(error_meta(
                        "resource",
                        false,
                        "check path permissions and availability",
                    )),
                ));
            }
        };
        // Shared with the blocking analysis task, hence the Arc.
        let entries = std::sync::Arc::new(entries);

        // impl_only is Rust-specific; fail fast when no .rs files exist.
        if params.impl_only == Some(true) {
            Self::validate_impl_only(&entries)?;
        }

        let total_files = entries.iter().filter(|e| !e.is_dir).count();
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));

        let analysis_params = FocusedAnalysisParams {
            path: path.to_path_buf(),
            symbol: params.symbol.clone(),
            match_mode: params.match_mode.clone().unwrap_or_default(),
            follow_depth: params.follow_depth.unwrap_or(1),
            max_depth: params.max_depth,
            ast_recursion_limit: params.ast_recursion_limit,
            use_summary: false,
            impl_only: params.impl_only,
        };

        let mut output = self
            .run_focused_with_auto_summary(
                params,
                &analysis_params,
                counter,
                ct,
                entries,
                total_files,
            )
            .await?;

        // Surface how much the impl_only filter removed, and hint when it
        // filtered out every caller.
        if params.impl_only == Some(true) {
            let filter_line = format!(
                "FILTER: impl_only=true ({} of {} callers shown)\n",
                output.impl_trait_caller_count, output.unfiltered_caller_count
            );
            output.formatted = format!("{}{}", filter_line, output.formatted);

            if output.impl_trait_caller_count == 0 {
                output.formatted.push_str(
                    "\nNOTE: No impl-trait callers found. The symbol may be a plain function or struct, not a trait method. Remove impl_only to see all callers.\n"
                );
            }
        }

        Ok(output)
    }
716
    /// Tool entry point for `analyze_directory`. Delegates the analysis to
    /// `handle_overview_mode`, then applies summary/force output control,
    /// pagination, and a `NEXT_CURSOR` text trailer, and finally emits a
    /// metrics event. Errors are returned as error tool results, not JSON-RPC
    /// errors.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_directory",
        description = "Tree-view of directory with LOC, function/class counts, test markers. Respects .gitignore. For 1000+ files, use max_depth=2-3 and summary=true. Empty directories return zero counts. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they?",
        output_schema = schema_for_type::<analyze::AnalysisOutput>(),
        annotations(
            title = "Analyze Directory",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_directory(
        &self,
        params: Parameters<AnalyzeDirectoryParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let max_depth_val = params.max_depth;
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        let arc_output = match self.handle_overview_mode(&params, ct).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };
        // Take ownership without cloning when this request holds the only
        // reference; otherwise clone the cached value.
        let mut output = match std::sync::Arc::try_unwrap(arc_output) {
            Ok(owned) => owned,
            Err(arc) => (*arc).clone(),
        };

        // summary=true and a pagination cursor are mutually exclusive.
        if summary_cursor_conflict(
            params.output_control.summary,
            params.pagination.cursor.as_deref(),
        ) {
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                "summary=true is incompatible with a pagination cursor; use one or the other"
                    .to_string(),
                Some(error_meta(
                    "validation",
                    false,
                    "remove cursor or set summary=false",
                )),
            )));
        }

        // force > explicit summary choice > size-based auto-summary.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            output.formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            output.formatted = format_summary(
                &output.entries,
                &output.files,
                params.max_depth,
                output.subtree_counts.as_deref(),
            );
        }

        // Resolve pagination offset from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        let paginated =
            match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            output.formatted = format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(Path::new(&params.path)),
                verbose,
            );
        }

        // Summaries are never paginated, so they never carry a cursor.
        if use_summary {
            output.next_cursor = None;
        } else {
            output.next_cursor.clone_from(&paginated.next_cursor);
        }

        // The cursor is also appended to the text body for clients that do
        // not read structured content.
        let mut final_text = output.formatted.clone();
        if !use_summary && let Some(cursor) = paginated.next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(&cursor);
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_directory",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: max_depth_val,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
        });
        Ok(result)
    }
871
    /// Tool entry point for `analyze_file`. Delegates to
    /// `handle_file_details_mode`, then applies summary/force output control
    /// and size limits, paginates top-level functions, appends a
    /// `NEXT_CURSOR` trailer, and emits a metrics event. Errors are returned
    /// as error tool results, not JSON-RPC errors.
    #[instrument(skip(self, _context))]
    #[tool(
        name = "analyze_file",
        description = "Functions, types, classes, and imports from a single source file; use analyze_directory for directories. Supported: Rust, Go, Java, Python, TypeScript, TSX, Fortran, JavaScript, C/C++, C#. Passing a directory path returns an error. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py.",
        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
        annotations(
            title = "Analyze File",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        _context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        let arc_output = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // force > explicit summary choice > size-based auto-summary.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // Oversize output with summary explicitly disabled: reject with
            // guidance (~4 chars per token estimate).
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                - force=true to return full output\n\
                - Use fields to limit output to specific sections (functions, classes, or imports)\n\
                - Use summary=true for a compact overview",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, fields, or summary=true",
                )),
            )));
        }

        // Resolve pagination offset from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Only paginate functions that are not lexically contained inside any
        // class's line range (i.e. free functions, not methods).
        let top_level_fns: Vec<crate::types::FunctionInfo> = arc_output
            .semantic
            .functions
            .iter()
            .filter(|func| {
                !arc_output
                    .semantic
                    .classes
                    .iter()
                    .any(|class| func.line >= class.line && func.end_line <= class.end_line)
            })
            .cloned()
            .collect();

        let paginated =
            match paginate_slice(&top_level_fns, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
                params.fields.as_deref(),
            );
        }

        // Summaries are never paginated, so they never carry a cursor.
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        // The cursor is also appended to the text body for clients that do
        // not read structured content.
        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(cursor);
        }

        let response_output = analyze::FileAnalysisOutput::new(
            formatted,
            arc_output.semantic.clone(),
            line_count,
            next_cursor,
        );

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
        });
        Ok(result)
    }
1048
1049 #[instrument(skip(self, context))]
1050 #[tool(
1051 name = "analyze_symbol",
1052 description = "Call graph for a named function/method across all files in a directory to trace usage. Returns direct callers and callees. Unknown symbols return error; symbols with no callers/callees return empty chains. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep; Show only trait impl callers of the write method",
1053 output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
1054 annotations(
1055 title = "Analyze Symbol",
1056 read_only_hint = true,
1057 destructive_hint = false,
1058 idempotent_hint = true,
1059 open_world_hint = false
1060 )
1061 )]
1062 async fn analyze_symbol(
1063 &self,
1064 params: Parameters<AnalyzeSymbolParams>,
1065 context: RequestContext<RoleServer>,
1066 ) -> Result<CallToolResult, ErrorData> {
1067 let params = params.0;
1068 let ct = context.ct.clone();
1069 let t_start = std::time::Instant::now();
1070 let param_path = params.path.clone();
1071 let max_depth_val = params.follow_depth;
1072 let seq = self
1073 .session_call_seq
1074 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1075 let sid = self.session_id.lock().await.clone();
1076
1077 let mut output = match self.handle_focused_mode(¶ms, ct).await {
1079 Ok(v) => v,
1080 Err(e) => return Ok(err_to_tool_result(e)),
1081 };
1082
1083 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
1085 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
1086 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
1087 ErrorData::new(
1088 rmcp::model::ErrorCode::INVALID_PARAMS,
1089 e.to_string(),
1090 Some(error_meta("validation", false, "invalid cursor format")),
1091 )
1092 }) {
1093 Ok(v) => v,
1094 Err(e) => return Ok(err_to_tool_result(e)),
1095 };
1096 cursor_data.offset
1097 } else {
1098 0
1099 };
1100
1101 let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
1103 decode_cursor(cursor_str)
1104 .map(|c| c.mode)
1105 .unwrap_or(PaginationMode::Callers)
1106 } else {
1107 PaginationMode::Callers
1108 };
1109
1110 let use_summary = params.output_control.summary == Some(true);
1111 let verbose = params.output_control.verbose.unwrap_or(false);
1112
1113 let mut callee_cursor = match cursor_mode {
1114 PaginationMode::Callers => {
1115 let (paginated_items, paginated_next) = match paginate_focus_chains(
1116 &output.prod_chains,
1117 PaginationMode::Callers,
1118 offset,
1119 page_size,
1120 ) {
1121 Ok(v) => v,
1122 Err(e) => return Ok(err_to_tool_result(e)),
1123 };
1124
1125 if !use_summary
1126 && (paginated_next.is_some()
1127 || offset > 0
1128 || !verbose
1129 || !output.outgoing_chains.is_empty())
1130 {
1131 let base_path = Path::new(¶ms.path);
1132 output.formatted = format_focused_paginated(
1133 &paginated_items,
1134 output.prod_chains.len(),
1135 PaginationMode::Callers,
1136 ¶ms.symbol,
1137 &output.prod_chains,
1138 &output.test_chains,
1139 &output.outgoing_chains,
1140 output.def_count,
1141 offset,
1142 Some(base_path),
1143 verbose,
1144 );
1145 paginated_next
1146 } else {
1147 None
1148 }
1149 }
1150 PaginationMode::Callees => {
1151 let (paginated_items, paginated_next) = match paginate_focus_chains(
1152 &output.outgoing_chains,
1153 PaginationMode::Callees,
1154 offset,
1155 page_size,
1156 ) {
1157 Ok(v) => v,
1158 Err(e) => return Ok(err_to_tool_result(e)),
1159 };
1160
1161 if paginated_next.is_some() || offset > 0 || !verbose {
1162 let base_path = Path::new(¶ms.path);
1163 output.formatted = format_focused_paginated(
1164 &paginated_items,
1165 output.outgoing_chains.len(),
1166 PaginationMode::Callees,
1167 ¶ms.symbol,
1168 &output.prod_chains,
1169 &output.test_chains,
1170 &output.outgoing_chains,
1171 output.def_count,
1172 offset,
1173 Some(base_path),
1174 verbose,
1175 );
1176 paginated_next
1177 } else {
1178 None
1179 }
1180 }
1181 PaginationMode::Default => {
1182 unreachable!("SymbolFocus should only use Callers or Callees modes")
1183 }
1184 };
1185
1186 if callee_cursor.is_none()
1191 && cursor_mode == PaginationMode::Callers
1192 && !output.outgoing_chains.is_empty()
1193 && !use_summary
1194 && let Ok(cursor) = encode_cursor(&CursorData {
1195 mode: PaginationMode::Callees,
1196 offset: 0,
1197 })
1198 {
1199 callee_cursor = Some(cursor);
1200 }
1201
1202 output.next_cursor.clone_from(&callee_cursor);
1204
1205 let mut final_text = output.formatted.clone();
1207 if let Some(cursor) = callee_cursor {
1208 final_text.push('\n');
1209 final_text.push_str("NEXT_CURSOR: ");
1210 final_text.push_str(&cursor);
1211 }
1212
1213 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1214 .with_meta(Some(no_cache_meta()));
1215 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1216 result.structured_content = Some(structured);
1217 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1218 self.metrics_tx.send(crate::metrics::MetricEvent {
1219 ts: crate::metrics::unix_ms(),
1220 tool: "analyze_symbol",
1221 duration_ms: dur,
1222 output_chars: final_text.len(),
1223 param_path_depth: crate::metrics::path_component_count(¶m_path),
1224 max_depth: max_depth_val,
1225 result: "ok",
1226 error_type: None,
1227 session_id: sid,
1228 seq: Some(seq),
1229 });
1230 Ok(result)
1231 }
1232
1233 #[instrument(skip(self, _context))]
1234 #[tool(
1235 name = "analyze_module",
1236 description = "Function and import index for a single source file with minimal token cost: name, line_count, language, function names with line numbers, import list only (~75% smaller than analyze_file). Use analyze_file when you need signatures, types, or class details. Supported: Rust, Go, Java, Python, TypeScript, TSX, Fortran, JavaScript, C/C++, C#. Pagination, summary, force, and verbose not supported. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs.",
1237 output_schema = schema_for_type::<types::ModuleInfo>(),
1238 annotations(
1239 title = "Analyze Module",
1240 read_only_hint = true,
1241 destructive_hint = false,
1242 idempotent_hint = true,
1243 open_world_hint = false
1244 )
1245 )]
1246 async fn analyze_module(
1247 &self,
1248 params: Parameters<AnalyzeModuleParams>,
1249 _context: RequestContext<RoleServer>,
1250 ) -> Result<CallToolResult, ErrorData> {
1251 let params = params.0;
1252 let t_start = std::time::Instant::now();
1253 let param_path = params.path.clone();
1254 let seq = self
1255 .session_call_seq
1256 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1257 let sid = self.session_id.lock().await.clone();
1258
1259 if std::fs::metadata(¶ms.path)
1261 .map(|m| m.is_dir())
1262 .unwrap_or(false)
1263 {
1264 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1265 self.metrics_tx.send(crate::metrics::MetricEvent {
1266 ts: crate::metrics::unix_ms(),
1267 tool: "analyze_module",
1268 duration_ms: dur,
1269 output_chars: 0,
1270 param_path_depth: crate::metrics::path_component_count(¶m_path),
1271 max_depth: None,
1272 result: "error",
1273 error_type: Some("invalid_params".to_string()),
1274 session_id: sid.clone(),
1275 seq: Some(seq),
1276 });
1277 return Ok(err_to_tool_result(ErrorData::new(
1278 rmcp::model::ErrorCode::INVALID_PARAMS,
1279 format!(
1280 "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1281 params.path
1282 ),
1283 Some(error_meta(
1284 "validation",
1285 false,
1286 "use analyze_directory for directories",
1287 )),
1288 )));
1289 }
1290
1291 let module_info = match analyze::analyze_module_file(¶ms.path).map_err(|e| {
1292 ErrorData::new(
1293 rmcp::model::ErrorCode::INVALID_PARAMS,
1294 format!("Failed to analyze module: {e}"),
1295 Some(error_meta(
1296 "validation",
1297 false,
1298 "ensure file exists, is readable, and has a supported extension",
1299 )),
1300 )
1301 }) {
1302 Ok(v) => v,
1303 Err(e) => return Ok(err_to_tool_result(e)),
1304 };
1305
1306 let text = format_module_info(&module_info);
1307 let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1308 .with_meta(Some(no_cache_meta()));
1309 let structured = match serde_json::to_value(&module_info).map_err(|e| {
1310 ErrorData::new(
1311 rmcp::model::ErrorCode::INTERNAL_ERROR,
1312 format!("serialization failed: {e}"),
1313 Some(error_meta("internal", false, "report this as a bug")),
1314 )
1315 }) {
1316 Ok(v) => v,
1317 Err(e) => return Ok(err_to_tool_result(e)),
1318 };
1319 result.structured_content = Some(structured);
1320 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1321 self.metrics_tx.send(crate::metrics::MetricEvent {
1322 ts: crate::metrics::unix_ms(),
1323 tool: "analyze_module",
1324 duration_ms: dur,
1325 output_chars: text.len(),
1326 param_path_depth: crate::metrics::path_component_count(¶m_path),
1327 max_depth: None,
1328 result: "ok",
1329 error_type: None,
1330 session_id: sid,
1331 seq: Some(seq),
1332 });
1333 Ok(result)
1334 }
1335}
1336
/// Internal, cloneable parameter bundle for the symbol-focused analysis path.
#[derive(Clone)]
struct FocusedAnalysisParams {
    // Path to analyze — NOTE(review): file vs. directory semantics depend on
    // the consumer (handle_focused_mode); confirm there.
    path: std::path::PathBuf,
    // Symbol name (or pattern, depending on `match_mode`) to focus on.
    symbol: String,
    // How `symbol` is matched; see `SymbolMatchMode` for the variants.
    match_mode: SymbolMatchMode,
    // Presumably the depth to which call chains are followed — confirm at use site.
    follow_depth: u32,
    // Optional cap on traversal depth; `None` means no explicit limit here.
    max_depth: Option<u32>,
    // Optional override for the AST recursion limit used during parsing.
    ast_recursion_limit: Option<usize>,
    // When true, produce the compact summary form of the output.
    use_summary: bool,
    // NOTE(review): name suggests restricting matches to impl blocks
    // (Rust-specific); semantics inferred from the identifier — verify.
    impl_only: Option<bool>,
}
1349
1350#[tool_handler]
1351impl ServerHandler for CodeAnalyzer {
1352 fn get_info(&self) -> InitializeResult {
1353 let excluded = crate::EXCLUDED_DIRS.join(", ");
1354 let instructions = format!(
1355 "Recommended workflow:\n\
1356 1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify source package (largest by file count; exclude {excluded}).\n\
1357 2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for module map. Include test directories (tests/, *_test.go, test_*.py, test_*.rs, *.spec.ts, *.spec.js).\n\
1358 3. For key files, prefer analyze_module for function/import index; use analyze_file for signatures and types.\n\
1359 4. Use analyze_symbol to trace call graphs.\n\
1360 Prefer summary=true on 1000+ files. Set max_depth=2; increase if packages too large. Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
1361 );
1362 let capabilities = ServerCapabilities::builder()
1363 .enable_logging()
1364 .enable_tools()
1365 .enable_tool_list_changed()
1366 .enable_completions()
1367 .build();
1368 let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
1369 .with_title("Code Analyze MCP")
1370 .with_description("MCP server for code structure analysis using tree-sitter");
1371 InitializeResult::new(capabilities)
1372 .with_server_info(server_info)
1373 .with_instructions(&instructions)
1374 }
1375
1376 async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
1377 let mut peer_lock = self.peer.lock().await;
1378 *peer_lock = Some(context.peer.clone());
1379 drop(peer_lock);
1380
1381 let millis = std::time::SystemTime::now()
1383 .duration_since(std::time::UNIX_EPOCH)
1384 .unwrap_or_default()
1385 .as_millis()
1386 .try_into()
1387 .unwrap_or(u64::MAX);
1388 let counter = GLOBAL_SESSION_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
1389 let sid = format!("{millis}-{counter}");
1390 {
1391 let mut session_id_lock = self.session_id.lock().await;
1392 *session_id_lock = Some(sid);
1393 }
1394 self.session_call_seq
1395 .store(0, std::sync::atomic::Ordering::Relaxed);
1396
1397 let peer = self.peer.clone();
1399 let event_rx = self.event_rx.clone();
1400
1401 tokio::spawn(async move {
1402 let rx = {
1403 let mut rx_lock = event_rx.lock().await;
1404 rx_lock.take()
1405 };
1406
1407 if let Some(mut receiver) = rx {
1408 let mut buffer = Vec::with_capacity(64);
1409 loop {
1410 receiver.recv_many(&mut buffer, 64).await;
1412
1413 if buffer.is_empty() {
1414 break;
1416 }
1417
1418 let peer_lock = peer.lock().await;
1420 if let Some(peer) = peer_lock.as_ref() {
1421 for log_event in buffer.drain(..) {
1422 let notification = ServerNotification::LoggingMessageNotification(
1423 Notification::new(LoggingMessageNotificationParam {
1424 level: log_event.level,
1425 logger: Some(log_event.logger),
1426 data: log_event.data,
1427 }),
1428 );
1429 if let Err(e) = peer.send_notification(notification).await {
1430 warn!("Failed to send logging notification: {}", e);
1431 }
1432 }
1433 }
1434 }
1435 }
1436 });
1437 }
1438
1439 #[instrument(skip(self, _context))]
1440 async fn on_cancelled(
1441 &self,
1442 notification: CancelledNotificationParam,
1443 _context: NotificationContext<RoleServer>,
1444 ) {
1445 tracing::info!(
1446 request_id = ?notification.request_id,
1447 reason = ?notification.reason,
1448 "Received cancellation notification"
1449 );
1450 }
1451
1452 #[instrument(skip(self, _context))]
1453 async fn complete(
1454 &self,
1455 request: CompleteRequestParams,
1456 _context: RequestContext<RoleServer>,
1457 ) -> Result<CompleteResult, ErrorData> {
1458 let argument_name = &request.argument.name;
1460 let argument_value = &request.argument.value;
1461
1462 let completions = match argument_name.as_str() {
1463 "path" => {
1464 let root = Path::new(".");
1466 completion::path_completions(root, argument_value)
1467 }
1468 "symbol" => {
1469 let path_arg = request
1471 .context
1472 .as_ref()
1473 .and_then(|ctx| ctx.get_argument("path"));
1474
1475 match path_arg {
1476 Some(path_str) => {
1477 let path = Path::new(path_str);
1478 completion::symbol_completions(&self.cache, path, argument_value)
1479 }
1480 None => Vec::new(),
1481 }
1482 }
1483 _ => Vec::new(),
1484 };
1485
1486 let total_count = u32::try_from(completions.len()).unwrap_or(u32::MAX);
1488 let (values, has_more) = if completions.len() > 100 {
1489 (completions.into_iter().take(100).collect(), true)
1490 } else {
1491 (completions, false)
1492 };
1493
1494 let completion_info =
1495 match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
1496 Ok(info) => info,
1497 Err(_) => {
1498 CompletionInfo::with_all_values(Vec::new())
1500 .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
1501 }
1502 };
1503
1504 Ok(CompleteResult::new(completion_info))
1505 }
1506
1507 async fn set_level(
1508 &self,
1509 params: SetLevelRequestParams,
1510 _context: RequestContext<RoleServer>,
1511 ) -> Result<(), ErrorData> {
1512 let level_filter = match params.level {
1513 LoggingLevel::Debug => LevelFilter::DEBUG,
1514 LoggingLevel::Info | LoggingLevel::Notice => LevelFilter::INFO,
1515 LoggingLevel::Warning => LevelFilter::WARN,
1516 LoggingLevel::Error
1517 | LoggingLevel::Critical
1518 | LoggingLevel::Alert
1519 | LoggingLevel::Emergency => LevelFilter::ERROR,
1520 };
1521
1522 let mut filter_lock = self
1523 .log_level_filter
1524 .lock()
1525 .unwrap_or_else(|e| e.into_inner());
1526 *filter_lock = level_filter;
1527 Ok(())
1528 }
1529}
1530
#[cfg(test)]
mod tests {
    use super::*;

    /// emit_progress with no connected peer must be a silent no-op.
    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        );
        let token = ProgressToken(NumberOrString::String("test".into()));
        analyzer
            .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
            .await;
    }

    /// Builds a CodeAnalyzer wired to fresh, unconnected channels.
    fn make_analyzer() -> CodeAnalyzer {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        )
    }

    #[test]
    fn test_summary_cursor_conflict() {
        // Conflict only when summary=true AND a cursor is supplied.
        assert!(summary_cursor_conflict(Some(true), Some("cursor")));
        assert!(!summary_cursor_conflict(Some(true), None));
        assert!(!summary_cursor_conflict(None, Some("x")));
        assert!(!summary_cursor_conflict(None, None));
    }

    #[tokio::test]
    async fn test_validate_impl_only_non_rust_returns_invalid_params() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.py"), "def foo(): pass").unwrap();

        let analyzer = make_analyzer();
        let entries: Vec<traversal::WalkEntry> =
            traversal::walk_directory(dir.path(), None).unwrap_or_default();
        // impl_only is Rust-specific; a Python-only tree must be rejected.
        let result = CodeAnalyzer::validate_impl_only(&entries);
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert_eq!(err.code, rmcp::model::ErrorCode::INVALID_PARAMS);
        drop(analyzer);
    }

    #[tokio::test]
    async fn test_no_cache_meta_on_analyze_directory_result() {
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.rs"), "fn main() {}").unwrap();

        let analyzer = make_analyzer();
        let params = AnalyzeDirectoryParams {
            path: dir.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: None,
            },
        };
        let ct = tokio_util::sync::CancellationToken::new();
        let arc_output = analyzer.handle_overview_mode(&params, ct).await.unwrap();
        let meta = no_cache_meta();
        assert_eq!(
            meta.0.get("cache_hint").and_then(|v| v.as_str()),
            Some("no-cache"),
        );
        drop(arc_output);
    }

    #[test]
    fn test_complete_path_completions_returns_suggestions() {
        // The workspace root contains crates named "code-*", so the prefix
        // must yield at least one completion.
        let manifest_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR"));
        let workspace_root = manifest_dir.parent().expect("manifest dir has parent");
        let suggestions = completion::path_completions(workspace_root, "code-");
        assert!(
            !suggestions.is_empty(),
            "expected completions for prefix 'code-' in workspace root"
        );
    }

    #[tokio::test]
    async fn test_handle_overview_mode_verbose_no_summary_block() {
        use code_analyze_core::pagination::{PaginationMode, paginate_slice};
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let tmp = TempDir::new().unwrap();
        std::fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();

        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        );

        let params = AnalyzeDirectoryParams {
            path: tmp.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: Some(true),
            },
        };

        let ct = tokio_util::sync::CancellationToken::new();
        let output = analyzer.handle_overview_mode(&params, ct).await.unwrap();

        // Mirror the handler's summary decision and pagination formatting.
        let use_summary = output.formatted.len() > SIZE_LIMIT;
        let paginated =
            paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
        let verbose = true;
        let formatted = if !use_summary {
            format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(std::path::Path::new(&params.path)),
                verbose,
            )
        } else {
            output.formatted.clone()
        };

        assert!(
            !formatted.contains("SUMMARY:"),
            "verbose=true must not emit SUMMARY: block; got: {}",
            &formatted[..formatted.len().min(300)]
        );
        assert!(
            formatted.contains("PAGINATED:"),
            "verbose=true must emit PAGINATED: header"
        );
        assert!(
            formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
            "verbose=true must emit FILES section header"
        );
    }
}