1pub mod analyze;
18pub mod cache;
19pub mod completion;
20pub mod formatter;
21pub mod graph;
22pub mod lang;
23pub mod languages;
24pub mod logging;
25pub mod metrics;
26pub mod pagination;
27pub mod parser;
28pub(crate) mod schema_helpers;
29pub mod test_detection;
30pub mod traversal;
31pub mod types;
32
/// Directory names skipped during traversal: dependency caches, VCS
/// metadata, build outputs, and virtual environments.
pub(crate) const EXCLUDED_DIRS: &[&str] = &[
    "node_modules",
    "vendor",
    ".git",
    "__pycache__",
    "target",
    "dist",
    "build",
    ".venv",
];
43
44use cache::AnalysisCache;
45use formatter::{
46 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
47 format_module_info, format_structure_paginated, format_summary,
48};
49use logging::LogEvent;
50use pagination::{
51 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
52};
53use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
54use rmcp::handler::server::wrapper::Parameters;
55use rmcp::model::{
56 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
57 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
58 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
59 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
60};
61use rmcp::service::{NotificationContext, RequestContext};
62use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
63use serde_json::Value;
64use std::path::Path;
65use std::sync::{Arc, Mutex};
66use tokio::sync::{Mutex as TokioMutex, mpsc};
67use tracing::{instrument, warn};
68use tracing_subscriber::filter::LevelFilter;
69use traversal::walk_directory;
70use types::{
71 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
72 AnalyzeSymbolParams,
73};
74
// Monotonically increasing counter for deriving unique session identifiers.
// NOTE(review): not referenced in this chunk — presumably consumed elsewhere
// in the file; confirm before removing.
static GLOBAL_SESSION_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);

// Formatted-output size threshold (in chars, ~12.5K tokens at ~4 chars/token)
// beyond which handlers auto-summarize or reject without force=true.
const SIZE_LIMIT: usize = 50_000;
78
/// Returns `true` when a request asks for both a compact summary and a
/// pagination cursor — the two options are mutually exclusive.
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!((summary, cursor), (Some(true), Some(_)))
}
84
85fn error_meta(
86 category: &'static str,
87 is_retryable: bool,
88 suggested_action: &'static str,
89) -> Option<serde_json::Value> {
90 Some(serde_json::json!({
91 "errorCategory": category,
92 "isRetryable": is_retryable,
93 "suggestedAction": suggested_action,
94 }))
95}
96
/// Converts protocol-level `ErrorData` into a tool-level error result so the
/// failure surfaces as tool output rather than a transport error.
/// NOTE(review): only the message text is preserved; the structured metadata
/// built by `error_meta` is dropped here — confirm that is intentional.
fn err_to_tool_result(e: ErrorData) -> CallToolResult {
    CallToolResult::error(vec![Content::text(e.message)])
}
100
101fn no_cache_meta() -> Meta {
102 let mut m = serde_json::Map::new();
103 m.insert(
104 "cache_hint".to_string(),
105 serde_json::Value::String("no-cache".to_string()),
106 );
107 Meta(m)
108}
109
110fn paginate_focus_chains(
113 chains: &[graph::InternalCallChain],
114 mode: PaginationMode,
115 offset: usize,
116 page_size: usize,
117) -> Result<(Vec<graph::InternalCallChain>, Option<String>), ErrorData> {
118 let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
119 ErrorData::new(
120 rmcp::model::ErrorCode::INTERNAL_ERROR,
121 e.to_string(),
122 error_meta("transient", true, "retry the request"),
123 )
124 })?;
125
126 if paginated.next_cursor.is_none() && offset == 0 {
127 return Ok((paginated.items, None));
128 }
129
130 let next = if let Some(raw_cursor) = paginated.next_cursor {
131 let decoded = decode_cursor(&raw_cursor).map_err(|e| {
132 ErrorData::new(
133 rmcp::model::ErrorCode::INVALID_PARAMS,
134 e.to_string(),
135 error_meta("validation", false, "invalid cursor format"),
136 )
137 })?;
138 Some(
139 encode_cursor(&CursorData {
140 mode,
141 offset: decoded.offset,
142 })
143 .map_err(|e| {
144 ErrorData::new(
145 rmcp::model::ErrorCode::INVALID_PARAMS,
146 e.to_string(),
147 error_meta("validation", false, "invalid cursor format"),
148 )
149 })?,
150 )
151 } else {
152 None
153 };
154
155 Ok((paginated.items, next))
156}
157
/// MCP tool server exposing the code-analysis tools (directory, file, symbol).
#[derive(Clone)]
pub struct CodeAnalyzer {
    // Router generated by `#[tool_router]`; dispatches tool calls by name.
    tool_router: ToolRouter<Self>,
    // Cache of analysis results (per-file and per-directory entries).
    cache: AnalysisCache,
    // Connected client peer used for progress notifications; `None` until a
    // session is established.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    // Minimum level for forwarded log messages (std Mutex — must not be held
    // across an await).
    log_level_filter: Arc<Mutex<LevelFilter>>,
    // Receiver side of the logging channel, wrapped in Option so it can be
    // taken exactly once by whoever pumps log events.
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    // Channel for emitting per-call metric events.
    metrics_tx: crate::metrics::MetricsSender,
    // Per-session call sequence number used to correlate metric events.
    session_call_seq: Arc<std::sync::atomic::AtomicU32>,
    // Identifier of the current session, if one has been assigned.
    session_id: Arc<TokioMutex<Option<String>>>,
}
173
174#[tool_router]
175impl CodeAnalyzer {
    /// Lists every tool registered with the generated router (allows tool
    /// advertisement without constructing a full `CodeAnalyzer`).
    pub fn list_tools() -> Vec<rmcp::model::Tool> {
        Self::tool_router().list_all()
    }
179
    /// Creates an analyzer wired to the given peer handle, log filter,
    /// log-event receiver, and metrics channel. The analysis cache holds up
    /// to 100 entries; session state starts empty.
    pub fn new(
        peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
        log_level_filter: Arc<Mutex<LevelFilter>>,
        event_rx: mpsc::UnboundedReceiver<LogEvent>,
        metrics_tx: crate::metrics::MetricsSender,
    ) -> Self {
        CodeAnalyzer {
            tool_router: Self::tool_router(),
            cache: AnalysisCache::new(100),
            peer,
            log_level_filter,
            // Option wrapper lets a consumer `take()` the receiver once.
            event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
            metrics_tx,
            session_call_seq: Arc::new(std::sync::atomic::AtomicU32::new(0)),
            session_id: Arc::new(TokioMutex::new(None)),
        }
    }
197
198 #[instrument(skip(self))]
199 async fn emit_progress(
200 &self,
201 peer: Option<Peer<RoleServer>>,
202 token: &ProgressToken,
203 progress: f64,
204 total: f64,
205 message: String,
206 ) {
207 if let Some(peer) = peer {
208 let notification = ServerNotification::ProgressNotification(Notification::new(
209 ProgressNotificationParam {
210 progress_token: token.clone(),
211 progress,
212 total: Some(total),
213 message: Some(message),
214 },
215 ));
216 if let Err(e) = peer.send_notification(notification).await {
217 warn!("Failed to send progress notification: {}", e);
218 }
219 }
220 }
221
    /// Runs the directory-overview analysis behind `analyze_directory`.
    ///
    /// Walks the full directory tree, consults the directory cache, then runs
    /// the CPU-bound analysis on a blocking thread while polling every 100ms
    /// to forward progress notifications and honor cancellation. Successful
    /// results are cached.
    ///
    /// # Errors
    /// Returns `INTERNAL_ERROR` for walk failures, cancellation, analysis
    /// errors, or a failed task join.
    #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<std::sync::Arc<analyze::AnalysisOutput>, ErrorData> {
        let path = Path::new(&params.path);
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        // Walk with no depth limit so the cache key reflects the whole tree.
        let all_entries = walk_directory(path, None).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {}", e),
                error_meta("resource", false, "check path permissions and availability"),
            )
        })?;

        // Normalize max_depth=0 to "unlimited" so equivalent requests share
        // one cache entry.
        let canonical_max_depth = max_depth.and_then(|d| if d == 0 { None } else { Some(d) });

        let cache_key = cache::DirectoryCacheKey::from_entries(
            &all_entries,
            canonical_max_depth,
            AnalysisMode::Overview,
        );

        if let Some(cached) = self.cache.get_directory(&cache_key) {
            return Ok(cached);
        }

        // When depth-limited, precompute per-subtree counts from the full
        // walk so truncated branches can still report totals.
        let subtree_counts = if max_depth.is_some_and(|d| d > 0) {
            Some(traversal::subtree_counts_from_entries(path, &all_entries))
        } else {
            None
        };

        // Restrict the analyzed entries to the requested depth, if any.
        let entries: Vec<traversal::WalkEntry> = if let Some(depth) = max_depth
            && depth > 0
        {
            all_entries
                .into_iter()
                .filter(|e| e.depth <= depth as usize)
                .collect()
        } else {
            all_entries
        };

        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // CPU-bound analysis runs off the async runtime.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Progress token made unique via the current time in nanoseconds.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll loop: forward progress when the counter moved; stop on cancel
        // or when the blocking task finishes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {}/{} files", current, total_files),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification (skipped when cancelled or nothing to do).
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {} files", total_files),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(mut output)) => {
                output.subtree_counts = subtree_counts;
                let arc_output = std::sync::Arc::new(output);
                self.cache.put_directory(cache_key, arc_output.clone());
                Ok(arc_output)
            }
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                error_meta("transient", true, "analysis was cancelled"),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {}", e),
                error_meta("resource", false, "check path and file permissions"),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {}", e),
                error_meta("transient", true, "retry the request"),
            )),
        }
    }
362
363 #[instrument(skip(self, params))]
366 async fn handle_file_details_mode(
367 &self,
368 params: &AnalyzeFileParams,
369 ) -> Result<std::sync::Arc<analyze::FileAnalysisOutput>, ErrorData> {
370 let cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
372 meta.modified().ok().map(|mtime| cache::CacheKey {
373 path: std::path::PathBuf::from(¶ms.path),
374 modified: mtime,
375 mode: AnalysisMode::FileDetails,
376 })
377 });
378
379 if let Some(ref key) = cache_key
381 && let Some(cached) = self.cache.get(key)
382 {
383 return Ok(cached);
384 }
385
386 match analyze::analyze_file(¶ms.path, params.ast_recursion_limit) {
388 Ok(output) => {
389 let arc_output = std::sync::Arc::new(output);
390 if let Some(ref key) = cache_key {
391 self.cache.put(key.clone(), arc_output.clone());
392 }
393 Ok(arc_output)
394 }
395 Err(e) => Err(ErrorData::new(
396 rmcp::model::ErrorCode::INTERNAL_ERROR,
397 format!("Error analyzing file: {}", e),
398 error_meta("resource", false, "check file path and permissions"),
399 )),
400 }
401 }
402
    /// Runs the symbol-focused call-graph analysis behind `analyze_symbol`.
    ///
    /// Validates `impl_only` (requires at least one `.rs` file), runs the
    /// analysis on a blocking thread with periodic progress notifications,
    /// enforces the 50K-char output limit (re-running in summary mode when
    /// the caller expressed no preference), and prepends the impl_only
    /// FILTER header.
    ///
    /// # Errors
    /// `INVALID_PARAMS` for impl_only misuse or oversized output that the
    /// caller declined to summarize; `INTERNAL_ERROR` for cancellation,
    /// analysis failures, or a failed task join.
    #[instrument(skip(self, params, ct))]
    async fn handle_focused_mode(
        &self,
        params: &AnalyzeSymbolParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        let follow_depth = params.follow_depth.unwrap_or(1);
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path = Path::new(&params.path);
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let symbol_owned = params.symbol.clone();
        let match_mode = params.match_mode.clone().unwrap_or_default();
        let ast_recursion_limit = params.ast_recursion_limit;
        let ct_clone = ct.clone();
        let impl_only = params.impl_only;

        // impl_only is Rust-specific: reject early when no .rs files exist.
        if impl_only == Some(true) {
            let has_rust = walk_directory(path, max_depth)
                .ok()
                .map(|entries| {
                    entries.iter().any(|e| {
                        !e.is_dir && e.path.extension().and_then(|x| x.to_str()) == Some("rs")
                    })
                })
                .unwrap_or(false);

            if !has_rust {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    "impl_only=true requires Rust source files. No .rs files found in the given path. Use analyze_symbol without impl_only for cross-language analysis.".to_string(),
                    error_meta(
                        "validation",
                        false,
                        "remove impl_only or point to a directory containing .rs files",
                    ),
                ));
            }
        }

        // force=true always wins over summary=true for the blocking task.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        // Best-effort file count for progress totals; 0 disables reporting.
        let total_files = match walk_directory(path, max_depth) {
            Ok(entries) => entries.iter().filter(|e| !e.is_dir).count(),
            Err(_) => 0,
        };

        // CPU-bound analysis runs off the async runtime.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_focused_with_progress(
                &path_owned,
                &symbol_owned,
                match_mode,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                counter_clone,
                ct_clone,
                use_summary_for_task,
                impl_only,
            )
        });

        // Progress token made unique via the current time in nanoseconds.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll loop: forward progress when the counter moved; stop on cancel
        // or when the blocking task finishes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {}/{} files for symbol '{}'",
                        current, total_files, params.symbol
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification (skipped when cancelled or nothing to do).
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!(
                    "Completed analyzing {} files for symbol '{}'",
                    total_files, params.symbol
                ),
            )
            .await;
        }

        let mut output = match handle.await {
            Ok(Ok(output)) => output,
            Ok(Err(analyze::AnalyzeError::Cancelled)) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    "Analysis cancelled".to_string(),
                    error_meta("transient", true, "analysis was cancelled"),
                ));
            }
            Ok(Err(e)) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Error analyzing symbol: {}", e),
                    error_meta("resource", false, "check symbol name and file"),
                ));
            }
            Err(e) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Task join error: {}", e),
                    error_meta("transient", true, "retry the request"),
                ));
            }
        };

        // Caller expressed no summary preference and the output is too big:
        // transparently re-run in summary mode; if that fails, error out.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            let path_owned2 = Path::new(&params.path).to_path_buf();
            let symbol_owned2 = params.symbol.clone();
            let match_mode2 = params.match_mode.clone().unwrap_or_default();
            let follow_depth2 = params.follow_depth.unwrap_or(1);
            let max_depth2 = params.max_depth;
            let ast_recursion_limit2 = params.ast_recursion_limit;
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let ct2 = ct.clone();
            let impl_only2 = impl_only;
            let summary_result = tokio::task::spawn_blocking(move || {
                analyze::analyze_focused_with_progress(
                    &path_owned2,
                    &symbol_owned2,
                    match_mode2,
                    follow_depth2,
                    max_depth2,
                    ast_recursion_limit2,
                    counter2,
                    ct2,
                    true, impl_only2,
                )
            })
            .await;
            match summary_result {
                Ok(Ok(summary_output)) => {
                    output.formatted = summary_output.formatted;
                }
                _ => {
                    // Rough heuristic: ~4 chars per token.
                    let estimated_tokens = output.formatted.len() / 4;
                    let message = format!(
                        "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                        output.formatted.len(),
                        estimated_tokens
                    );
                    return Err(ErrorData::new(
                        rmcp::model::ErrorCode::INVALID_PARAMS,
                        message,
                        error_meta("validation", false, "use summary=true or force=true"),
                    ));
                }
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly rejected summary and output is too big: error.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - summary=true to get compact summary\n\
                 - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                ),
            ));
        }

        // Prepend the FILTER header (and a hint when nothing matched).
        if impl_only == Some(true) {
            let filter_line = format!(
                "FILTER: impl_only=true ({} of {} callers shown)\n",
                output.impl_trait_caller_count, output.unfiltered_caller_count
            );
            output.formatted = format!("{}{}", filter_line, output.formatted);

            if output.impl_trait_caller_count == 0 {
                output.formatted.push_str(
                    "\nNOTE: No impl-trait callers found. The symbol may be a plain function or struct, not a trait method. Remove impl_only to see all callers.\n"
                );
            }
        }

        Ok(output)
    }
645
    /// Tool entry point: directory-overview analysis with summary/force
    /// handling, offset-cursor pagination, and per-call metrics emission.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_directory",
        description = "Analyze directory structure and code metrics for multi-file overview. Use this tool for directories; use analyze_file for a single file. Returns a tree with LOC, function count, class count, and test file markers. Respects .gitignore (results may differ from raw filesystem listing because .gitignore rules are applied). For repos with 1000+ files, use max_depth=2-3 and summary=true to stay within token budgets. Note: max_depth controls what is analyzed (traversal depth), while page_size controls how results are returned (chunking); these are independent. Strategy comparison: prefer pagination (page_size=50) over force=true to reduce per-call token overhead; use summary=true when counts and structure are sufficient and no pagination is needed; force=true is an escape hatch for exceptional cases. Empty directories return an empty tree with zero counts. Output auto-summarizes at 50K chars; use summary=true to force compact output. Paginate large results with cursor and page_size. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they? summary=true and cursor are mutually exclusive; passing both returns an error.",
        output_schema = schema_for_type::<analyze::AnalysisOutput>(),
        annotations(
            title = "Analyze Directory",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_directory(
        &self,
        params: Parameters<AnalyzeDirectoryParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();
        // Captured up front for the metrics event emitted at the end.
        let _t_start = std::time::Instant::now();
        let _param_path = params.path.clone();
        let _max_depth_val = params.max_depth;
        let _seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let _sid = self.session_id.lock().await.clone();

        // Handler errors are converted to tool-level error results.
        let arc_output = match self.handle_overview_mode(&params, ct).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };
        // Take ownership without cloning when this call holds the only Arc
        // (i.e. the result was not served from / shared with the cache).
        let mut output = match std::sync::Arc::try_unwrap(arc_output) {
            Ok(owned) => owned,
            Err(arc) => (*arc).clone(),
        };

        // summary=true and cursor are mutually exclusive by contract.
        if summary_cursor_conflict(
            params.output_control.summary,
            params.pagination.cursor.as_deref(),
        ) {
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                "summary=true is incompatible with a pagination cursor; use one or the other"
                    .to_string(),
                error_meta("validation", false, "remove cursor or set summary=false"),
            )));
        }

        // Precedence: force > explicit summary flag > auto-summarize on size.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            output.formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            output.formatted = format_summary(
                &output.entries,
                &output.files,
                params.max_depth,
                output.subtree_counts.as_deref(),
            );
        }

        // Resolve the pagination offset from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        let paginated =
            match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        error_meta("transient", true, "retry the request"),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            output.formatted = format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(Path::new(&params.path)),
                verbose,
            );
        }

        // Summary output is never paginated, so no continuation cursor.
        if use_summary {
            output.next_cursor = None;
        } else {
            output.next_cursor = paginated.next_cursor.clone();
        }

        // Append the cursor to the text body so plain-text clients see it too.
        let mut final_text = output.formatted.clone();
        if !use_summary && let Some(cursor) = paginated.next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(&cursor);
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let _dur = _t_start.elapsed().as_millis() as u64;
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_directory",
            duration_ms: _dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&_param_path),
            max_depth: _max_depth_val,
            result: "ok",
            error_type: None,
            session_id: _sid,
            seq: Some(_seq),
        });
        Ok(result)
    }
796
    /// Tool entry point: single-file semantic analysis with summary/force
    /// handling, pagination over top-level functions, and metrics emission.
    #[instrument(skip(self, _context))]
    #[tool(
        name = "analyze_file",
        description = "Extract semantic structure from a single source file only; pass a directory to analyze_directory instead. Returns functions with signatures, types, and line ranges; class and method definitions with inheritance, fields, and imports. Supported languages: Rust, Go, Java, Python, TypeScript, TSX, Fortran; unsupported file extensions return an error. Common mistake: passing a directory path returns an error; use analyze_directory for directories. Generated code with deeply nested ASTs may exceed 50K chars; use summary=true to get counts only. Supports pagination for large files via cursor/page_size. Use summary=true for compact output. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py. The fields parameter limits output to specific sections. Valid values: \"functions\", \"classes\", \"imports\". The FILE header (path, line count, section counts) is always emitted. Omit fields to return all sections. When summary=true, fields is ignored. When fields explicitly lists \"imports\", the imports section is rendered regardless of the verbose flag; in all other cases imports require verbose=true.",
        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
        annotations(
            title = "Analyze File",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        _context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        // Captured up front for the metrics event emitted at the end.
        let _t_start = std::time::Instant::now();
        let _param_path = params.path.clone();
        let _seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let _sid = self.session_id.lock().await.clone();

        // Handler errors are converted to tool-level error results.
        let arc_output = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Precedence: force > explicit summary flag > auto-summarize on size.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // Caller opted out of summary but output is too big: error out.
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - Narrow your scope (smaller directory, specific file)\n\
                 - Use analyze_symbol mode for targeted analysis\n\
                 - Reduce max_depth parameter",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                error_meta("validation", false, "use force=true or narrow scope"),
            )));
        }

        // Resolve the pagination offset from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Pagination operates on top-level functions only: a function whose
        // line range falls inside some class's range is treated as a method.
        let top_level_fns: Vec<crate::types::FunctionInfo> = arc_output
            .semantic
            .functions
            .iter()
            .filter(|func| {
                !arc_output
                    .semantic
                    .classes
                    .iter()
                    .any(|class| func.line >= class.line && func.end_line <= class.end_line)
            })
            .cloned()
            .collect();

        let paginated =
            match paginate_slice(&top_level_fns, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        error_meta("transient", true, "retry the request"),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
                params.fields.as_deref(),
            );
        }

        // Summary output is never paginated, so no continuation cursor.
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        // Append the cursor to the text body so plain-text clients see it too.
        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(cursor);
        }

        let response_output = analyze::FileAnalysisOutput {
            formatted,
            semantic: arc_output.semantic.clone(),
            line_count,
            next_cursor,
        };

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let _dur = _t_start.elapsed().as_millis() as u64;
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: _dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&_param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: _sid,
            seq: Some(_seq),
        });
        Ok(result)
    }
970
971 #[instrument(skip(self, context))]
972 #[tool(
973 name = "analyze_symbol",
974 description = "Build call graph for a named function or method across all files in a directory to trace a specific function's usage. Returns direct callers and callees. Default symbol lookup is case-sensitive exact-match (match_mode=exact); myFunc and myfunc are different symbols. If exact match fails, retry with match_mode=insensitive for a case-insensitive search. To list candidates matching a prefix, use match_mode=prefix. To find symbols containing a substring, use match_mode=contains. When prefix or contains matches multiple symbols, an error is returned listing all candidates so you can refine to a single match. A symbol unknown to the graph (not defined and not referenced) returns an error; a symbol that is defined but has no callers or callees returns empty chains without error. follow_depth warning: each increment can multiply output size exponentially; use follow_depth=1 for production use; follow_depth=2+ only for targeted deep dives. Use cursor/page_size to paginate call chains when results exceed page_size. impl_only=true: restrict callers to only those from 'impl Trait for Type' blocks (Rust only); returns INVALID_PARAMS for non-Rust directories; emits a FILTER header showing how many callers were retained. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep; Show only trait impl callers of the write method",
975 output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
976 annotations(
977 title = "Analyze Symbol",
978 read_only_hint = true,
979 destructive_hint = false,
980 idempotent_hint = true,
981 open_world_hint = false
982 )
983 )]
984 async fn analyze_symbol(
985 &self,
986 params: Parameters<AnalyzeSymbolParams>,
987 context: RequestContext<RoleServer>,
988 ) -> Result<CallToolResult, ErrorData> {
989 let params = params.0;
990 let ct = context.ct.clone();
991 let _t_start = std::time::Instant::now();
992 let _param_path = params.path.clone();
993 let _max_depth_val = params.follow_depth;
994 let _seq = self
995 .session_call_seq
996 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
997 let _sid = self.session_id.lock().await.clone();
998
999 let mut output = match self.handle_focused_mode(¶ms, ct).await {
1001 Ok(v) => v,
1002 Err(e) => return Ok(err_to_tool_result(e)),
1003 };
1004
1005 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
1007 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
1008 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
1009 ErrorData::new(
1010 rmcp::model::ErrorCode::INVALID_PARAMS,
1011 e.to_string(),
1012 error_meta("validation", false, "invalid cursor format"),
1013 )
1014 }) {
1015 Ok(v) => v,
1016 Err(e) => return Ok(err_to_tool_result(e)),
1017 };
1018 cursor_data.offset
1019 } else {
1020 0
1021 };
1022
1023 let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
1025 decode_cursor(cursor_str)
1026 .map(|c| c.mode)
1027 .unwrap_or(PaginationMode::Callers)
1028 } else {
1029 PaginationMode::Callers
1030 };
1031
1032 let use_summary = params.output_control.summary == Some(true);
1033 let verbose = params.output_control.verbose.unwrap_or(false);
1034
1035 let mut callee_cursor = match cursor_mode {
1036 PaginationMode::Callers => {
1037 let (paginated_items, paginated_next) = match paginate_focus_chains(
1038 &output.prod_chains,
1039 PaginationMode::Callers,
1040 offset,
1041 page_size,
1042 ) {
1043 Ok(v) => v,
1044 Err(e) => return Ok(err_to_tool_result(e)),
1045 };
1046
1047 if !use_summary
1048 && (paginated_next.is_some()
1049 || offset > 0
1050 || !verbose
1051 || !output.outgoing_chains.is_empty())
1052 {
1053 let base_path = Path::new(¶ms.path);
1054 output.formatted = format_focused_paginated(
1055 &paginated_items,
1056 output.prod_chains.len(),
1057 PaginationMode::Callers,
1058 ¶ms.symbol,
1059 &output.prod_chains,
1060 &output.test_chains,
1061 &output.outgoing_chains,
1062 output.def_count,
1063 offset,
1064 Some(base_path),
1065 verbose,
1066 );
1067 paginated_next
1068 } else {
1069 None
1070 }
1071 }
1072 PaginationMode::Callees => {
1073 let (paginated_items, paginated_next) = match paginate_focus_chains(
1074 &output.outgoing_chains,
1075 PaginationMode::Callees,
1076 offset,
1077 page_size,
1078 ) {
1079 Ok(v) => v,
1080 Err(e) => return Ok(err_to_tool_result(e)),
1081 };
1082
1083 if paginated_next.is_some() || offset > 0 || !verbose {
1084 let base_path = Path::new(¶ms.path);
1085 output.formatted = format_focused_paginated(
1086 &paginated_items,
1087 output.outgoing_chains.len(),
1088 PaginationMode::Callees,
1089 ¶ms.symbol,
1090 &output.prod_chains,
1091 &output.test_chains,
1092 &output.outgoing_chains,
1093 output.def_count,
1094 offset,
1095 Some(base_path),
1096 verbose,
1097 );
1098 paginated_next
1099 } else {
1100 None
1101 }
1102 }
1103 PaginationMode::Default => {
1104 unreachable!("SymbolFocus should only use Callers or Callees modes")
1105 }
1106 };
1107
1108 if callee_cursor.is_none()
1113 && cursor_mode == PaginationMode::Callers
1114 && !output.outgoing_chains.is_empty()
1115 && !use_summary
1116 && let Ok(cursor) = encode_cursor(&CursorData {
1117 mode: PaginationMode::Callees,
1118 offset: 0,
1119 })
1120 {
1121 callee_cursor = Some(cursor);
1122 }
1123
1124 output.next_cursor = callee_cursor.clone();
1126
1127 let mut final_text = output.formatted.clone();
1129 if let Some(cursor) = callee_cursor {
1130 final_text.push('\n');
1131 final_text.push_str("NEXT_CURSOR: ");
1132 final_text.push_str(&cursor);
1133 }
1134
1135 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1136 .with_meta(Some(no_cache_meta()));
1137 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1138 result.structured_content = Some(structured);
1139 let _dur = _t_start.elapsed().as_millis() as u64;
1140 self.metrics_tx.send(crate::metrics::MetricEvent {
1141 ts: crate::metrics::unix_ms(),
1142 tool: "analyze_symbol",
1143 duration_ms: _dur,
1144 output_chars: final_text.len(),
1145 param_path_depth: crate::metrics::path_component_count(&_param_path),
1146 max_depth: _max_depth_val,
1147 result: "ok",
1148 error_type: None,
1149 session_id: _sid,
1150 seq: Some(_seq),
1151 });
1152 Ok(result)
1153 }
1154
1155 #[instrument(skip(self))]
1156 #[tool(
1157 name = "analyze_module",
1158 description = "Index functions and imports in a single source file with minimal token cost. Returns name, line_count, language, function names with line numbers, and import list only -- no signatures, no types, no call graphs, no references. ~75% smaller output than analyze_file. Use analyze_file when you need function signatures, types, or class details; use analyze_module when you only need a function/import index to orient in a file or survey many files in sequence. Use analyze_directory for multi-file overviews; use analyze_symbol to trace call graphs for a specific function. Supported languages: Rust, Go, Java, Python, TypeScript, TSX, Fortran; unsupported extensions return an error. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs. Pagination, summary, force, and verbose parameters are not supported by this tool.",
1159 output_schema = schema_for_type::<types::ModuleInfo>(),
1160 annotations(
1161 title = "Analyze Module",
1162 read_only_hint = true,
1163 destructive_hint = false,
1164 idempotent_hint = true,
1165 open_world_hint = false
1166 )
1167 )]
1168 async fn analyze_module(
1169 &self,
1170 params: Parameters<AnalyzeModuleParams>,
1171 _context: RequestContext<RoleServer>,
1172 ) -> Result<CallToolResult, ErrorData> {
1173 let params = params.0;
1174 let _t_start = std::time::Instant::now();
1175 let _param_path = params.path.clone();
1176 let _seq = self
1177 .session_call_seq
1178 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1179 let _sid = self.session_id.lock().await.clone();
1180
1181 if std::fs::metadata(¶ms.path)
1183 .map(|m| m.is_dir())
1184 .unwrap_or(false)
1185 {
1186 let _dur = _t_start.elapsed().as_millis() as u64;
1187 self.metrics_tx.send(crate::metrics::MetricEvent {
1188 ts: crate::metrics::unix_ms(),
1189 tool: "analyze_module",
1190 duration_ms: _dur,
1191 output_chars: 0,
1192 param_path_depth: crate::metrics::path_component_count(&_param_path),
1193 max_depth: None,
1194 result: "error",
1195 error_type: Some("invalid_params".to_string()),
1196 session_id: _sid.clone(),
1197 seq: Some(_seq),
1198 });
1199 return Ok(err_to_tool_result(ErrorData::new(
1200 rmcp::model::ErrorCode::INVALID_PARAMS,
1201 format!(
1202 "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1203 params.path
1204 ),
1205 error_meta("validation", false, "use analyze_directory for directories"),
1206 )));
1207 }
1208
1209 let module_info = match analyze::analyze_module_file(¶ms.path).map_err(|e| {
1210 ErrorData::new(
1211 rmcp::model::ErrorCode::INVALID_PARAMS,
1212 format!("Failed to analyze module: {}", e),
1213 error_meta(
1214 "validation",
1215 false,
1216 "ensure file exists, is readable, and has a supported extension",
1217 ),
1218 )
1219 }) {
1220 Ok(v) => v,
1221 Err(e) => return Ok(err_to_tool_result(e)),
1222 };
1223
1224 let text = format_module_info(&module_info);
1225 let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1226 .with_meta(Some(no_cache_meta()));
1227 let structured = match serde_json::to_value(&module_info).map_err(|e| {
1228 ErrorData::new(
1229 rmcp::model::ErrorCode::INTERNAL_ERROR,
1230 format!("serialization failed: {}", e),
1231 error_meta("internal", false, "report this as a bug"),
1232 )
1233 }) {
1234 Ok(v) => v,
1235 Err(e) => return Ok(err_to_tool_result(e)),
1236 };
1237 result.structured_content = Some(structured);
1238 let _dur = _t_start.elapsed().as_millis() as u64;
1239 self.metrics_tx.send(crate::metrics::MetricEvent {
1240 ts: crate::metrics::unix_ms(),
1241 tool: "analyze_module",
1242 duration_ms: _dur,
1243 output_chars: text.len(),
1244 param_path_depth: crate::metrics::path_component_count(&_param_path),
1245 max_depth: None,
1246 result: "ok",
1247 error_type: None,
1248 session_id: _sid,
1249 seq: Some(_seq),
1250 });
1251 Ok(result)
1252 }
1253}
1254
// MCP protocol surface: initialization metadata, session bootstrap, argument
// completion, cancellation logging, and dynamic log-level control.
#[tool_handler]
impl ServerHandler for CodeAnalyzer {
    // Builds the `initialize` response: declared capabilities, server
    // identity, and the recommended-workflow instructions shown to clients.
    fn get_info(&self) -> InitializeResult {
        let excluded = crate::EXCLUDED_DIRS.join(", ");
        let instructions = format!(
            "Recommended workflow for unknown repositories:\n\
            1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify the source package directory \
            (typically the largest directory by file count; exclude {excluded}).\n\
            2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for a module map with per-package class and function counts. Include test directories (e.g., tests/, testutil/, files matching *_test.go, test_*.py, test_*.rs, *_test.rs, *.spec.ts, *.spec.js) in the module map; test files are valid analysis targets and must not be skipped.\n\
            3. For key files identified in step 2, prefer analyze_module to get a lightweight function/import index (~75% smaller output) when you only need function names and imports; call analyze_file when you need signatures, types, or class structure.\n\
            4. Use analyze_symbol to trace call graphs for specific functions found in step 3.\n\
            Prefer summary=true on large directories (1000+ files). Set max_depth=2 for the first call; increase only if packages are too large to differentiate. \
            Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
        );
        let capabilities = ServerCapabilities::builder()
            .enable_logging()
            .enable_tools()
            .enable_tool_list_changed()
            .enable_completions()
            .build();
        let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
            .with_title("Code Analyze MCP")
            .with_description("MCP server for code structure analysis using tree-sitter");
        InitializeResult::new(capabilities)
            .with_server_info(server_info)
            .with_instructions(&instructions)
    }

    // Session bootstrap: stores the peer handle, mints a fresh session id,
    // resets the per-session call counter, and spawns the background task
    // that forwards buffered log events to the client as MCP logging
    // notifications.
    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
        let mut peer_lock = self.peer.lock().await;
        *peer_lock = Some(context.peer.clone());
        drop(peer_lock);

        // Session id = wall-clock millis + process-wide counter, so sessions
        // started within the same millisecond still get distinct ids.
        let millis = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis() as u64;
        let counter = GLOBAL_SESSION_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        let sid = format!("{}-{}", millis, counter);
        {
            let mut session_id_lock = self.session_id.lock().await;
            *session_id_lock = Some(sid);
        }
        self.session_call_seq
            .store(0, std::sync::atomic::Ordering::Relaxed);

        let peer = self.peer.clone();
        let event_rx = self.event_rx.clone();

        tokio::spawn(async move {
            // Take exclusive ownership of the receiver; a later
            // initialization finds `None` here and the task exits.
            let rx = {
                let mut rx_lock = event_rx.lock().await;
                rx_lock.take()
            };

            if let Some(mut receiver) = rx {
                let mut buffer = Vec::with_capacity(64);
                loop {
                    // Waits for at least one event, batching up to 64.
                    receiver.recv_many(&mut buffer, 64).await;

                    // Per tokio docs, recv_many appends nothing only when
                    // the channel is closed — stop forwarding.
                    if buffer.is_empty() {
                        break;
                    }

                    // NOTE(review): if the peer were None here the batch is
                    // kept and the buffer keeps growing; in practice the
                    // peer is set above before this task is spawned.
                    let peer_lock = peer.lock().await;
                    if let Some(peer) = peer_lock.as_ref() {
                        for log_event in buffer.drain(..) {
                            let notification = ServerNotification::LoggingMessageNotification(
                                Notification::new(LoggingMessageNotificationParam {
                                    level: log_event.level,
                                    logger: Some(log_event.logger),
                                    data: log_event.data,
                                }),
                            );
                            // Delivery failures are logged, not fatal; the
                            // remaining events in the batch are still sent.
                            if let Err(e) = peer.send_notification(notification).await {
                                warn!("Failed to send logging notification: {}", e);
                            }
                        }
                    }
                }
            }
        });
    }

    // Cancellation notifications are only logged; request teardown is
    // handled elsewhere via the request's cancellation token.
    #[instrument(skip(self, _context))]
    async fn on_cancelled(
        &self,
        notification: CancelledNotificationParam,
        _context: NotificationContext<RoleServer>,
    ) {
        tracing::info!(
            request_id = ?notification.request_id,
            reason = ?notification.reason,
            "Received cancellation notification"
        );
    }

    // Argument auto-completion: filesystem paths (relative to the server's
    // CWD) for `path`, cached symbol names for `symbol` (requires a `path`
    // argument in the request context). Other arguments complete to nothing.
    #[instrument(skip(self, _context))]
    async fn complete(
        &self,
        request: CompleteRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<CompleteResult, ErrorData> {
        let argument_name = &request.argument.name;
        let argument_value = &request.argument.value;

        let completions = match argument_name.as_str() {
            "path" => {
                let root = Path::new(".");
                completion::path_completions(root, argument_value)
            }
            "symbol" => {
                let path_arg = request
                    .context
                    .as_ref()
                    .and_then(|ctx| ctx.get_argument("path"));

                match path_arg {
                    Some(path_str) => {
                        let path = Path::new(path_str);
                        completion::symbol_completions(&self.cache, path, argument_value)
                    }
                    None => Vec::new(),
                }
            }
            _ => Vec::new(),
        };

        // Cap the response at 100 values, reporting the true total and
        // has_more so clients can narrow the query.
        let total_count = completions.len() as u32;
        let (values, has_more) = if completions.len() > 100 {
            (completions.into_iter().take(100).collect(), true)
        } else {
            (completions, false)
        };

        // Fall back to an empty completion set if pagination metadata is
        // rejected, rather than failing the whole request.
        let completion_info =
            match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
                Ok(info) => info,
                Err(_) => {
                    CompletionInfo::with_all_values(Vec::new())
                        .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
                }
            };

        Ok(CompleteResult::new(completion_info))
    }

    // Maps the MCP logging level onto tracing's coarser LevelFilter (Notice
    // collapses to INFO; Critical/Alert/Emergency collapse to ERROR) and
    // stores it for the logging layer to pick up.
    async fn set_level(
        &self,
        params: SetLevelRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<(), ErrorData> {
        let level_filter = match params.level {
            LoggingLevel::Debug => LevelFilter::DEBUG,
            LoggingLevel::Info => LevelFilter::INFO,
            LoggingLevel::Notice => LevelFilter::INFO,
            LoggingLevel::Warning => LevelFilter::WARN,
            LoggingLevel::Error => LevelFilter::ERROR,
            LoggingLevel::Critical => LevelFilter::ERROR,
            LoggingLevel::Alert => LevelFilter::ERROR,
            LoggingLevel::Emergency => LevelFilter::ERROR,
        };

        let mut filter_lock = self.log_level_filter.lock().unwrap();
        *filter_lock = level_filter;
        Ok(())
    }
}
1433
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        // With no connected peer, emit_progress must return without effect.
        let peer_slot = Arc::new(TokioMutex::new(None));
        let level = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
        let (metric_tx, _metric_rx) = tokio::sync::mpsc::unbounded_channel();
        let sut = CodeAnalyzer::new(
            peer_slot,
            level,
            log_rx,
            crate::metrics::MetricsSender(metric_tx),
        );
        let progress_token = ProgressToken(NumberOrString::String("test".into()));
        sut.emit_progress(None, &progress_token, 0.0, 10.0, "test".to_string())
            .await;
    }

    #[tokio::test]
    async fn test_handle_overview_mode_verbose_no_summary_block() {
        use crate::pagination::{PaginationMode, paginate_slice};
        use crate::types::{AnalyzeDirectoryParams, OutputControlParams, PaginationParams};
        use tempfile::TempDir;

        // One tiny Rust file is enough to exercise the overview formatter.
        let tmp = TempDir::new().unwrap();
        std::fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();

        let peer_slot = Arc::new(TokioMutex::new(None));
        let level = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
        let (metric_tx, _metric_rx) = tokio::sync::mpsc::unbounded_channel();
        let sut = CodeAnalyzer::new(
            peer_slot,
            level,
            log_rx,
            crate::metrics::MetricsSender(metric_tx),
        );

        let params = AnalyzeDirectoryParams {
            path: tmp.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: Some(true),
            },
        };

        let ct = tokio_util::sync::CancellationToken::new();
        let output = sut.handle_overview_mode(&params, ct).await.unwrap();

        // Mirror the tool's formatting decision: keep the summary text only
        // when the full rendering would blow past the size limit.
        let over_limit = output.formatted.len() > SIZE_LIMIT;
        let page =
            paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
        let formatted = if over_limit {
            output.formatted.clone()
        } else {
            format_structure_paginated(
                &page.items,
                page.total,
                params.max_depth,
                Some(std::path::Path::new(&params.path)),
                true,
            )
        };

        assert!(
            !formatted.contains("SUMMARY:"),
            "verbose=true must not emit SUMMARY: block; got: {}",
            &formatted[..formatted.len().min(300)]
        );
        assert!(
            formatted.contains("PAGINATED:"),
            "verbose=true must emit PAGINATED: header"
        );
        assert!(
            formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
            "verbose=true must emit FILES section header"
        );
    }
}