1pub mod logging;
19pub mod metrics;
20
21pub use code_analyze_core::analyze;
22use code_analyze_core::{cache, completion, graph, traversal, types};
23
/// Well-known directory names to exclude from analysis: dependency caches,
/// VCS metadata, build output, and Python virtualenvs/bytecode caches.
// NOTE(review): the consumer is not in this chunk — presumably applied by the
// directory walker in addition to .gitignore; confirm at the use site.
pub(crate) const EXCLUDED_DIRS: &[&str] = &[
    "node_modules",
    "vendor",
    ".git",
    "__pycache__",
    "target",
    "dist",
    "build",
    ".venv",
];
34
35use code_analyze_core::cache::AnalysisCache;
36use code_analyze_core::formatter::{
37 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
38 format_module_info, format_structure_paginated, format_summary,
39};
40use code_analyze_core::formatter_defuse::format_focused_paginated_defuse;
41use code_analyze_core::pagination::{
42 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
43};
44use code_analyze_core::traversal::{
45 WalkEntry, changed_files_from_git_ref, filter_entries_by_git_ref, walk_directory,
46};
47use code_analyze_core::types::{
48 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
49 AnalyzeSymbolParams, SymbolMatchMode,
50};
51use logging::LogEvent;
52use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
53use rmcp::handler::server::wrapper::Parameters;
54use rmcp::model::{
55 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
56 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
57 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
58 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
59};
60use rmcp::service::{NotificationContext, RequestContext};
61use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
62use serde_json::Value;
63use std::path::Path;
64use std::sync::{Arc, Mutex};
65use tokio::sync::{Mutex as TokioMutex, mpsc};
66use tracing::{instrument, warn};
67use tracing_subscriber::filter::LevelFilter;
68
/// Process-wide atomic counter.
// NOTE(review): not referenced anywhere in this chunk — presumably incremented
// where sessions are created; confirm at the use site before removing.
static GLOBAL_SESSION_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
70
/// Maximum formatted-output size in characters (~12.5K tokens at the 4
/// chars/token estimate used in error messages). Above this, tools either
/// auto-summarize or require the caller to pass summary=true / force=true.
const SIZE_LIMIT: usize = 50_000;
72
/// Reports whether the caller asked for a compact summary *and* supplied a
/// pagination cursor — an invalid combination, since summary output is not
/// paginated and the cursor would have nothing to continue.
#[must_use]
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!((summary, cursor), (Some(true), Some(_)))
}
79
80#[must_use]
81fn error_meta(
82 category: &'static str,
83 is_retryable: bool,
84 suggested_action: &'static str,
85) -> serde_json::Value {
86 serde_json::json!({
87 "errorCategory": category,
88 "isRetryable": is_retryable,
89 "suggestedAction": suggested_action,
90 })
91}
92
93#[must_use]
94fn err_to_tool_result(e: ErrorData) -> CallToolResult {
95 CallToolResult::error(vec![Content::text(e.message)])
96}
97
98fn err_to_tool_result_from_pagination(
99 e: code_analyze_core::pagination::PaginationError,
100) -> CallToolResult {
101 let msg = format!("Pagination error: {}", e);
102 CallToolResult::error(vec![Content::text(msg)])
103}
104
105fn no_cache_meta() -> Meta {
106 let mut m = serde_json::Map::new();
107 m.insert(
108 "cache_hint".to_string(),
109 serde_json::Value::String("no-cache".to_string()),
110 );
111 Meta(m)
112}
113
114fn paginate_focus_chains(
117 chains: &[graph::InternalCallChain],
118 mode: PaginationMode,
119 offset: usize,
120 page_size: usize,
121) -> Result<(Vec<graph::InternalCallChain>, Option<String>), ErrorData> {
122 let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
123 ErrorData::new(
124 rmcp::model::ErrorCode::INTERNAL_ERROR,
125 e.to_string(),
126 Some(error_meta("transient", true, "retry the request")),
127 )
128 })?;
129
130 if paginated.next_cursor.is_none() && offset == 0 {
131 return Ok((paginated.items, None));
132 }
133
134 let next = if let Some(raw_cursor) = paginated.next_cursor {
135 let decoded = decode_cursor(&raw_cursor).map_err(|e| {
136 ErrorData::new(
137 rmcp::model::ErrorCode::INVALID_PARAMS,
138 e.to_string(),
139 Some(error_meta("validation", false, "invalid cursor format")),
140 )
141 })?;
142 Some(
143 encode_cursor(&CursorData {
144 mode,
145 offset: decoded.offset,
146 })
147 .map_err(|e| {
148 ErrorData::new(
149 rmcp::model::ErrorCode::INVALID_PARAMS,
150 e.to_string(),
151 Some(error_meta("validation", false, "invalid cursor format")),
152 )
153 })?,
154 )
155 } else {
156 None
157 };
158
159 Ok((paginated.items, next))
160}
161
/// MCP server handler exposing the code-analysis tools.
// NOTE(review): Clone semantics depend on AnalysisCache's own Clone impl
// (defined elsewhere) — confirm it shares rather than copies state.
#[derive(Clone)]
pub struct CodeAnalyzer {
    // Router built once in `new`; retained alongside the handler even though
    // nothing in this file reads it directly.
    #[allow(dead_code)]
    tool_router: ToolRouter<Self>,
    // Analysis-result cache, keyed per file (path + mtime) and per directory.
    cache: AnalysisCache,
    // Optional connected MCP peer, used to send progress notifications.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    // Runtime-adjustable logging threshold.
    // NOTE(review): not read within this chunk; presumably consumed by the
    // logging setup — confirm at the use site.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    // Receiver half of the log-event channel, stored as Option so it can be
    // taken exactly once by whichever task forwards events.
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    // Sink for per-call metric events emitted at the end of each tool call.
    metrics_tx: crate::metrics::MetricsSender,
    // Monotonic per-call sequence number attached to metric events.
    session_call_seq: Arc<std::sync::atomic::AtomicU32>,
    // Session identifier attached to metric events, when known.
    session_id: Arc<TokioMutex<Option<String>>>,
}
180
181#[tool_router]
182impl CodeAnalyzer {
183 #[must_use]
184 pub fn list_tools() -> Vec<rmcp::model::Tool> {
185 Self::tool_router().list_all()
186 }
187
188 pub fn new(
189 peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
190 log_level_filter: Arc<Mutex<LevelFilter>>,
191 event_rx: mpsc::UnboundedReceiver<LogEvent>,
192 metrics_tx: crate::metrics::MetricsSender,
193 ) -> Self {
194 let file_cap: usize = std::env::var("CODE_ANALYZE_FILE_CACHE_CAPACITY")
195 .ok()
196 .and_then(|v| v.parse().ok())
197 .unwrap_or(100);
198 CodeAnalyzer {
199 tool_router: Self::tool_router(),
200 cache: AnalysisCache::new(file_cap),
201 peer,
202 log_level_filter,
203 event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
204 metrics_tx,
205 session_call_seq: Arc::new(std::sync::atomic::AtomicU32::new(0)),
206 session_id: Arc::new(TokioMutex::new(None)),
207 }
208 }
209
210 #[instrument(skip(self))]
211 async fn emit_progress(
212 &self,
213 peer: Option<Peer<RoleServer>>,
214 token: &ProgressToken,
215 progress: f64,
216 total: f64,
217 message: String,
218 ) {
219 if let Some(peer) = peer {
220 let notification = ServerNotification::ProgressNotification(Notification::new(
221 ProgressNotificationParam {
222 progress_token: token.clone(),
223 progress,
224 total: Some(total),
225 message: Some(message),
226 },
227 ));
228 if let Err(e) = peer.send_notification(notification).await {
229 warn!("Failed to send progress notification: {}", e);
230 }
231 }
232 }
233
    /// Runs the directory "overview" analysis: walks the tree, consults the
    /// directory cache, applies optional git_ref and depth filtering, then
    /// analyzes on a blocking task while forwarding progress notifications.
    ///
    /// Returns the analysis output plus a flag that is `true` on a cache hit.
    ///
    /// # Errors
    /// Fails when the walk or git_ref filtering fails, when the analysis
    /// errors or is cancelled, or when the blocking task panics (join error).
    #[allow(clippy::too_many_lines)] #[allow(clippy::cast_precision_loss)] #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<(std::sync::Arc<analyze::AnalysisOutput>, bool), ErrorData> {
        let path = Path::new(&params.path);
        // Shared file counter: incremented by the blocking analysis task and
        // read by the progress-polling loop below.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        // Walk the full tree (no depth cap here) so the cache key and the
        // subtree counts see every entry; depth limiting is applied later.
        let all_entries = walk_directory(path, None).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path permissions and availability",
                )),
            )
        })?;

        // For cache-key purposes, max_depth == 0 is equivalent to "unlimited".
        let canonical_max_depth = max_depth.and_then(|d| if d == 0 { None } else { Some(d) });

        // Empty git_ref strings are treated as "no git filtering".
        let git_ref_val = params.git_ref.as_deref().filter(|s| !s.is_empty());
        // The key covers entries, depth, mode, and git_ref, so a hit is only
        // possible when all filtering inputs match a previous run.
        let cache_key = cache::DirectoryCacheKey::from_entries(
            &all_entries,
            canonical_max_depth,
            AnalysisMode::Overview,
            git_ref_val,
        );

        if let Some(cached) = self.cache.get_directory(&cache_key) {
            return Ok((cached, true));
        }

        // Restrict entries to files changed relative to git_ref, if given.
        let all_entries = if let Some(ref git_ref) = params.git_ref
            && !git_ref.is_empty()
        {
            let changed = changed_files_from_git_ref(path, git_ref).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    format!("git_ref filter failed: {e}"),
                    Some(error_meta(
                        "resource",
                        false,
                        "ensure git is installed and path is inside a git repository",
                    )),
                )
            })?;
            filter_entries_by_git_ref(all_entries, &changed, path)
        } else {
            all_entries
        };

        // Subtree counts are computed from the *unfiltered-by-depth* entries
        // so depth-limited views can still report totals beneath the cutoff.
        let subtree_counts = if max_depth.is_some_and(|d| d > 0) {
            Some(traversal::subtree_counts_from_entries(path, &all_entries))
        } else {
            None
        };

        // Now apply the depth limit (depth == 0 means unlimited).
        let entries: Vec<traversal::WalkEntry> = if let Some(depth) = max_depth
            && depth > 0
        {
            all_entries
                .into_iter()
                .filter(|e| e.depth <= depth as usize)
                .collect()
        } else {
            all_entries
        };

        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // Run the CPU-bound analysis off the async runtime.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Unique-ish progress token derived from the current time.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll the shared counter every 100ms, forwarding progress whenever
        // it moved; exit on cancellation or when the blocking task finishes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {current}/{total_files} files"),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification, skipped when cancelled or nothing to do.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(mut output)) => {
                // Attach the pre-computed subtree counts and cache the result.
                output.subtree_counts = subtree_counts;
                let arc_output = std::sync::Arc::new(output);
                self.cache.put_directory(cache_key, arc_output.clone());
                Ok((arc_output, false))
            }
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path and file permissions",
                )),
            )),
            // Join error: the blocking task panicked or was aborted.
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
407
408 #[instrument(skip(self, params))]
411 async fn handle_file_details_mode(
412 &self,
413 params: &AnalyzeFileParams,
414 ) -> Result<(std::sync::Arc<analyze::FileAnalysisOutput>, bool), ErrorData> {
415 let cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
417 meta.modified().ok().map(|mtime| cache::CacheKey {
418 path: std::path::PathBuf::from(¶ms.path),
419 modified: mtime,
420 mode: AnalysisMode::FileDetails,
421 })
422 });
423
424 if let Some(ref key) = cache_key
426 && let Some(cached) = self.cache.get(key)
427 {
428 return Ok((cached, true));
429 }
430
431 match analyze::analyze_file(¶ms.path, params.ast_recursion_limit) {
433 Ok(output) => {
434 let arc_output = std::sync::Arc::new(output);
435 if let Some(key) = cache_key {
436 self.cache.put(key, arc_output.clone());
437 }
438 Ok((arc_output, false))
439 }
440 Err(e) => Err(ErrorData::new(
441 rmcp::model::ErrorCode::INTERNAL_ERROR,
442 format!("Error analyzing file: {e}"),
443 Some(error_meta(
444 "resource",
445 false,
446 "check file path and permissions",
447 )),
448 )),
449 }
450 }
451
452 fn validate_impl_only(entries: &[WalkEntry]) -> Result<(), ErrorData> {
454 let has_rust = entries.iter().any(|e| {
455 !e.is_dir
456 && e.path
457 .extension()
458 .and_then(|x: &std::ffi::OsStr| x.to_str())
459 == Some("rs")
460 });
461
462 if !has_rust {
463 return Err(ErrorData::new(
464 rmcp::model::ErrorCode::INVALID_PARAMS,
465 "impl_only=true requires Rust source files. No .rs files found in the given path. Use analyze_symbol without impl_only for cross-language analysis.".to_string(),
466 Some(error_meta(
467 "validation",
468 false,
469 "remove impl_only or point to a directory containing .rs files",
470 )),
471 ));
472 }
473 Ok(())
474 }
475
476 fn validate_import_lookup(import_lookup: Option<bool>, symbol: &str) -> Result<(), ErrorData> {
478 if import_lookup == Some(true) && symbol.is_empty() {
479 return Err(ErrorData::new(
480 rmcp::model::ErrorCode::INVALID_PARAMS,
481 "import_lookup=true requires symbol to contain the module path to search for"
482 .to_string(),
483 Some(error_meta(
484 "validation",
485 false,
486 "set symbol to the module path when using import_lookup=true",
487 )),
488 ));
489 }
490 Ok(())
491 }
492
    /// Spawns the focused symbol analysis on a blocking task and forwards
    /// progress notifications every 100ms until the task finishes or the
    /// cancellation token fires.
    ///
    /// # Errors
    /// Maps cancellation, analysis failures, and task-join failures (panics)
    /// to `ErrorData` with structured error metadata.
    #[allow(clippy::cast_precision_loss)] async fn poll_progress_until_done(
        &self,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
        symbol_display: &str,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // Clone/copy everything the blocking closure needs, since it must be
        // 'static and cannot borrow from `self` or the params.
        let counter_clone = counter.clone();
        let ct_clone = ct.clone();
        let entries_clone = std::sync::Arc::clone(&entries);
        let path_owned = analysis_params.path.clone();
        let symbol_owned = analysis_params.symbol.clone();
        let match_mode_owned = analysis_params.match_mode.clone();
        let follow_depth = analysis_params.follow_depth;
        let max_depth = analysis_params.max_depth;
        let ast_recursion_limit = analysis_params.ast_recursion_limit;
        let use_summary = analysis_params.use_summary;
        let impl_only = analysis_params.impl_only;
        let def_use = analysis_params.def_use;
        let handle = tokio::task::spawn_blocking(move || {
            let params = analyze::FocusedAnalysisConfig {
                focus: symbol_owned,
                match_mode: match_mode_owned,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                use_summary,
                impl_only,
                def_use,
            };
            analyze::analyze_focused_with_progress_with_entries(
                &path_owned,
                &params,
                &counter_clone,
                &ct_clone,
                &entries_clone,
            )
        });

        // Unique-ish progress token derived from the current time.
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;

        // Poll the shared counter, emitting progress only when it moved;
        // exits on cancellation or once the blocking task completes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {current}/{total_files} files for symbol '{symbol_display}'"
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification, skipped when cancelled or nothing to do.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files for symbol '{symbol_display}'"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(output)) => Ok(output),
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing symbol: {e}"),
                Some(error_meta("resource", false, "check symbol name and file")),
            )),
            // Join error: the blocking task panicked or was aborted.
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
605
    /// Runs the focused analysis, automatically falling back to summary mode
    /// when the caller left `summary` unset and the full output exceeds
    /// `SIZE_LIMIT`.
    ///
    /// Size-limit behavior:
    /// - `force=true`: full output is returned regardless of size.
    /// - `summary` unset + oversized output: re-run in summary mode; if the
    ///   retry fails, return a size-limit error instead.
    /// - `summary=false` + oversized output: return a size-limit error
    ///   listing the caller's options.
    ///
    /// # Errors
    /// Propagates analysis errors and returns invalid-params errors for
    /// oversized output as described above.
    async fn run_focused_with_auto_summary(
        &self,
        params: &AnalyzeSymbolParams,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // Summary is requested up-front only when explicitly asked for and
        // not overridden by force=true.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        let analysis_params_initial = FocusedAnalysisParams {
            use_summary: use_summary_for_task,
            ..analysis_params.clone()
        };

        let mut output = self
            .poll_progress_until_done(
                &analysis_params_initial,
                counter.clone(),
                ct.clone(),
                entries.clone(),
                total_files,
                &params.symbol,
            )
            .await?;

        // Auto-summary: summary left unset, no force, and output too large.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            // Fresh counter: the retry reports its own progress from zero.
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let analysis_params_retry = FocusedAnalysisParams {
                use_summary: true,
                ..analysis_params.clone()
            };
            let summary_result = self
                .poll_progress_until_done(
                    &analysis_params_retry,
                    counter2,
                    ct,
                    entries,
                    total_files,
                    &params.symbol,
                )
                .await;

            if let Ok(summary_output) = summary_result {
                // Keep the original output's metadata; only swap in the
                // compact formatted text.
                output.formatted = summary_output.formatted;
            } else {
                // Retry failed: report the size problem instead.
                let estimated_tokens = output.formatted.len() / 4;
                let message = format!(
                    "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                    output.formatted.len(),
                    estimated_tokens
                );
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    message,
                    Some(error_meta(
                        "validation",
                        false,
                        "use summary=true or force=true",
                    )),
                ));
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly refused the summary fallback: error out with
            // the available options.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - summary=true to get compact summary\n\
                 - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                )),
            ));
        }

        Ok(output)
    }
700
701 #[instrument(skip(self, params, ct))]
705 async fn handle_focused_mode(
706 &self,
707 params: &AnalyzeSymbolParams,
708 ct: tokio_util::sync::CancellationToken,
709 ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
710 let path = Path::new(¶ms.path);
711 let raw_entries = match walk_directory(path, params.max_depth) {
712 Ok(e) => e,
713 Err(e) => {
714 return Err(ErrorData::new(
715 rmcp::model::ErrorCode::INTERNAL_ERROR,
716 format!("Failed to walk directory: {e}"),
717 Some(error_meta(
718 "resource",
719 false,
720 "check path permissions and availability",
721 )),
722 ));
723 }
724 };
725 let filtered_entries = if let Some(ref git_ref) = params.git_ref
727 && !git_ref.is_empty()
728 {
729 let changed = changed_files_from_git_ref(path, git_ref).map_err(|e| {
730 ErrorData::new(
731 rmcp::model::ErrorCode::INVALID_PARAMS,
732 format!("git_ref filter failed: {e}"),
733 Some(error_meta(
734 "resource",
735 false,
736 "ensure git is installed and path is inside a git repository",
737 )),
738 )
739 })?;
740 filter_entries_by_git_ref(raw_entries, &changed, path)
741 } else {
742 raw_entries
743 };
744 let entries = std::sync::Arc::new(filtered_entries);
745
746 if params.impl_only == Some(true) {
747 Self::validate_impl_only(&entries)?;
748 }
749
750 let total_files = entries.iter().filter(|e| !e.is_dir).count();
751 let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
752
753 let analysis_params = FocusedAnalysisParams {
754 path: path.to_path_buf(),
755 symbol: params.symbol.clone(),
756 match_mode: params.match_mode.clone().unwrap_or_default(),
757 follow_depth: params.follow_depth.unwrap_or(1),
758 max_depth: params.max_depth,
759 ast_recursion_limit: params.ast_recursion_limit,
760 use_summary: false,
761 impl_only: params.impl_only,
762 def_use: params.def_use.unwrap_or(false),
763 };
764
765 let mut output = self
766 .run_focused_with_auto_summary(
767 params,
768 &analysis_params,
769 counter,
770 ct,
771 entries,
772 total_files,
773 )
774 .await?;
775
776 if params.impl_only == Some(true) {
777 let filter_line = format!(
778 "FILTER: impl_only=true ({} of {} callers shown)\n",
779 output.impl_trait_caller_count, output.unfiltered_caller_count
780 );
781 output.formatted = format!("{}{}", filter_line, output.formatted);
782
783 if output.impl_trait_caller_count == 0 {
784 output.formatted.push_str(
785 "\nNOTE: No impl-trait callers found. The symbol may be a plain function or struct, not a trait method. Remove impl_only to see all callers.\n"
786 );
787 }
788 }
789
790 Ok(output)
791 }
792
    /// `analyze_directory` tool entry point: overview analysis plus summary
    /// selection, pagination, formatting, and metrics reporting.
    ///
    /// Validation and analysis failures are returned as successful responses
    /// carrying an error payload (`err_to_tool_result`) so the client sees a
    /// tool-level failure rather than a protocol error.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_directory",
        description = "Tree-view of directory with LOC, function/class counts, test markers. Respects .gitignore. For 1000+ files, use max_depth=2-3 and summary=true. Empty directories return zero counts. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they?",
        output_schema = schema_for_type::<analyze::AnalysisOutput>(),
        annotations(
            title = "Analyze Directory",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_directory(
        &self,
        params: Parameters<AnalyzeDirectoryParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();
        // Bookkeeping for the metric event emitted at the end of the call.
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let max_depth_val = params.max_depth;
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        let (arc_output, dir_cache_hit) = match self.handle_overview_mode(&params, ct).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };
        // Take ownership of the cached Arc; clone only when the cache still
        // holds another reference.
        let mut output = match std::sync::Arc::try_unwrap(arc_output) {
            Ok(owned) => owned,
            Err(arc) => (*arc).clone(),
        };

        // summary=true + cursor is rejected: summary output has no pages.
        if summary_cursor_conflict(
            params.output_control.summary,
            params.pagination.cursor.as_deref(),
        ) {
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                "summary=true is incompatible with a pagination cursor; use one or the other"
                    .to_string(),
                Some(error_meta(
                    "validation",
                    false,
                    "remove cursor or set summary=false",
                )),
            )));
        }

        // Summary decision: force wins, then the explicit flag, then an
        // automatic fallback when the full output is oversized.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            output.formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            output.formatted = format_summary(
                &output.entries,
                &output.files,
                params.max_depth,
                output.subtree_counts.as_deref(),
            );
        }

        // Resolve the page offset from the caller's cursor, if any.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        let paginated =
            match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        // Full (non-summary) output is re-rendered from the current page.
        if !use_summary {
            output.formatted = format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(Path::new(&params.path)),
                verbose,
            );
        }

        // Summary responses never advertise a continuation cursor.
        if use_summary {
            output.next_cursor = None;
        } else {
            output.next_cursor.clone_from(&paginated.next_cursor);
        }

        // The cursor is also appended to the text body for clients that only
        // read the plain-text content.
        let mut final_text = output.formatted.clone();
        if !use_summary && let Some(cursor) = paginated.next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(&cursor);
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_directory",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: max_depth_val,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
            cache_hit: Some(dir_cache_hit),
        });
        Ok(result)
    }
948
    /// `analyze_file` tool entry point: single-file analysis plus summary
    /// selection, size-limit enforcement, pagination over top-level
    /// functions, formatting, and metrics reporting.
    ///
    /// Validation and analysis failures are returned as successful responses
    /// carrying an error payload (`err_to_tool_result`) so the client sees a
    /// tool-level failure rather than a protocol error.
    #[instrument(skip(self, _context))]
    #[tool(
        name = "analyze_file",
        description = "Functions, types, classes, and imports from a single source file; use analyze_directory for directories. Supported: Rust, Go, Java, Python, TypeScript, TSX, Fortran, JavaScript, C/C++, C#. Passing a directory path returns an error. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py.",
        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
        annotations(
            title = "Analyze File",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        _context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        // Bookkeeping for the metric event emitted at the end of the call.
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        let (arc_output, file_cache_hit) = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Summary decision: force wins, then the explicit flag, then an
        // automatic fallback when the full output is oversized.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // Reached only when summary was explicitly disabled but the full
            // output is oversized (unset summary would have auto-summarized).
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - Use fields to limit output to specific sections (functions, classes, or imports)\n\
                 - Use summary=true for a compact overview",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, fields, or summary=true",
                )),
            )));
        }

        // Resolve the page offset from the caller's cursor, if any.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Pagination covers only top-level functions: any function whose line
        // range falls inside a class's range is treated as a method and
        // excluded from the paged list.
        let top_level_fns: Vec<crate::types::FunctionInfo> = arc_output
            .semantic
            .functions
            .iter()
            .filter(|func| {
                !arc_output
                    .semantic
                    .classes
                    .iter()
                    .any(|class| func.line >= class.line && func.end_line <= class.end_line)
            })
            .cloned()
            .collect();

        let paginated =
            match paginate_slice(&top_level_fns, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        // Full (non-summary) output is re-rendered from the current page.
        if !use_summary {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
                params.fields.as_deref(),
            );
        }

        // Summary responses never advertise a continuation cursor.
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        // The cursor is also appended to the text body for clients that only
        // read the plain-text content.
        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(cursor);
        }

        // Rebuild the structured output with the (possibly re-rendered)
        // formatted text and cursor.
        let response_output = analyze::FileAnalysisOutput::new(
            formatted,
            arc_output.semantic.clone(),
            line_count,
            next_cursor,
        );

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
            cache_hit: Some(file_cache_hit),
        });
        Ok(result)
    }
1126
1127 #[instrument(skip(self, context))]
1128 #[tool(
1129 name = "analyze_symbol",
1130 description = "Call graph for a named function/method across all files in a directory to trace usage. Returns direct callers and callees. Unknown symbols return error; symbols with no callers/callees return empty chains. Use import_lookup=true with symbol set to the module path to find all files that import a given module path instead of tracing a call graph. When def_use is true, returns write and read sites for the symbol in def_use_sites; write sites include assignments and initializations, read sites include all references, augmented assignments appear as kind write_read. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep; Show only trait impl callers of the write method; Find all files that import std::collections",
1131 output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
1132 annotations(
1133 title = "Analyze Symbol",
1134 read_only_hint = true,
1135 destructive_hint = false,
1136 idempotent_hint = true,
1137 open_world_hint = false
1138 )
1139 )]
1140 async fn analyze_symbol(
1141 &self,
1142 params: Parameters<AnalyzeSymbolParams>,
1143 context: RequestContext<RoleServer>,
1144 ) -> Result<CallToolResult, ErrorData> {
1145 let params = params.0;
1146 let ct = context.ct.clone();
1147 let t_start = std::time::Instant::now();
1148 let param_path = params.path.clone();
1149 let max_depth_val = params.follow_depth;
1150 let seq = self
1151 .session_call_seq
1152 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1153 let sid = self.session_id.lock().await.clone();
1154
1155 if let Err(e) = Self::validate_import_lookup(params.import_lookup, ¶ms.symbol) {
1157 return Ok(err_to_tool_result(e));
1158 }
1159
1160 if params.import_lookup == Some(true) {
1162 let path = Path::new(¶ms.path);
1163 let raw_entries = match walk_directory(path, params.max_depth) {
1164 Ok(e) => e,
1165 Err(e) => {
1166 return Ok(err_to_tool_result(ErrorData::new(
1167 rmcp::model::ErrorCode::INTERNAL_ERROR,
1168 format!("Failed to walk directory: {e}"),
1169 Some(error_meta(
1170 "resource",
1171 false,
1172 "check path permissions and availability",
1173 )),
1174 )));
1175 }
1176 };
1177 let entries = if let Some(ref git_ref) = params.git_ref
1179 && !git_ref.is_empty()
1180 {
1181 let changed = match changed_files_from_git_ref(path, git_ref) {
1182 Ok(c) => c,
1183 Err(e) => {
1184 return Ok(err_to_tool_result(ErrorData::new(
1185 rmcp::model::ErrorCode::INVALID_PARAMS,
1186 format!("git_ref filter failed: {e}"),
1187 Some(error_meta(
1188 "resource",
1189 false,
1190 "ensure git is installed and path is inside a git repository",
1191 )),
1192 )));
1193 }
1194 };
1195 filter_entries_by_git_ref(raw_entries, &changed, path)
1196 } else {
1197 raw_entries
1198 };
1199 let output = match analyze::analyze_import_lookup(
1200 path,
1201 ¶ms.symbol,
1202 &entries,
1203 params.ast_recursion_limit,
1204 ) {
1205 Ok(v) => v,
1206 Err(e) => {
1207 return Ok(err_to_tool_result(ErrorData::new(
1208 rmcp::model::ErrorCode::INTERNAL_ERROR,
1209 format!("import_lookup failed: {e}"),
1210 Some(error_meta(
1211 "resource",
1212 false,
1213 "check path and file permissions",
1214 )),
1215 )));
1216 }
1217 };
1218 let final_text = output.formatted.clone();
1219 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1220 .with_meta(Some(no_cache_meta()));
1221 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1222 result.structured_content = Some(structured);
1223 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1224 self.metrics_tx.send(crate::metrics::MetricEvent {
1225 ts: crate::metrics::unix_ms(),
1226 tool: "analyze_symbol",
1227 duration_ms: dur,
1228 output_chars: final_text.len(),
1229 param_path_depth: crate::metrics::path_component_count(¶m_path),
1230 max_depth: max_depth_val,
1231 result: "ok",
1232 error_type: None,
1233 session_id: sid,
1234 seq: Some(seq),
1235 cache_hit: Some(false),
1236 });
1237 return Ok(result);
1238 }
1239
1240 let mut output = match self.handle_focused_mode(¶ms, ct).await {
1242 Ok(v) => v,
1243 Err(e) => return Ok(err_to_tool_result(e)),
1244 };
1245
1246 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
1248 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
1249 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
1250 ErrorData::new(
1251 rmcp::model::ErrorCode::INVALID_PARAMS,
1252 e.to_string(),
1253 Some(error_meta("validation", false, "invalid cursor format")),
1254 )
1255 }) {
1256 Ok(v) => v,
1257 Err(e) => return Ok(err_to_tool_result(e)),
1258 };
1259 cursor_data.offset
1260 } else {
1261 0
1262 };
1263
1264 let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
1266 decode_cursor(cursor_str)
1267 .map(|c| c.mode)
1268 .unwrap_or(PaginationMode::Callers)
1269 } else {
1270 PaginationMode::Callers
1271 };
1272
1273 let use_summary = params.output_control.summary == Some(true);
1274 let verbose = params.output_control.verbose.unwrap_or(false);
1275
1276 let mut callee_cursor = match cursor_mode {
1277 PaginationMode::Callers => {
1278 let (paginated_items, paginated_next) = match paginate_focus_chains(
1279 &output.prod_chains,
1280 PaginationMode::Callers,
1281 offset,
1282 page_size,
1283 ) {
1284 Ok(v) => v,
1285 Err(e) => return Ok(err_to_tool_result(e)),
1286 };
1287
1288 if !use_summary
1289 && (paginated_next.is_some()
1290 || offset > 0
1291 || !verbose
1292 || !output.outgoing_chains.is_empty())
1293 {
1294 let base_path = Path::new(¶ms.path);
1295 output.formatted = format_focused_paginated(
1296 &paginated_items,
1297 output.prod_chains.len(),
1298 PaginationMode::Callers,
1299 ¶ms.symbol,
1300 &output.prod_chains,
1301 &output.test_chains,
1302 &output.outgoing_chains,
1303 output.def_count,
1304 offset,
1305 Some(base_path),
1306 verbose,
1307 );
1308 paginated_next
1309 } else {
1310 None
1311 }
1312 }
1313 PaginationMode::Callees => {
1314 let (paginated_items, paginated_next) = match paginate_focus_chains(
1315 &output.outgoing_chains,
1316 PaginationMode::Callees,
1317 offset,
1318 page_size,
1319 ) {
1320 Ok(v) => v,
1321 Err(e) => return Ok(err_to_tool_result(e)),
1322 };
1323
1324 if paginated_next.is_some() || offset > 0 || !verbose {
1325 let base_path = Path::new(¶ms.path);
1326 output.formatted = format_focused_paginated(
1327 &paginated_items,
1328 output.outgoing_chains.len(),
1329 PaginationMode::Callees,
1330 ¶ms.symbol,
1331 &output.prod_chains,
1332 &output.test_chains,
1333 &output.outgoing_chains,
1334 output.def_count,
1335 offset,
1336 Some(base_path),
1337 verbose,
1338 );
1339 paginated_next
1340 } else {
1341 None
1342 }
1343 }
1344 PaginationMode::Default => {
1345 unreachable!("SymbolFocus should only use Callers or Callees modes")
1346 }
1347 PaginationMode::DefUse => {
1348 let total_sites = output.def_use_sites.len();
1349 let (paginated_sites, paginated_next) = match paginate_slice(
1350 &output.def_use_sites,
1351 offset,
1352 page_size,
1353 PaginationMode::DefUse,
1354 ) {
1355 Ok(r) => (r.items, r.next_cursor),
1356 Err(e) => return Ok(err_to_tool_result_from_pagination(e)),
1357 };
1358
1359 if !use_summary {
1362 let base_path = Path::new(¶ms.path);
1363 output.formatted = format_focused_paginated_defuse(
1364 &paginated_sites,
1365 total_sites,
1366 ¶ms.symbol,
1367 offset,
1368 Some(base_path),
1369 verbose,
1370 );
1371 }
1372
1373 output.def_use_sites = paginated_sites;
1376
1377 paginated_next
1378 }
1379 };
1380
1381 if callee_cursor.is_none()
1386 && cursor_mode == PaginationMode::Callers
1387 && !output.outgoing_chains.is_empty()
1388 && !use_summary
1389 && let Ok(cursor) = encode_cursor(&CursorData {
1390 mode: PaginationMode::Callees,
1391 offset: 0,
1392 })
1393 {
1394 callee_cursor = Some(cursor);
1395 }
1396
1397 if callee_cursor.is_none()
1404 && matches!(
1405 cursor_mode,
1406 PaginationMode::Callees | PaginationMode::Callers
1407 )
1408 && !output.def_use_sites.is_empty()
1409 && !use_summary
1410 && let Ok(cursor) = encode_cursor(&CursorData {
1411 mode: PaginationMode::DefUse,
1412 offset: 0,
1413 })
1414 {
1415 if cursor_mode == PaginationMode::Callees || output.outgoing_chains.is_empty() {
1418 callee_cursor = Some(cursor);
1419 }
1420 }
1421
1422 output.next_cursor.clone_from(&callee_cursor);
1424
1425 let mut final_text = output.formatted.clone();
1427 if let Some(cursor) = callee_cursor {
1428 final_text.push('\n');
1429 final_text.push_str("NEXT_CURSOR: ");
1430 final_text.push_str(&cursor);
1431 }
1432
1433 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1434 .with_meta(Some(no_cache_meta()));
1435 if cursor_mode != PaginationMode::DefUse {
1439 output.def_use_sites = Vec::new();
1440 }
1441 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1442 result.structured_content = Some(structured);
1443 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1444 self.metrics_tx.send(crate::metrics::MetricEvent {
1445 ts: crate::metrics::unix_ms(),
1446 tool: "analyze_symbol",
1447 duration_ms: dur,
1448 output_chars: final_text.len(),
1449 param_path_depth: crate::metrics::path_component_count(¶m_path),
1450 max_depth: max_depth_val,
1451 result: "ok",
1452 error_type: None,
1453 session_id: sid,
1454 seq: Some(seq),
1455 cache_hit: Some(false),
1456 });
1457 Ok(result)
1458 }
1459
1460 #[instrument(skip(self, _context))]
1461 #[tool(
1462 name = "analyze_module",
1463 description = "Function and import index for a single source file with minimal token cost: name, line_count, language, function names with line numbers, import list only (~75% smaller than analyze_file). Use analyze_file when you need signatures, types, or class details. Supported: Rust, Go, Java, Python, TypeScript, TSX, Fortran, JavaScript, C/C++, C#. Pagination, summary, force, and verbose not supported. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs.",
1464 output_schema = schema_for_type::<types::ModuleInfo>(),
1465 annotations(
1466 title = "Analyze Module",
1467 read_only_hint = true,
1468 destructive_hint = false,
1469 idempotent_hint = true,
1470 open_world_hint = false
1471 )
1472 )]
1473 async fn analyze_module(
1474 &self,
1475 params: Parameters<AnalyzeModuleParams>,
1476 _context: RequestContext<RoleServer>,
1477 ) -> Result<CallToolResult, ErrorData> {
1478 let params = params.0;
1479 let t_start = std::time::Instant::now();
1480 let param_path = params.path.clone();
1481 let seq = self
1482 .session_call_seq
1483 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1484 let sid = self.session_id.lock().await.clone();
1485
1486 if std::fs::metadata(¶ms.path)
1488 .map(|m| m.is_dir())
1489 .unwrap_or(false)
1490 {
1491 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1492 self.metrics_tx.send(crate::metrics::MetricEvent {
1493 ts: crate::metrics::unix_ms(),
1494 tool: "analyze_module",
1495 duration_ms: dur,
1496 output_chars: 0,
1497 param_path_depth: crate::metrics::path_component_count(¶m_path),
1498 max_depth: None,
1499 result: "error",
1500 error_type: Some("invalid_params".to_string()),
1501 session_id: sid.clone(),
1502 seq: Some(seq),
1503 cache_hit: None,
1504 });
1505 return Ok(err_to_tool_result(ErrorData::new(
1506 rmcp::model::ErrorCode::INVALID_PARAMS,
1507 format!(
1508 "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1509 params.path
1510 ),
1511 Some(error_meta(
1512 "validation",
1513 false,
1514 "use analyze_directory for directories",
1515 )),
1516 )));
1517 }
1518
1519 let module_cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
1521 meta.modified().ok().map(|mtime| cache::CacheKey {
1522 path: std::path::PathBuf::from(¶ms.path),
1523 modified: mtime,
1524 mode: AnalysisMode::FileDetails,
1525 })
1526 });
1527 let (module_info, module_cache_hit) = if let Some(ref key) = module_cache_key
1528 && let Some(cached_file) = self.cache.get(key)
1529 {
1530 let file_path = std::path::Path::new(¶ms.path);
1534 let name = file_path
1535 .file_name()
1536 .and_then(|n: &std::ffi::OsStr| n.to_str())
1537 .unwrap_or("unknown")
1538 .to_string();
1539 let language = file_path
1540 .extension()
1541 .and_then(|e| e.to_str())
1542 .and_then(code_analyze_core::lang::language_for_extension)
1543 .unwrap_or("unknown")
1544 .to_string();
1545 let mi = types::ModuleInfo {
1546 name,
1547 line_count: cached_file.line_count,
1548 language,
1549 functions: cached_file
1550 .semantic
1551 .functions
1552 .iter()
1553 .map(|f| types::ModuleFunctionInfo {
1554 name: f.name.clone(),
1555 line: f.line,
1556 })
1557 .collect(),
1558 imports: cached_file
1559 .semantic
1560 .imports
1561 .iter()
1562 .map(|i| types::ModuleImportInfo {
1563 module: i.module.clone(),
1564 items: i.items.clone(),
1565 })
1566 .collect(),
1567 };
1568 (mi, true)
1569 } else {
1570 let file_output = match analyze::analyze_file(¶ms.path, None).map_err(|e| {
1574 ErrorData::new(
1575 rmcp::model::ErrorCode::INVALID_PARAMS,
1576 format!("Failed to analyze module: {e}"),
1577 Some(error_meta(
1578 "validation",
1579 false,
1580 "ensure file exists, is readable, and has a supported extension",
1581 )),
1582 )
1583 }) {
1584 Ok(v) => v,
1585 Err(e) => return Ok(err_to_tool_result(e)),
1586 };
1587 let arc_output = std::sync::Arc::new(file_output);
1588 if let Some(key) = module_cache_key.clone() {
1589 self.cache.put(key, arc_output.clone());
1590 }
1591 let file_path = std::path::Path::new(¶ms.path);
1592 let name = file_path
1593 .file_name()
1594 .and_then(|n: &std::ffi::OsStr| n.to_str())
1595 .unwrap_or("unknown")
1596 .to_string();
1597 let language = file_path
1598 .extension()
1599 .and_then(|e| e.to_str())
1600 .and_then(code_analyze_core::lang::language_for_extension)
1601 .unwrap_or("unknown")
1602 .to_string();
1603 let mi = types::ModuleInfo {
1604 name,
1605 line_count: arc_output.line_count,
1606 language,
1607 functions: arc_output
1608 .semantic
1609 .functions
1610 .iter()
1611 .map(|f| types::ModuleFunctionInfo {
1612 name: f.name.clone(),
1613 line: f.line,
1614 })
1615 .collect(),
1616 imports: arc_output
1617 .semantic
1618 .imports
1619 .iter()
1620 .map(|i| types::ModuleImportInfo {
1621 module: i.module.clone(),
1622 items: i.items.clone(),
1623 })
1624 .collect(),
1625 };
1626 (mi, false)
1627 };
1628
1629 let text = format_module_info(&module_info);
1630 let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1631 .with_meta(Some(no_cache_meta()));
1632 let structured = match serde_json::to_value(&module_info).map_err(|e| {
1633 ErrorData::new(
1634 rmcp::model::ErrorCode::INTERNAL_ERROR,
1635 format!("serialization failed: {e}"),
1636 Some(error_meta("internal", false, "report this as a bug")),
1637 )
1638 }) {
1639 Ok(v) => v,
1640 Err(e) => return Ok(err_to_tool_result(e)),
1641 };
1642 result.structured_content = Some(structured);
1643 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1644 self.metrics_tx.send(crate::metrics::MetricEvent {
1645 ts: crate::metrics::unix_ms(),
1646 tool: "analyze_module",
1647 duration_ms: dur,
1648 output_chars: text.len(),
1649 param_path_depth: crate::metrics::path_component_count(¶m_path),
1650 max_depth: None,
1651 result: "ok",
1652 error_type: None,
1653 session_id: sid,
1654 seq: Some(seq),
1655 cache_hit: Some(module_cache_hit),
1656 });
1657 Ok(result)
1658 }
1659}
1660
/// Internal parameter bundle for the focused (call-graph) analysis path,
/// decoupling it from the wire-level `AnalyzeSymbolParams`.
#[derive(Clone)]
struct FocusedAnalysisParams {
    /// Root directory to scan for the symbol.
    path: std::path::PathBuf,
    /// Symbol name (function/method) to trace.
    symbol: String,
    /// How symbol names are matched — see `SymbolMatchMode` for variants.
    match_mode: SymbolMatchMode,
    /// How many levels of caller/callee chains to follow.
    follow_depth: u32,
    /// Optional directory-walk depth limit (`None` = unlimited).
    max_depth: Option<u32>,
    /// Optional cap on AST recursion while parsing.
    ast_recursion_limit: Option<usize>,
    /// Emit the compact summary format instead of the full listing.
    use_summary: bool,
    /// Restrict callers to trait-impl sites when `Some(true)`
    /// (rejected for non-Rust trees — see `validate_impl_only`).
    impl_only: Option<bool>,
    /// Also collect definition/use (write/read) sites for the symbol.
    def_use: bool,
}
1674
// `#[tool_handler]` wires the `#[tool]`-annotated methods above into the
// MCP call_tool/list_tools plumbing generated by rmcp.
#[tool_handler]
impl ServerHandler for CodeAnalyzer {
    /// Advertises server identity, capabilities, and the recommended
    /// multi-step workflow string clients receive at initialization.
    fn get_info(&self) -> InitializeResult {
        let excluded = crate::EXCLUDED_DIRS.join(", ");
        let instructions = format!(
            "Recommended workflow:\n\
             1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify source package (largest by file count; exclude {excluded}).\n\
             2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for module map. Include test directories (tests/, *_test.go, test_*.py, test_*.rs, *.spec.ts, *.spec.js).\n\
             3. For key files, prefer analyze_module for function/import index; use analyze_file for signatures and types.\n\
             4. Use analyze_symbol to trace call graphs.\n\
             Prefer summary=true on 1000+ files. Set max_depth=2; increase if packages too large. Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
        );
        let capabilities = ServerCapabilities::builder()
            .enable_logging()
            .enable_tools()
            .enable_tool_list_changed()
            .enable_completions()
            .build();
        let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
            .with_title("Code Analyze MCP")
            .with_description("MCP server for code structure analysis using tree-sitter");
        InitializeResult::new(capabilities)
            .with_server_info(server_info)
            .with_instructions(&instructions)
    }

    /// Runs once the client finishes initialization: records the peer,
    /// assigns a fresh session id, resets the per-session call counter, and
    /// spawns the background task that forwards log events to the client.
    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
        let mut peer_lock = self.peer.lock().await;
        *peer_lock = Some(context.peer.clone());
        drop(peer_lock);

        // Session id = unix-millis + process-wide counter, so concurrent
        // sessions in the same process still get distinct ids.
        let millis = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis()
            .try_into()
            .unwrap_or(u64::MAX);
        let counter = GLOBAL_SESSION_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        let sid = format!("{millis}-{counter}");
        {
            let mut session_id_lock = self.session_id.lock().await;
            *session_id_lock = Some(sid);
        }
        self.session_call_seq
            .store(0, std::sync::atomic::Ordering::Relaxed);

        let peer = self.peer.clone();
        let event_rx = self.event_rx.clone();

        // Log-forwarding task: batches buffered log events and sends them to
        // the client as MCP logging notifications.
        tokio::spawn(async move {
            // take() ensures at most one task ever owns the receiver, even
            // if on_initialized were invoked again.
            let rx = {
                let mut rx_lock = event_rx.lock().await;
                rx_lock.take()
            };

            if let Some(mut receiver) = rx {
                let mut buffer = Vec::with_capacity(64);
                loop {
                    // Appends up to 64 events per wakeup.
                    receiver.recv_many(&mut buffer, 64).await;

                    // recv_many leaves the buffer empty only when the channel
                    // is closed and everything buffered has been flushed.
                    if buffer.is_empty() {
                        break;
                    }

                    // Events are only drained when a peer is connected;
                    // otherwise they stay buffered for the next iteration.
                    // NOTE(review): if the peer is never set and the channel
                    // closes with events still buffered, this loop never hits
                    // the empty-buffer break — confirm intended.
                    let peer_lock = peer.lock().await;
                    if let Some(peer) = peer_lock.as_ref() {
                        for log_event in buffer.drain(..) {
                            let notification = ServerNotification::LoggingMessageNotification(
                                Notification::new(LoggingMessageNotificationParam {
                                    level: log_event.level,
                                    logger: Some(log_event.logger),
                                    data: log_event.data,
                                }),
                            );
                            // Best-effort delivery: a failed send is logged,
                            // not retried.
                            if let Err(e) = peer.send_notification(notification).await {
                                warn!("Failed to send logging notification: {}", e);
                            }
                        }
                    }
                }
            }
        });
    }

    /// Logs client cancellation requests; actual cancellation is driven by
    /// the per-request cancellation token elsewhere.
    #[instrument(skip(self, _context))]
    async fn on_cancelled(
        &self,
        notification: CancelledNotificationParam,
        _context: NotificationContext<RoleServer>,
    ) {
        tracing::info!(
            request_id = ?notification.request_id,
            reason = ?notification.reason,
            "Received cancellation notification"
        );
    }

    /// Argument autocompletion: `path` completes against the filesystem
    /// (relative to cwd); `symbol` completes from the analysis cache, using
    /// the `path` argument from the request context when present.
    #[instrument(skip(self, _context))]
    async fn complete(
        &self,
        request: CompleteRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<CompleteResult, ErrorData> {
        let argument_name = &request.argument.name;
        let argument_value = &request.argument.value;

        let completions = match argument_name.as_str() {
            "path" => {
                let root = Path::new(".");
                completion::path_completions(root, argument_value)
            }
            "symbol" => {
                let path_arg = request
                    .context
                    .as_ref()
                    .and_then(|ctx| ctx.get_argument("path"));

                match path_arg {
                    Some(path_str) => {
                        let path = Path::new(path_str);
                        completion::symbol_completions(&self.cache, path, argument_value)
                    }
                    None => Vec::new(),
                }
            }
            _ => Vec::new(),
        };

        // total_count reflects the full set even when the response is capped
        // at 100 values (has_more signals the truncation).
        let total_count = u32::try_from(completions.len()).unwrap_or(u32::MAX);
        let (values, has_more) = if completions.len() > 100 {
            (completions.into_iter().take(100).collect(), true)
        } else {
            (completions, false)
        };

        let completion_info =
            match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
                Ok(info) => info,
                Err(_) => {
                    // Fall back to an empty completion set rather than
                    // failing the request.
                    CompletionInfo::with_all_values(Vec::new())
                        .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
                }
            };

        Ok(CompleteResult::new(completion_info))
    }

    /// Maps the MCP logging level onto the tracing level filter shared with
    /// the subscriber (Notice folds into INFO; everything above Error folds
    /// into ERROR).
    async fn set_level(
        &self,
        params: SetLevelRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<(), ErrorData> {
        let level_filter = match params.level {
            LoggingLevel::Debug => LevelFilter::DEBUG,
            LoggingLevel::Info | LoggingLevel::Notice => LevelFilter::INFO,
            LoggingLevel::Warning => LevelFilter::WARN,
            LoggingLevel::Error
            | LoggingLevel::Critical
            | LoggingLevel::Alert
            | LoggingLevel::Emergency => LevelFilter::ERROR,
        };

        // Recover from a poisoned lock instead of panicking: the filter is a
        // plain value, so the poisoned state is still usable.
        let mut filter_lock = self
            .log_level_filter
            .lock()
            .unwrap_or_else(|e| e.into_inner());
        *filter_lock = level_filter;
        Ok(())
    }
}
1855
1856#[cfg(test)]
1857mod tests {
1858 use super::*;
1859
1860 #[tokio::test]
1861 async fn test_emit_progress_none_peer_is_noop() {
1862 let peer = Arc::new(TokioMutex::new(None));
1863 let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
1864 let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
1865 let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
1866 let analyzer = CodeAnalyzer::new(
1867 peer,
1868 log_level_filter,
1869 rx,
1870 crate::metrics::MetricsSender(metrics_tx),
1871 );
1872 let token = ProgressToken(NumberOrString::String("test".into()));
1873 analyzer
1875 .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
1876 .await;
1877 }
1878
1879 fn make_analyzer() -> CodeAnalyzer {
1880 let peer = Arc::new(TokioMutex::new(None));
1881 let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
1882 let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
1883 let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
1884 CodeAnalyzer::new(
1885 peer,
1886 log_level_filter,
1887 rx,
1888 crate::metrics::MetricsSender(metrics_tx),
1889 )
1890 }
1891
1892 #[test]
1893 fn test_summary_cursor_conflict() {
1894 assert!(summary_cursor_conflict(Some(true), Some("cursor")));
1895 assert!(!summary_cursor_conflict(Some(true), None));
1896 assert!(!summary_cursor_conflict(None, Some("x")));
1897 assert!(!summary_cursor_conflict(None, None));
1898 }
1899
1900 #[tokio::test]
1901 async fn test_validate_impl_only_non_rust_returns_invalid_params() {
1902 use tempfile::TempDir;
1903
1904 let dir = TempDir::new().unwrap();
1905 std::fs::write(dir.path().join("main.py"), "def foo(): pass").unwrap();
1906
1907 let analyzer = make_analyzer();
1908 let entries: Vec<traversal::WalkEntry> =
1911 traversal::walk_directory(dir.path(), None).unwrap_or_default();
1912 let result = CodeAnalyzer::validate_impl_only(&entries);
1913 assert!(result.is_err());
1914 let err = result.unwrap_err();
1915 assert_eq!(err.code, rmcp::model::ErrorCode::INVALID_PARAMS);
1916 drop(analyzer); }
1918
1919 #[tokio::test]
1920 async fn test_no_cache_meta_on_analyze_directory_result() {
1921 use code_analyze_core::types::{
1922 AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
1923 };
1924 use tempfile::TempDir;
1925
1926 let dir = TempDir::new().unwrap();
1927 std::fs::write(dir.path().join("main.rs"), "fn main() {}").unwrap();
1928
1929 let analyzer = make_analyzer();
1930 let params: AnalyzeDirectoryParams = serde_json::from_value(serde_json::json!({
1931 "path": dir.path().to_str().unwrap(),
1932 }))
1933 .unwrap();
1934 let ct = tokio_util::sync::CancellationToken::new();
1935 let (arc_output, _cache_hit) = analyzer.handle_overview_mode(¶ms, ct).await.unwrap();
1936 let meta = no_cache_meta();
1938 assert_eq!(
1939 meta.0.get("cache_hint").and_then(|v| v.as_str()),
1940 Some("no-cache"),
1941 );
1942 drop(arc_output);
1943 }
1944
1945 #[test]
1946 fn test_complete_path_completions_returns_suggestions() {
1947 let manifest_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR"));
1952 let workspace_root = manifest_dir.parent().expect("manifest dir has parent");
1953 let suggestions = completion::path_completions(workspace_root, "code-");
1954 assert!(
1955 !suggestions.is_empty(),
1956 "expected completions for prefix 'code-' in workspace root"
1957 );
1958 }
1959
1960 #[tokio::test]
1961 async fn test_handle_overview_mode_verbose_no_summary_block() {
1962 use code_analyze_core::pagination::{PaginationMode, paginate_slice};
1963 use code_analyze_core::types::{
1964 AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
1965 };
1966 use tempfile::TempDir;
1967
1968 let tmp = TempDir::new().unwrap();
1969 std::fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();
1970
1971 let peer = Arc::new(TokioMutex::new(None));
1972 let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
1973 let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
1974 let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
1975 let analyzer = CodeAnalyzer::new(
1976 peer,
1977 log_level_filter,
1978 rx,
1979 crate::metrics::MetricsSender(metrics_tx),
1980 );
1981
1982 let params: AnalyzeDirectoryParams = serde_json::from_value(serde_json::json!({
1983 "path": tmp.path().to_str().unwrap(),
1984 "verbose": true,
1985 }))
1986 .unwrap();
1987
1988 let ct = tokio_util::sync::CancellationToken::new();
1989 let (output, _cache_hit) = analyzer.handle_overview_mode(¶ms, ct).await.unwrap();
1990
1991 let use_summary = output.formatted.len() > SIZE_LIMIT; let paginated =
1994 paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
1995 let verbose = true;
1996 let formatted = if !use_summary {
1997 format_structure_paginated(
1998 &paginated.items,
1999 paginated.total,
2000 params.max_depth,
2001 Some(std::path::Path::new(¶ms.path)),
2002 verbose,
2003 )
2004 } else {
2005 output.formatted.clone()
2006 };
2007
2008 assert!(
2010 !formatted.contains("SUMMARY:"),
2011 "verbose=true must not emit SUMMARY: block; got: {}",
2012 &formatted[..formatted.len().min(300)]
2013 );
2014 assert!(
2015 formatted.contains("PAGINATED:"),
2016 "verbose=true must emit PAGINATED: header"
2017 );
2018 assert!(
2019 formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
2020 "verbose=true must emit FILES section header"
2021 );
2022 }
2023
2024 #[tokio::test]
2027 async fn test_analyze_directory_cache_hit_metrics() {
2028 use code_analyze_core::types::{
2029 AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
2030 };
2031 use tempfile::TempDir;
2032
2033 let dir = TempDir::new().unwrap();
2035 std::fs::write(dir.path().join("lib.rs"), "fn foo() {}").unwrap();
2036 let analyzer = make_analyzer();
2037 let params: AnalyzeDirectoryParams = serde_json::from_value(serde_json::json!({
2038 "path": dir.path().to_str().unwrap(),
2039 }))
2040 .unwrap();
2041
2042 let ct1 = tokio_util::sync::CancellationToken::new();
2044 let (_out1, hit1) = analyzer.handle_overview_mode(¶ms, ct1).await.unwrap();
2045
2046 let ct2 = tokio_util::sync::CancellationToken::new();
2048 let (_out2, hit2) = analyzer.handle_overview_mode(¶ms, ct2).await.unwrap();
2049
2050 assert!(!hit1, "first call must be a cache miss");
2052 assert!(hit2, "second call must be a cache hit");
2053 }
2054
2055 #[tokio::test]
2056 async fn test_analyze_module_cache_hit_metrics() {
2057 use std::io::Write as _;
2058 use tempfile::NamedTempFile;
2059
2060 let mut f = NamedTempFile::with_suffix(".rs").unwrap();
2062 writeln!(f, "fn bar() {{}}").unwrap();
2063 let path = f.path().to_str().unwrap().to_string();
2064
2065 let analyzer = make_analyzer();
2066
2067 let file_params = code_analyze_core::types::AnalyzeFileParams {
2069 path: path.clone(),
2070 ast_recursion_limit: None,
2071 fields: None,
2072 pagination: code_analyze_core::types::PaginationParams {
2073 cursor: None,
2074 page_size: None,
2075 },
2076 output_control: code_analyze_core::types::OutputControlParams {
2077 summary: None,
2078 force: None,
2079 verbose: None,
2080 },
2081 };
2082 let (_cached, _) = analyzer
2083 .handle_file_details_mode(&file_params)
2084 .await
2085 .unwrap();
2086
2087 let module_params = code_analyze_core::types::AnalyzeModuleParams { path: path.clone() };
2089
2090 let module_cache_key = std::fs::metadata(&path).ok().and_then(|meta| {
2092 meta.modified()
2093 .ok()
2094 .map(|mtime| code_analyze_core::cache::CacheKey {
2095 path: std::path::PathBuf::from(&path),
2096 modified: mtime,
2097 mode: code_analyze_core::types::AnalysisMode::FileDetails,
2098 })
2099 });
2100 let cache_hit = module_cache_key
2101 .as_ref()
2102 .and_then(|k| analyzer.cache.get(k))
2103 .is_some();
2104
2105 assert!(
2107 cache_hit,
2108 "analyze_module should find the file in the shared file cache"
2109 );
2110 drop(module_params);
2111 }
2112
2113 #[test]
2116 fn test_analyze_symbol_import_lookup_invalid_params() {
2117 let result = CodeAnalyzer::validate_import_lookup(Some(true), "");
2121
2122 assert!(
2124 result.is_err(),
2125 "import_lookup=true with empty symbol must return Err"
2126 );
2127 let err = result.unwrap_err();
2128 assert_eq!(
2129 err.code,
2130 rmcp::model::ErrorCode::INVALID_PARAMS,
2131 "expected INVALID_PARAMS; got {:?}",
2132 err.code
2133 );
2134 }
2135
2136 #[tokio::test]
2137 async fn test_analyze_symbol_import_lookup_found() {
2138 use tempfile::TempDir;
2139
2140 let dir = TempDir::new().unwrap();
2142 std::fs::write(
2143 dir.path().join("main.rs"),
2144 "use std::collections::HashMap;\nfn main() {}\n",
2145 )
2146 .unwrap();
2147
2148 let entries = traversal::walk_directory(dir.path(), None).unwrap();
2149
2150 let output =
2152 analyze::analyze_import_lookup(dir.path(), "std::collections", &entries, None).unwrap();
2153
2154 assert!(
2156 output.formatted.contains("MATCHES: 1"),
2157 "expected 1 match; got: {}",
2158 output.formatted
2159 );
2160 assert!(
2161 output.formatted.contains("main.rs"),
2162 "expected main.rs in output; got: {}",
2163 output.formatted
2164 );
2165 }
2166
2167 #[tokio::test]
2168 async fn test_analyze_symbol_import_lookup_empty() {
2169 use tempfile::TempDir;
2170
2171 let dir = TempDir::new().unwrap();
2173 std::fs::write(dir.path().join("main.rs"), "fn main() {}\n").unwrap();
2174
2175 let entries = traversal::walk_directory(dir.path(), None).unwrap();
2176
2177 let output =
2179 analyze::analyze_import_lookup(dir.path(), "no_such_module", &entries, None).unwrap();
2180
2181 assert!(
2183 output.formatted.contains("MATCHES: 0"),
2184 "expected 0 matches; got: {}",
2185 output.formatted
2186 );
2187 }
2188
2189 #[tokio::test]
2192 async fn test_analyze_directory_git_ref_non_git_repo() {
2193 use code_analyze_core::traversal::changed_files_from_git_ref;
2194 use tempfile::TempDir;
2195
2196 let dir = TempDir::new().unwrap();
2198 std::fs::write(dir.path().join("main.rs"), "fn main() {}").unwrap();
2199
2200 let result = changed_files_from_git_ref(dir.path(), "HEAD~1");
2202
2203 assert!(result.is_err(), "non-git dir must return an error");
2205 let err_msg = result.unwrap_err().to_string();
2206 assert!(
2207 err_msg.contains("git"),
2208 "error must mention git; got: {err_msg}"
2209 );
2210 }
2211
2212 #[tokio::test]
2213 async fn test_analyze_directory_git_ref_filters_changed_files() {
2214 use code_analyze_core::traversal::{changed_files_from_git_ref, filter_entries_by_git_ref};
2215 use std::collections::HashSet;
2216 use tempfile::TempDir;
2217
2218 let dir = TempDir::new().unwrap();
2220 let changed_file = dir.path().join("changed.rs");
2221 let unchanged_file = dir.path().join("unchanged.rs");
2222 std::fs::write(&changed_file, "fn changed() {}").unwrap();
2223 std::fs::write(&unchanged_file, "fn unchanged() {}").unwrap();
2224
2225 let entries = traversal::walk_directory(dir.path(), None).unwrap();
2226 let total_files = entries.iter().filter(|e| !e.is_dir).count();
2227 assert_eq!(total_files, 2, "sanity: 2 files before filtering");
2228
2229 let mut changed: HashSet<std::path::PathBuf> = HashSet::new();
2231 changed.insert(changed_file.clone());
2232
2233 let filtered = filter_entries_by_git_ref(entries, &changed, dir.path());
2235 let filtered_files: Vec<_> = filtered.iter().filter(|e| !e.is_dir).collect();
2236
2237 assert_eq!(
2239 filtered_files.len(),
2240 1,
2241 "only 1 file must remain after git_ref filter"
2242 );
2243 assert_eq!(
2244 filtered_files[0].path, changed_file,
2245 "the remaining file must be the changed one"
2246 );
2247
2248 let _ = changed_files_from_git_ref;
2250 }
2251
2252 #[tokio::test]
2253 async fn test_handle_overview_mode_git_ref_filters_via_handler() {
2254 use code_analyze_core::types::{
2255 AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
2256 };
2257 use std::process::Command;
2258 use tempfile::TempDir;
2259
2260 let dir = TempDir::new().unwrap();
2262 let repo = dir.path();
2263
2264 let git_no_hook = |repo_path: &std::path::Path, args: &[&str]| {
2267 let mut cmd = std::process::Command::new("git");
2268 cmd.args(["-c", "core.hooksPath=/dev/null"]);
2269 cmd.args(args);
2270 cmd.current_dir(repo_path);
2271 let out = cmd.output().unwrap();
2272 assert!(out.status.success(), "{out:?}");
2273 };
2274 git_no_hook(repo, &["init"]);
2275 git_no_hook(
2276 repo,
2277 &[
2278 "-c",
2279 "user.email=ci@example.com",
2280 "-c",
2281 "user.name=CI",
2282 "commit",
2283 "--allow-empty",
2284 "-m",
2285 "initial",
2286 ],
2287 );
2288
2289 std::fs::write(repo.join("file_a.rs"), "fn a() {}").unwrap();
2291 git_no_hook(repo, &["add", "file_a.rs"]);
2292 git_no_hook(
2293 repo,
2294 &[
2295 "-c",
2296 "user.email=ci@example.com",
2297 "-c",
2298 "user.name=CI",
2299 "commit",
2300 "-m",
2301 "add a",
2302 ],
2303 );
2304
2305 std::fs::write(repo.join("file_b.rs"), "fn b() {}").unwrap();
2307 git_no_hook(repo, &["add", "file_b.rs"]);
2308 git_no_hook(
2309 repo,
2310 &[
2311 "-c",
2312 "user.email=ci@example.com",
2313 "-c",
2314 "user.name=CI",
2315 "commit",
2316 "-m",
2317 "add b",
2318 ],
2319 );
2320
2321 let canon_repo = std::fs::canonicalize(repo).unwrap();
2327 let analyzer = make_analyzer();
2328 let params: AnalyzeDirectoryParams = serde_json::from_value(serde_json::json!({
2329 "path": canon_repo.to_str().unwrap(),
2330 "git_ref": "HEAD~1",
2331 }))
2332 .unwrap();
2333 let ct = tokio_util::sync::CancellationToken::new();
2334 let (arc_output, _cache_hit) = analyzer
2335 .handle_overview_mode(¶ms, ct)
2336 .await
2337 .expect("handle_overview_mode with git_ref must succeed");
2338
2339 let formatted = &arc_output.formatted;
2341 assert!(
2342 formatted.contains("file_b.rs"),
2343 "git_ref=HEAD~1 output must include file_b.rs; got:\n{formatted}"
2344 );
2345 assert!(
2346 !formatted.contains("file_a.rs"),
2347 "git_ref=HEAD~1 output must exclude file_a.rs; got:\n{formatted}"
2348 );
2349 }
2350}