1pub mod logging;
19pub mod metrics;
20
21pub use code_analyze_core::analyze;
22use code_analyze_core::{cache, completion, graph, traversal, types};
23
/// Directory names excluded from analysis (dependency, VCS, build-artifact,
/// and virtual-environment folders).
// NOTE(review): not referenced in this portion of the file — presumably
// consumed by the traversal layer; confirm before removing.
pub(crate) const EXCLUDED_DIRS: &[&str] = &[
    "node_modules",
    "vendor",
    ".git",
    "__pycache__",
    "target",
    "dist",
    "build",
    ".venv",
];
34
35use code_analyze_core::cache::AnalysisCache;
36use code_analyze_core::formatter::{
37 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
38 format_module_info, format_structure_paginated, format_summary,
39};
40use code_analyze_core::pagination::{
41 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
42};
43use code_analyze_core::traversal::{
44 WalkEntry, changed_files_from_git_ref, filter_entries_by_git_ref, walk_directory,
45};
46use code_analyze_core::types::{
47 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
48 AnalyzeSymbolParams, SymbolMatchMode,
49};
50use logging::LogEvent;
51use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
52use rmcp::handler::server::wrapper::Parameters;
53use rmcp::model::{
54 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
55 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
56 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
57 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
58};
59use rmcp::service::{NotificationContext, RequestContext};
60use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
61use serde_json::Value;
62use std::path::Path;
63use std::sync::{Arc, Mutex};
64use tokio::sync::{Mutex as TokioMutex, mpsc};
65use tracing::{instrument, warn};
66use tracing_subscriber::filter::LevelFilter;
67
/// Process-wide monotonic counter for session numbering.
// NOTE(review): not read anywhere in this portion of the file — confirm it is
// used elsewhere before relying on it.
static GLOBAL_SESSION_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);

/// Character budget for tool output; responses larger than this trigger the
/// summary / force / pagination handling in the tool methods below.
const SIZE_LIMIT: usize = 50_000;
71
/// Returns `true` when the caller asked for a summary while also supplying a
/// pagination cursor — the two request styles are mutually exclusive.
#[must_use]
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!(summary, Some(true)) && cursor.is_some()
}
78
79#[must_use]
80fn error_meta(
81 category: &'static str,
82 is_retryable: bool,
83 suggested_action: &'static str,
84) -> serde_json::Value {
85 serde_json::json!({
86 "errorCategory": category,
87 "isRetryable": is_retryable,
88 "suggestedAction": suggested_action,
89 })
90}
91
92#[must_use]
93fn err_to_tool_result(e: ErrorData) -> CallToolResult {
94 CallToolResult::error(vec![Content::text(e.message)])
95}
96
97fn no_cache_meta() -> Meta {
98 let mut m = serde_json::Map::new();
99 m.insert(
100 "cache_hint".to_string(),
101 serde_json::Value::String("no-cache".to_string()),
102 );
103 Meta(m)
104}
105
106fn paginate_focus_chains(
109 chains: &[graph::InternalCallChain],
110 mode: PaginationMode,
111 offset: usize,
112 page_size: usize,
113) -> Result<(Vec<graph::InternalCallChain>, Option<String>), ErrorData> {
114 let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
115 ErrorData::new(
116 rmcp::model::ErrorCode::INTERNAL_ERROR,
117 e.to_string(),
118 Some(error_meta("transient", true, "retry the request")),
119 )
120 })?;
121
122 if paginated.next_cursor.is_none() && offset == 0 {
123 return Ok((paginated.items, None));
124 }
125
126 let next = if let Some(raw_cursor) = paginated.next_cursor {
127 let decoded = decode_cursor(&raw_cursor).map_err(|e| {
128 ErrorData::new(
129 rmcp::model::ErrorCode::INVALID_PARAMS,
130 e.to_string(),
131 Some(error_meta("validation", false, "invalid cursor format")),
132 )
133 })?;
134 Some(
135 encode_cursor(&CursorData {
136 mode,
137 offset: decoded.offset,
138 })
139 .map_err(|e| {
140 ErrorData::new(
141 rmcp::model::ErrorCode::INVALID_PARAMS,
142 e.to_string(),
143 Some(error_meta("validation", false, "invalid cursor format")),
144 )
145 })?,
146 )
147 } else {
148 None
149 };
150
151 Ok((paginated.items, next))
152}
153
/// MCP server handler exposing the code-analysis tools.
#[derive(Clone)]
pub struct CodeAnalyzer {
    /// Router over the `#[tool]`-annotated methods; also used by `list_tools`.
    #[allow(dead_code)]
    tool_router: ToolRouter<Self>,
    /// Shared cache for file- and directory-level analysis results.
    cache: AnalysisCache,
    /// Connected MCP peer, when one is available; used for notifications.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    /// Current minimum log level filter.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    /// Receiver side of the log-event channel, wrapped in `Option` so it can
    /// be taken exactly once.
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    /// Sink for per-call metric events.
    metrics_tx: crate::metrics::MetricsSender,
    /// Per-session call sequence number, attached to metric events.
    session_call_seq: Arc<std::sync::atomic::AtomicU32>,
    /// Identifier of the active session, if one has been assigned.
    session_id: Arc<TokioMutex<Option<String>>>,
}
172
173#[tool_router]
174impl CodeAnalyzer {
175 #[must_use]
176 pub fn list_tools() -> Vec<rmcp::model::Tool> {
177 Self::tool_router().list_all()
178 }
179
180 pub fn new(
181 peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
182 log_level_filter: Arc<Mutex<LevelFilter>>,
183 event_rx: mpsc::UnboundedReceiver<LogEvent>,
184 metrics_tx: crate::metrics::MetricsSender,
185 ) -> Self {
186 let file_cap: usize = std::env::var("CODE_ANALYZE_FILE_CACHE_CAPACITY")
187 .ok()
188 .and_then(|v| v.parse().ok())
189 .unwrap_or(100);
190 CodeAnalyzer {
191 tool_router: Self::tool_router(),
192 cache: AnalysisCache::new(file_cap),
193 peer,
194 log_level_filter,
195 event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
196 metrics_tx,
197 session_call_seq: Arc::new(std::sync::atomic::AtomicU32::new(0)),
198 session_id: Arc::new(TokioMutex::new(None)),
199 }
200 }
201
202 #[instrument(skip(self))]
203 async fn emit_progress(
204 &self,
205 peer: Option<Peer<RoleServer>>,
206 token: &ProgressToken,
207 progress: f64,
208 total: f64,
209 message: String,
210 ) {
211 if let Some(peer) = peer {
212 let notification = ServerNotification::ProgressNotification(Notification::new(
213 ProgressNotificationParam {
214 progress_token: token.clone(),
215 progress,
216 total: Some(total),
217 message: Some(message),
218 },
219 ));
220 if let Err(e) = peer.send_notification(notification).await {
221 warn!("Failed to send progress notification: {}", e);
222 }
223 }
224 }
225
    /// Runs the directory "overview" analysis for `analyze_directory`.
    ///
    /// Returns the analysis output plus a flag that is `true` when the result
    /// was served from the directory cache. Emits progress notifications every
    /// ~100ms while the blocking analysis task runs, and honors cancellation
    /// via `ct`.
    #[allow(clippy::too_many_lines)] #[allow(clippy::cast_precision_loss)] #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<(std::sync::Arc<analyze::AnalysisOutput>, bool), ErrorData> {
        let path = Path::new(&params.path);
        // Shared file counter: incremented by the blocking task, read by the
        // polling loop below to drive progress notifications.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        let all_entries = walk_directory(path, None).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path permissions and availability",
                )),
            )
        })?;

        // A max_depth of 0 is treated the same as "no depth limit" for cache
        // keying purposes.
        let canonical_max_depth = max_depth.and_then(|d| if d == 0 { None } else { Some(d) });

        // The cache key is derived from the *unfiltered* walk plus the git_ref
        // value, so the cache lookup can happen before git filtering.
        let git_ref_val = params.git_ref.as_deref().filter(|s| !s.is_empty());
        let cache_key = cache::DirectoryCacheKey::from_entries(
            &all_entries,
            canonical_max_depth,
            AnalysisMode::Overview,
            git_ref_val,
        );

        if let Some(cached) = self.cache.get_directory(&cache_key) {
            return Ok((cached, true));
        }

        // When a non-empty git_ref is given, restrict entries to files changed
        // relative to that ref.
        let all_entries = if let Some(ref git_ref) = params.git_ref
            && !git_ref.is_empty()
        {
            let changed = changed_files_from_git_ref(path, git_ref).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    format!("git_ref filter failed: {e}"),
                    Some(error_meta(
                        "resource",
                        false,
                        "ensure git is installed and path is inside a git repository",
                    )),
                )
            })?;
            filter_entries_by_git_ref(all_entries, &changed, path)
        } else {
            all_entries
        };

        // Subtree counts are computed on the full entry set (before depth
        // truncation) so truncated trees can still report totals.
        let subtree_counts = if max_depth.is_some_and(|d| d > 0) {
            Some(traversal::subtree_counts_from_entries(path, &all_entries))
        } else {
            None
        };

        // Apply the depth cut-off only when a positive max_depth was given.
        let entries: Vec<traversal::WalkEntry> = if let Some(depth) = max_depth
            && depth > 0
        {
            all_entries
                .into_iter()
                .filter(|e| e.depth <= depth as usize)
                .collect()
        } else {
            all_entries
        };

        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // The actual analysis is CPU-bound, so it runs on the blocking pool.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Progress token unique per invocation (nanosecond timestamp).
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll the shared counter every 100ms and forward progress until the
        // blocking task finishes or cancellation is observed.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {current}/{total_files} files"),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Emit a final 100% notification unless the run was cancelled.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(mut output)) => {
                // Attach subtree counts and cache the (freshly computed) result.
                output.subtree_counts = subtree_counts;
                let arc_output = std::sync::Arc::new(output);
                self.cache.put_directory(cache_key, arc_output.clone());
                Ok((arc_output, false))
            }
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {e}"),
                Some(error_meta(
                    "resource",
                    false,
                    "check path and file permissions",
                )),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
399
400 #[instrument(skip(self, params))]
403 async fn handle_file_details_mode(
404 &self,
405 params: &AnalyzeFileParams,
406 ) -> Result<(std::sync::Arc<analyze::FileAnalysisOutput>, bool), ErrorData> {
407 let cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
409 meta.modified().ok().map(|mtime| cache::CacheKey {
410 path: std::path::PathBuf::from(¶ms.path),
411 modified: mtime,
412 mode: AnalysisMode::FileDetails,
413 })
414 });
415
416 if let Some(ref key) = cache_key
418 && let Some(cached) = self.cache.get(key)
419 {
420 return Ok((cached, true));
421 }
422
423 match analyze::analyze_file(¶ms.path, params.ast_recursion_limit) {
425 Ok(output) => {
426 let arc_output = std::sync::Arc::new(output);
427 if let Some(key) = cache_key {
428 self.cache.put(key, arc_output.clone());
429 }
430 Ok((arc_output, false))
431 }
432 Err(e) => Err(ErrorData::new(
433 rmcp::model::ErrorCode::INTERNAL_ERROR,
434 format!("Error analyzing file: {e}"),
435 Some(error_meta(
436 "resource",
437 false,
438 "check file path and permissions",
439 )),
440 )),
441 }
442 }
443
444 fn validate_impl_only(entries: &[WalkEntry]) -> Result<(), ErrorData> {
446 let has_rust = entries.iter().any(|e| {
447 !e.is_dir
448 && e.path
449 .extension()
450 .and_then(|x: &std::ffi::OsStr| x.to_str())
451 == Some("rs")
452 });
453
454 if !has_rust {
455 return Err(ErrorData::new(
456 rmcp::model::ErrorCode::INVALID_PARAMS,
457 "impl_only=true requires Rust source files. No .rs files found in the given path. Use analyze_symbol without impl_only for cross-language analysis.".to_string(),
458 Some(error_meta(
459 "validation",
460 false,
461 "remove impl_only or point to a directory containing .rs files",
462 )),
463 ));
464 }
465 Ok(())
466 }
467
468 fn validate_import_lookup(import_lookup: Option<bool>, symbol: &str) -> Result<(), ErrorData> {
470 if import_lookup == Some(true) && symbol.is_empty() {
471 return Err(ErrorData::new(
472 rmcp::model::ErrorCode::INVALID_PARAMS,
473 "import_lookup=true requires symbol to contain the module path to search for"
474 .to_string(),
475 Some(error_meta(
476 "validation",
477 false,
478 "set symbol to the module path when using import_lookup=true",
479 )),
480 ));
481 }
482 Ok(())
483 }
484
    /// Spawns the focused (symbol) analysis on the blocking pool and polls its
    /// shared file counter every ~100ms, forwarding progress notifications to
    /// the peer until the task completes or `ct` is cancelled.
    #[allow(clippy::cast_precision_loss)] async fn poll_progress_until_done(
        &self,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
        symbol_display: &str,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // Clone everything the blocking closure needs, since it must be 'static.
        let counter_clone = counter.clone();
        let ct_clone = ct.clone();
        let entries_clone = std::sync::Arc::clone(&entries);
        let path_owned = analysis_params.path.clone();
        let symbol_owned = analysis_params.symbol.clone();
        let match_mode_owned = analysis_params.match_mode.clone();
        let follow_depth = analysis_params.follow_depth;
        let max_depth = analysis_params.max_depth;
        let ast_recursion_limit = analysis_params.ast_recursion_limit;
        let use_summary = analysis_params.use_summary;
        let impl_only = analysis_params.impl_only;
        let handle = tokio::task::spawn_blocking(move || {
            let params = analyze::FocusedAnalysisConfig {
                focus: symbol_owned,
                match_mode: match_mode_owned,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                use_summary,
                impl_only,
            };
            analyze::analyze_focused_with_progress_with_entries(
                &path_owned,
                &params,
                &counter_clone,
                &ct_clone,
                &entries_clone,
            )
        });

        // Progress token unique per invocation (nanosecond timestamp).
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;

        // Poll the counter until the blocking task finishes or cancellation is
        // observed; only emit when the count actually changed.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {current}/{total_files} files for symbol '{symbol_display}'"
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Emit a final 100% notification unless the run was cancelled.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {total_files} files for symbol '{symbol_display}'"),
            )
            .await;
        }

        match handle.await {
            Ok(Ok(output)) => Ok(output),
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                Some(error_meta("transient", true, "analysis was cancelled")),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing symbol: {e}"),
                Some(error_meta("resource", false, "check symbol name and file")),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {e}"),
                Some(error_meta("transient", true, "retry the request")),
            )),
        }
    }
595
    /// Runs the focused analysis, automatically retrying in summary mode when
    /// the caller left `summary` unspecified and the full output exceeds
    /// `SIZE_LIMIT`. Explicit `summary=false` over the limit (without `force`)
    /// is rejected with guidance instead.
    async fn run_focused_with_auto_summary(
        &self,
        params: &AnalyzeSymbolParams,
        analysis_params: &FocusedAnalysisParams,
        counter: std::sync::Arc<std::sync::atomic::AtomicUsize>,
        ct: tokio_util::sync::CancellationToken,
        entries: std::sync::Arc<Vec<WalkEntry>>,
        total_files: usize,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        // force=true always wins over summary=true for the initial run.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        let analysis_params_initial = FocusedAnalysisParams {
            use_summary: use_summary_for_task,
            ..analysis_params.clone()
        };

        let mut output = self
            .poll_progress_until_done(
                &analysis_params_initial,
                counter.clone(),
                ct.clone(),
                entries.clone(),
                total_files,
                &params.symbol,
            )
            .await?;

        // Auto-summary path: summary was unspecified, force was not set, and
        // the full output is over budget — rerun in summary mode.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            // Fresh counter for the retry so progress restarts from zero.
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let analysis_params_retry = FocusedAnalysisParams {
                use_summary: true,
                ..analysis_params.clone()
            };
            let summary_result = self
                .poll_progress_until_done(
                    &analysis_params_retry,
                    counter2,
                    ct,
                    entries,
                    total_files,
                    &params.symbol,
                )
                .await;

            if let Ok(summary_output) = summary_result {
                // Keep the original output's metadata; only the formatted text
                // is replaced by the summary.
                output.formatted = summary_output.formatted;
            } else {
                // Summary retry failed: report the oversize condition instead.
                let estimated_tokens = output.formatted.len() / 4;
                let message = format!(
                    "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                    output.formatted.len(),
                    estimated_tokens
                );
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    message,
                    Some(error_meta(
                        "validation",
                        false,
                        "use summary=true or force=true",
                    )),
                ));
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly opted out of summary but the output is over
            // budget and force is not set: refuse with actionable options.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - summary=true to get compact summary\n\
                 - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                )),
            ));
        }

        Ok(output)
    }
690
    /// Orchestrates the focused (symbol) analysis for `analyze_symbol`: walks
    /// the directory, applies the optional git_ref filter, validates the
    /// impl_only precondition, runs the analysis (with auto-summary), and
    /// finally prepends the impl_only filter banner when that option is set.
    #[instrument(skip(self, params, ct))]
    async fn handle_focused_mode(
        &self,
        params: &AnalyzeSymbolParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        let path = Path::new(&params.path);
        let raw_entries = match walk_directory(path, params.max_depth) {
            Ok(e) => e,
            Err(e) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Failed to walk directory: {e}"),
                    Some(error_meta(
                        "resource",
                        false,
                        "check path permissions and availability",
                    )),
                ));
            }
        };
        // Non-empty git_ref restricts entries to files changed vs. that ref.
        let filtered_entries = if let Some(ref git_ref) = params.git_ref
            && !git_ref.is_empty()
        {
            let changed = changed_files_from_git_ref(path, git_ref).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    format!("git_ref filter failed: {e}"),
                    Some(error_meta(
                        "resource",
                        false,
                        "ensure git is installed and path is inside a git repository",
                    )),
                )
            })?;
            filter_entries_by_git_ref(raw_entries, &changed, path)
        } else {
            raw_entries
        };
        let entries = std::sync::Arc::new(filtered_entries);

        // impl_only only makes sense over Rust sources; reject early otherwise.
        if params.impl_only == Some(true) {
            Self::validate_impl_only(&entries)?;
        }

        let total_files = entries.iter().filter(|e| !e.is_dir).count();
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));

        // use_summary starts false here; run_focused_with_auto_summary decides
        // the effective value from the output-control flags.
        let analysis_params = FocusedAnalysisParams {
            path: path.to_path_buf(),
            symbol: params.symbol.clone(),
            match_mode: params.match_mode.clone().unwrap_or_default(),
            follow_depth: params.follow_depth.unwrap_or(1),
            max_depth: params.max_depth,
            ast_recursion_limit: params.ast_recursion_limit,
            use_summary: false,
            impl_only: params.impl_only,
        };

        let mut output = self
            .run_focused_with_auto_summary(
                params,
                &analysis_params,
                counter,
                ct,
                entries,
                total_files,
            )
            .await?;

        if params.impl_only == Some(true) {
            // Prepend a banner describing how many callers survived the
            // impl-trait filter.
            let filter_line = format!(
                "FILTER: impl_only=true ({} of {} callers shown)\n",
                output.impl_trait_caller_count, output.unfiltered_caller_count
            );
            output.formatted = format!("{}{}", filter_line, output.formatted);

            // Zero surviving callers usually means the symbol is not a trait
            // method; append a hint rather than returning an error.
            if output.impl_trait_caller_count == 0 {
                output.formatted.push_str(
                    "\nNOTE: No impl-trait callers found. The symbol may be a plain function or struct, not a trait method. Remove impl_only to see all callers.\n"
                );
            }
        }

        Ok(output)
    }
781
    /// Tool entry point: directory overview with summary/pagination handling
    /// and per-call metrics. Tool-level failures are returned as successful
    /// `CallToolResult::error` payloads rather than protocol errors.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_directory",
        description = "Tree-view of directory with LOC, function/class counts, test markers. Respects .gitignore. For 1000+ files, use max_depth=2-3 and summary=true. Empty directories return zero counts. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they?",
        output_schema = schema_for_type::<analyze::AnalysisOutput>(),
        annotations(
            title = "Analyze Directory",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_directory(
        &self,
        params: Parameters<AnalyzeDirectoryParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();
        // Captured up front for the metrics event emitted at the end.
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let max_depth_val = params.max_depth;
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        let (arc_output, dir_cache_hit) = match self.handle_overview_mode(&params, ct).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };
        // Take ownership of the (possibly cached) output; clone only when the
        // Arc is still shared (e.g. the cache holds another reference).
        let mut output = match std::sync::Arc::try_unwrap(arc_output) {
            Ok(owned) => owned,
            Err(arc) => (*arc).clone(),
        };

        // summary=true and a pagination cursor are mutually exclusive.
        if summary_cursor_conflict(
            params.output_control.summary,
            params.pagination.cursor.as_deref(),
        ) {
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                "summary=true is incompatible with a pagination cursor; use one or the other"
                    .to_string(),
                Some(error_meta(
                    "validation",
                    false,
                    "remove cursor or set summary=false",
                )),
            )));
        }

        // Summary resolution: force wins, then an explicit summary flag, and
        // otherwise auto-summarize when the output is over SIZE_LIMIT.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            output.formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            output.formatted = format_summary(
                &output.entries,
                &output.files,
                params.max_depth,
                output.subtree_counts.as_deref(),
            );
        }

        // Resolve the pagination offset from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        let paginated =
            match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            output.formatted = format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(Path::new(&params.path)),
                verbose,
            );
        }

        // Summary output is never paginated, so it carries no cursor.
        if use_summary {
            output.next_cursor = None;
        } else {
            output.next_cursor.clone_from(&paginated.next_cursor);
        }

        // Append the continuation cursor to the text payload for clients that
        // only read the text content.
        let mut final_text = output.formatted.clone();
        if !use_summary && let Some(cursor) = paginated.next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(&cursor);
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_directory",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: max_depth_val,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
            cache_hit: Some(dir_cache_hit),
        });
        Ok(result)
    }
937
    /// Tool entry point: single-file analysis with summary/pagination handling
    /// and per-call metrics. Pagination applies to top-level functions only
    /// (functions whose span falls inside a class span are excluded).
    #[instrument(skip(self, _context))]
    #[tool(
        name = "analyze_file",
        description = "Functions, types, classes, and imports from a single source file; use analyze_directory for directories. Supported: Rust, Go, Java, Python, TypeScript, TSX, Fortran, JavaScript, C/C++, C#. Passing a directory path returns an error. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py.",
        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
        annotations(
            title = "Analyze File",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        _context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        // Captured up front for the metrics event emitted at the end.
        let t_start = std::time::Instant::now();
        let param_path = params.path.clone();
        let seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let sid = self.session_id.lock().await.clone();

        let (arc_output, file_cache_hit) = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Summary resolution: force wins, then an explicit summary flag, and
        // otherwise auto-summarize when the output is over SIZE_LIMIT.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // Explicit summary=false with oversize output and no force: refuse
            // with actionable options.
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - Use fields to limit output to specific sections (functions, classes, or imports)\n\
                 - Use summary=true for a compact overview",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                Some(error_meta(
                    "validation",
                    false,
                    "use force=true, fields, or summary=true",
                )),
            )));
        }

        // Resolve the pagination offset from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    Some(error_meta("validation", false, "invalid cursor format")),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Top-level functions = functions whose line span is not contained in
        // any class span; only these are paginated.
        let top_level_fns: Vec<crate::types::FunctionInfo> = arc_output
            .semantic
            .functions
            .iter()
            .filter(|func| {
                !arc_output
                    .semantic
                    .classes
                    .iter()
                    .any(|class| func.line >= class.line && func.end_line <= class.end_line)
            })
            .cloned()
            .collect();

        let paginated =
            match paginate_slice(&top_level_fns, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        Some(error_meta("transient", true, "retry the request")),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
                params.fields.as_deref(),
            );
        }

        // Summary output is never paginated, so it carries no cursor.
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        // Append the continuation cursor to the text payload for clients that
        // only read the text content.
        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str("NEXT_CURSOR: ");
            final_text.push_str(cursor);
        }

        let response_output = analyze::FileAnalysisOutput::new(
            formatted,
            arc_output.semantic.clone(),
            line_count,
            next_cursor,
        );

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: dur,
            output_chars: final_text.len(),
            param_path_depth: crate::metrics::path_component_count(&param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: sid,
            seq: Some(seq),
            cache_hit: Some(file_cache_hit),
        });
        Ok(result)
    }
1115
1116 #[instrument(skip(self, context))]
1117 #[tool(
1118 name = "analyze_symbol",
1119 description = "Call graph for a named function/method across all files in a directory to trace usage. Returns direct callers and callees. Unknown symbols return error; symbols with no callers/callees return empty chains. Use import_lookup=true with symbol set to the module path to find all files that import a given module path instead of tracing a call graph. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep; Show only trait impl callers of the write method; Find all files that import std::collections",
1120 output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
1121 annotations(
1122 title = "Analyze Symbol",
1123 read_only_hint = true,
1124 destructive_hint = false,
1125 idempotent_hint = true,
1126 open_world_hint = false
1127 )
1128 )]
1129 async fn analyze_symbol(
1130 &self,
1131 params: Parameters<AnalyzeSymbolParams>,
1132 context: RequestContext<RoleServer>,
1133 ) -> Result<CallToolResult, ErrorData> {
1134 let params = params.0;
1135 let ct = context.ct.clone();
1136 let t_start = std::time::Instant::now();
1137 let param_path = params.path.clone();
1138 let max_depth_val = params.follow_depth;
1139 let seq = self
1140 .session_call_seq
1141 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1142 let sid = self.session_id.lock().await.clone();
1143
1144 if let Err(e) = Self::validate_import_lookup(params.import_lookup, ¶ms.symbol) {
1146 return Ok(err_to_tool_result(e));
1147 }
1148
1149 if params.import_lookup == Some(true) {
1151 let path = Path::new(¶ms.path);
1152 let raw_entries = match walk_directory(path, params.max_depth) {
1153 Ok(e) => e,
1154 Err(e) => {
1155 return Ok(err_to_tool_result(ErrorData::new(
1156 rmcp::model::ErrorCode::INTERNAL_ERROR,
1157 format!("Failed to walk directory: {e}"),
1158 Some(error_meta(
1159 "resource",
1160 false,
1161 "check path permissions and availability",
1162 )),
1163 )));
1164 }
1165 };
1166 let entries = if let Some(ref git_ref) = params.git_ref
1168 && !git_ref.is_empty()
1169 {
1170 let changed = match changed_files_from_git_ref(path, git_ref) {
1171 Ok(c) => c,
1172 Err(e) => {
1173 return Ok(err_to_tool_result(ErrorData::new(
1174 rmcp::model::ErrorCode::INVALID_PARAMS,
1175 format!("git_ref filter failed: {e}"),
1176 Some(error_meta(
1177 "resource",
1178 false,
1179 "ensure git is installed and path is inside a git repository",
1180 )),
1181 )));
1182 }
1183 };
1184 filter_entries_by_git_ref(raw_entries, &changed, path)
1185 } else {
1186 raw_entries
1187 };
1188 let output = match analyze::analyze_import_lookup(
1189 path,
1190 ¶ms.symbol,
1191 &entries,
1192 params.ast_recursion_limit,
1193 ) {
1194 Ok(v) => v,
1195 Err(e) => {
1196 return Ok(err_to_tool_result(ErrorData::new(
1197 rmcp::model::ErrorCode::INTERNAL_ERROR,
1198 format!("import_lookup failed: {e}"),
1199 Some(error_meta(
1200 "resource",
1201 false,
1202 "check path and file permissions",
1203 )),
1204 )));
1205 }
1206 };
1207 let final_text = output.formatted.clone();
1208 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1209 .with_meta(Some(no_cache_meta()));
1210 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1211 result.structured_content = Some(structured);
1212 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1213 self.metrics_tx.send(crate::metrics::MetricEvent {
1214 ts: crate::metrics::unix_ms(),
1215 tool: "analyze_symbol",
1216 duration_ms: dur,
1217 output_chars: final_text.len(),
1218 param_path_depth: crate::metrics::path_component_count(¶m_path),
1219 max_depth: max_depth_val,
1220 result: "ok",
1221 error_type: None,
1222 session_id: sid,
1223 seq: Some(seq),
1224 cache_hit: Some(false),
1225 });
1226 return Ok(result);
1227 }
1228
1229 let mut output = match self.handle_focused_mode(¶ms, ct).await {
1231 Ok(v) => v,
1232 Err(e) => return Ok(err_to_tool_result(e)),
1233 };
1234
1235 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
1237 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
1238 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
1239 ErrorData::new(
1240 rmcp::model::ErrorCode::INVALID_PARAMS,
1241 e.to_string(),
1242 Some(error_meta("validation", false, "invalid cursor format")),
1243 )
1244 }) {
1245 Ok(v) => v,
1246 Err(e) => return Ok(err_to_tool_result(e)),
1247 };
1248 cursor_data.offset
1249 } else {
1250 0
1251 };
1252
1253 let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
1255 decode_cursor(cursor_str)
1256 .map(|c| c.mode)
1257 .unwrap_or(PaginationMode::Callers)
1258 } else {
1259 PaginationMode::Callers
1260 };
1261
1262 let use_summary = params.output_control.summary == Some(true);
1263 let verbose = params.output_control.verbose.unwrap_or(false);
1264
1265 let mut callee_cursor = match cursor_mode {
1266 PaginationMode::Callers => {
1267 let (paginated_items, paginated_next) = match paginate_focus_chains(
1268 &output.prod_chains,
1269 PaginationMode::Callers,
1270 offset,
1271 page_size,
1272 ) {
1273 Ok(v) => v,
1274 Err(e) => return Ok(err_to_tool_result(e)),
1275 };
1276
1277 if !use_summary
1278 && (paginated_next.is_some()
1279 || offset > 0
1280 || !verbose
1281 || !output.outgoing_chains.is_empty())
1282 {
1283 let base_path = Path::new(¶ms.path);
1284 output.formatted = format_focused_paginated(
1285 &paginated_items,
1286 output.prod_chains.len(),
1287 PaginationMode::Callers,
1288 ¶ms.symbol,
1289 &output.prod_chains,
1290 &output.test_chains,
1291 &output.outgoing_chains,
1292 output.def_count,
1293 offset,
1294 Some(base_path),
1295 verbose,
1296 );
1297 paginated_next
1298 } else {
1299 None
1300 }
1301 }
1302 PaginationMode::Callees => {
1303 let (paginated_items, paginated_next) = match paginate_focus_chains(
1304 &output.outgoing_chains,
1305 PaginationMode::Callees,
1306 offset,
1307 page_size,
1308 ) {
1309 Ok(v) => v,
1310 Err(e) => return Ok(err_to_tool_result(e)),
1311 };
1312
1313 if paginated_next.is_some() || offset > 0 || !verbose {
1314 let base_path = Path::new(¶ms.path);
1315 output.formatted = format_focused_paginated(
1316 &paginated_items,
1317 output.outgoing_chains.len(),
1318 PaginationMode::Callees,
1319 ¶ms.symbol,
1320 &output.prod_chains,
1321 &output.test_chains,
1322 &output.outgoing_chains,
1323 output.def_count,
1324 offset,
1325 Some(base_path),
1326 verbose,
1327 );
1328 paginated_next
1329 } else {
1330 None
1331 }
1332 }
1333 PaginationMode::Default => {
1334 unreachable!("SymbolFocus should only use Callers or Callees modes")
1335 }
1336 };
1337
1338 if callee_cursor.is_none()
1343 && cursor_mode == PaginationMode::Callers
1344 && !output.outgoing_chains.is_empty()
1345 && !use_summary
1346 && let Ok(cursor) = encode_cursor(&CursorData {
1347 mode: PaginationMode::Callees,
1348 offset: 0,
1349 })
1350 {
1351 callee_cursor = Some(cursor);
1352 }
1353
1354 output.next_cursor.clone_from(&callee_cursor);
1356
1357 let mut final_text = output.formatted.clone();
1359 if let Some(cursor) = callee_cursor {
1360 final_text.push('\n');
1361 final_text.push_str("NEXT_CURSOR: ");
1362 final_text.push_str(&cursor);
1363 }
1364
1365 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
1366 .with_meta(Some(no_cache_meta()));
1367 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
1368 result.structured_content = Some(structured);
1369 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1370 self.metrics_tx.send(crate::metrics::MetricEvent {
1371 ts: crate::metrics::unix_ms(),
1372 tool: "analyze_symbol",
1373 duration_ms: dur,
1374 output_chars: final_text.len(),
1375 param_path_depth: crate::metrics::path_component_count(¶m_path),
1376 max_depth: max_depth_val,
1377 result: "ok",
1378 error_type: None,
1379 session_id: sid,
1380 seq: Some(seq),
1381 cache_hit: Some(false),
1382 });
1383 Ok(result)
1384 }
1385
1386 #[instrument(skip(self, _context))]
1387 #[tool(
1388 name = "analyze_module",
1389 description = "Function and import index for a single source file with minimal token cost: name, line_count, language, function names with line numbers, import list only (~75% smaller than analyze_file). Use analyze_file when you need signatures, types, or class details. Supported: Rust, Go, Java, Python, TypeScript, TSX, Fortran, JavaScript, C/C++, C#. Pagination, summary, force, and verbose not supported. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs.",
1390 output_schema = schema_for_type::<types::ModuleInfo>(),
1391 annotations(
1392 title = "Analyze Module",
1393 read_only_hint = true,
1394 destructive_hint = false,
1395 idempotent_hint = true,
1396 open_world_hint = false
1397 )
1398 )]
1399 async fn analyze_module(
1400 &self,
1401 params: Parameters<AnalyzeModuleParams>,
1402 _context: RequestContext<RoleServer>,
1403 ) -> Result<CallToolResult, ErrorData> {
1404 let params = params.0;
1405 let t_start = std::time::Instant::now();
1406 let param_path = params.path.clone();
1407 let seq = self
1408 .session_call_seq
1409 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1410 let sid = self.session_id.lock().await.clone();
1411
1412 if std::fs::metadata(¶ms.path)
1414 .map(|m| m.is_dir())
1415 .unwrap_or(false)
1416 {
1417 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1418 self.metrics_tx.send(crate::metrics::MetricEvent {
1419 ts: crate::metrics::unix_ms(),
1420 tool: "analyze_module",
1421 duration_ms: dur,
1422 output_chars: 0,
1423 param_path_depth: crate::metrics::path_component_count(¶m_path),
1424 max_depth: None,
1425 result: "error",
1426 error_type: Some("invalid_params".to_string()),
1427 session_id: sid.clone(),
1428 seq: Some(seq),
1429 cache_hit: None,
1430 });
1431 return Ok(err_to_tool_result(ErrorData::new(
1432 rmcp::model::ErrorCode::INVALID_PARAMS,
1433 format!(
1434 "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1435 params.path
1436 ),
1437 Some(error_meta(
1438 "validation",
1439 false,
1440 "use analyze_directory for directories",
1441 )),
1442 )));
1443 }
1444
1445 let module_cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
1447 meta.modified().ok().map(|mtime| cache::CacheKey {
1448 path: std::path::PathBuf::from(¶ms.path),
1449 modified: mtime,
1450 mode: AnalysisMode::FileDetails,
1451 })
1452 });
1453 let (module_info, module_cache_hit) = if let Some(ref key) = module_cache_key
1454 && let Some(cached_file) = self.cache.get(key)
1455 {
1456 let file_path = std::path::Path::new(¶ms.path);
1460 let name = file_path
1461 .file_name()
1462 .and_then(|n: &std::ffi::OsStr| n.to_str())
1463 .unwrap_or("unknown")
1464 .to_string();
1465 let language = file_path
1466 .extension()
1467 .and_then(|e| e.to_str())
1468 .and_then(code_analyze_core::lang::language_for_extension)
1469 .unwrap_or("unknown")
1470 .to_string();
1471 let mi = types::ModuleInfo {
1472 name,
1473 line_count: cached_file.line_count,
1474 language,
1475 functions: cached_file
1476 .semantic
1477 .functions
1478 .iter()
1479 .map(|f| types::ModuleFunctionInfo {
1480 name: f.name.clone(),
1481 line: f.line,
1482 })
1483 .collect(),
1484 imports: cached_file
1485 .semantic
1486 .imports
1487 .iter()
1488 .map(|i| types::ModuleImportInfo {
1489 module: i.module.clone(),
1490 items: i.items.clone(),
1491 })
1492 .collect(),
1493 };
1494 (mi, true)
1495 } else {
1496 let file_output = match analyze::analyze_file(¶ms.path, None).map_err(|e| {
1500 ErrorData::new(
1501 rmcp::model::ErrorCode::INVALID_PARAMS,
1502 format!("Failed to analyze module: {e}"),
1503 Some(error_meta(
1504 "validation",
1505 false,
1506 "ensure file exists, is readable, and has a supported extension",
1507 )),
1508 )
1509 }) {
1510 Ok(v) => v,
1511 Err(e) => return Ok(err_to_tool_result(e)),
1512 };
1513 let arc_output = std::sync::Arc::new(file_output);
1514 if let Some(key) = module_cache_key.clone() {
1515 self.cache.put(key, arc_output.clone());
1516 }
1517 let file_path = std::path::Path::new(¶ms.path);
1518 let name = file_path
1519 .file_name()
1520 .and_then(|n: &std::ffi::OsStr| n.to_str())
1521 .unwrap_or("unknown")
1522 .to_string();
1523 let language = file_path
1524 .extension()
1525 .and_then(|e| e.to_str())
1526 .and_then(code_analyze_core::lang::language_for_extension)
1527 .unwrap_or("unknown")
1528 .to_string();
1529 let mi = types::ModuleInfo {
1530 name,
1531 line_count: arc_output.line_count,
1532 language,
1533 functions: arc_output
1534 .semantic
1535 .functions
1536 .iter()
1537 .map(|f| types::ModuleFunctionInfo {
1538 name: f.name.clone(),
1539 line: f.line,
1540 })
1541 .collect(),
1542 imports: arc_output
1543 .semantic
1544 .imports
1545 .iter()
1546 .map(|i| types::ModuleImportInfo {
1547 module: i.module.clone(),
1548 items: i.items.clone(),
1549 })
1550 .collect(),
1551 };
1552 (mi, false)
1553 };
1554
1555 let text = format_module_info(&module_info);
1556 let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1557 .with_meta(Some(no_cache_meta()));
1558 let structured = match serde_json::to_value(&module_info).map_err(|e| {
1559 ErrorData::new(
1560 rmcp::model::ErrorCode::INTERNAL_ERROR,
1561 format!("serialization failed: {e}"),
1562 Some(error_meta("internal", false, "report this as a bug")),
1563 )
1564 }) {
1565 Ok(v) => v,
1566 Err(e) => return Ok(err_to_tool_result(e)),
1567 };
1568 result.structured_content = Some(structured);
1569 let dur = t_start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
1570 self.metrics_tx.send(crate::metrics::MetricEvent {
1571 ts: crate::metrics::unix_ms(),
1572 tool: "analyze_module",
1573 duration_ms: dur,
1574 output_chars: text.len(),
1575 param_path_depth: crate::metrics::path_component_count(¶m_path),
1576 max_depth: None,
1577 result: "ok",
1578 error_type: None,
1579 session_id: sid,
1580 seq: Some(seq),
1581 cache_hit: Some(module_cache_hit),
1582 });
1583 Ok(result)
1584 }
1585}
1586
/// Normalized inputs for the focused (call-graph) analysis path, assembled
/// from `AnalyzeSymbolParams` before handing off to the core analyzer.
#[derive(Clone)]
struct FocusedAnalysisParams {
    /// Root directory to scan for definitions and call sites.
    path: std::path::PathBuf,
    /// Function/method name whose callers and callees are traced.
    symbol: String,
    /// How `symbol` is matched against candidate names (exact vs. fuzzier).
    match_mode: SymbolMatchMode,
    /// How many caller/callee levels to follow when building chains.
    follow_depth: u32,
    /// Optional cap on directory-walk depth; `None` means unlimited.
    max_depth: Option<u32>,
    /// Optional cap on AST recursion while parsing; `None` uses the default.
    ast_recursion_limit: Option<usize>,
    /// Whether to emit the condensed summary view instead of full chains.
    use_summary: bool,
    /// When `Some(true)`, restrict callers to trait-impl call sites
    /// (Rust-only option; validated upstream).
    impl_only: Option<bool>,
}
1599
1600#[tool_handler]
1601impl ServerHandler for CodeAnalyzer {
1602 fn get_info(&self) -> InitializeResult {
1603 let excluded = crate::EXCLUDED_DIRS.join(", ");
1604 let instructions = format!(
1605 "Recommended workflow:\n\
1606 1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify source package (largest by file count; exclude {excluded}).\n\
1607 2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for module map. Include test directories (tests/, *_test.go, test_*.py, test_*.rs, *.spec.ts, *.spec.js).\n\
1608 3. For key files, prefer analyze_module for function/import index; use analyze_file for signatures and types.\n\
1609 4. Use analyze_symbol to trace call graphs.\n\
1610 Prefer summary=true on 1000+ files. Set max_depth=2; increase if packages too large. Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
1611 );
1612 let capabilities = ServerCapabilities::builder()
1613 .enable_logging()
1614 .enable_tools()
1615 .enable_tool_list_changed()
1616 .enable_completions()
1617 .build();
1618 let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
1619 .with_title("Code Analyze MCP")
1620 .with_description("MCP server for code structure analysis using tree-sitter");
1621 InitializeResult::new(capabilities)
1622 .with_server_info(server_info)
1623 .with_instructions(&instructions)
1624 }
1625
1626 async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
1627 let mut peer_lock = self.peer.lock().await;
1628 *peer_lock = Some(context.peer.clone());
1629 drop(peer_lock);
1630
1631 let millis = std::time::SystemTime::now()
1633 .duration_since(std::time::UNIX_EPOCH)
1634 .unwrap_or_default()
1635 .as_millis()
1636 .try_into()
1637 .unwrap_or(u64::MAX);
1638 let counter = GLOBAL_SESSION_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
1639 let sid = format!("{millis}-{counter}");
1640 {
1641 let mut session_id_lock = self.session_id.lock().await;
1642 *session_id_lock = Some(sid);
1643 }
1644 self.session_call_seq
1645 .store(0, std::sync::atomic::Ordering::Relaxed);
1646
1647 let peer = self.peer.clone();
1649 let event_rx = self.event_rx.clone();
1650
1651 tokio::spawn(async move {
1652 let rx = {
1653 let mut rx_lock = event_rx.lock().await;
1654 rx_lock.take()
1655 };
1656
1657 if let Some(mut receiver) = rx {
1658 let mut buffer = Vec::with_capacity(64);
1659 loop {
1660 receiver.recv_many(&mut buffer, 64).await;
1662
1663 if buffer.is_empty() {
1664 break;
1666 }
1667
1668 let peer_lock = peer.lock().await;
1670 if let Some(peer) = peer_lock.as_ref() {
1671 for log_event in buffer.drain(..) {
1672 let notification = ServerNotification::LoggingMessageNotification(
1673 Notification::new(LoggingMessageNotificationParam {
1674 level: log_event.level,
1675 logger: Some(log_event.logger),
1676 data: log_event.data,
1677 }),
1678 );
1679 if let Err(e) = peer.send_notification(notification).await {
1680 warn!("Failed to send logging notification: {}", e);
1681 }
1682 }
1683 }
1684 }
1685 }
1686 });
1687 }
1688
1689 #[instrument(skip(self, _context))]
1690 async fn on_cancelled(
1691 &self,
1692 notification: CancelledNotificationParam,
1693 _context: NotificationContext<RoleServer>,
1694 ) {
1695 tracing::info!(
1696 request_id = ?notification.request_id,
1697 reason = ?notification.reason,
1698 "Received cancellation notification"
1699 );
1700 }
1701
1702 #[instrument(skip(self, _context))]
1703 async fn complete(
1704 &self,
1705 request: CompleteRequestParams,
1706 _context: RequestContext<RoleServer>,
1707 ) -> Result<CompleteResult, ErrorData> {
1708 let argument_name = &request.argument.name;
1710 let argument_value = &request.argument.value;
1711
1712 let completions = match argument_name.as_str() {
1713 "path" => {
1714 let root = Path::new(".");
1716 completion::path_completions(root, argument_value)
1717 }
1718 "symbol" => {
1719 let path_arg = request
1721 .context
1722 .as_ref()
1723 .and_then(|ctx| ctx.get_argument("path"));
1724
1725 match path_arg {
1726 Some(path_str) => {
1727 let path = Path::new(path_str);
1728 completion::symbol_completions(&self.cache, path, argument_value)
1729 }
1730 None => Vec::new(),
1731 }
1732 }
1733 _ => Vec::new(),
1734 };
1735
1736 let total_count = u32::try_from(completions.len()).unwrap_or(u32::MAX);
1738 let (values, has_more) = if completions.len() > 100 {
1739 (completions.into_iter().take(100).collect(), true)
1740 } else {
1741 (completions, false)
1742 };
1743
1744 let completion_info =
1745 match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
1746 Ok(info) => info,
1747 Err(_) => {
1748 CompletionInfo::with_all_values(Vec::new())
1750 .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
1751 }
1752 };
1753
1754 Ok(CompleteResult::new(completion_info))
1755 }
1756
1757 async fn set_level(
1758 &self,
1759 params: SetLevelRequestParams,
1760 _context: RequestContext<RoleServer>,
1761 ) -> Result<(), ErrorData> {
1762 let level_filter = match params.level {
1763 LoggingLevel::Debug => LevelFilter::DEBUG,
1764 LoggingLevel::Info | LoggingLevel::Notice => LevelFilter::INFO,
1765 LoggingLevel::Warning => LevelFilter::WARN,
1766 LoggingLevel::Error
1767 | LoggingLevel::Critical
1768 | LoggingLevel::Alert
1769 | LoggingLevel::Emergency => LevelFilter::ERROR,
1770 };
1771
1772 let mut filter_lock = self
1773 .log_level_filter
1774 .lock()
1775 .unwrap_or_else(|e| e.into_inner());
1776 *filter_lock = level_filter;
1777 Ok(())
1778 }
1779}
1780
// Unit tests exercising the handler internals directly (no MCP transport):
// constructor wiring, parameter validation, cache behavior, pagination
// formatting, import lookup, and git_ref filtering.
#[cfg(test)]
mod tests {
    use super::*;

    // emit_progress with no connected peer must be a silent no-op.
    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        );
        let token = ProgressToken(NumberOrString::String("test".into()));
        analyzer
            .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
            .await;
    }

    // Shared test fixture: a CodeAnalyzer wired to dummy channels and no peer.
    fn make_analyzer() -> CodeAnalyzer {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        )
    }

    // summary=true combined with a cursor is the only conflicting combination.
    #[test]
    fn test_summary_cursor_conflict() {
        assert!(summary_cursor_conflict(Some(true), Some("cursor")));
        assert!(!summary_cursor_conflict(Some(true), None));
        assert!(!summary_cursor_conflict(None, Some("x")));
        assert!(!summary_cursor_conflict(None, None));
    }

    // impl_only is a Rust-only option; a directory of Python files must be
    // rejected with INVALID_PARAMS.
    #[tokio::test]
    async fn test_validate_impl_only_non_rust_returns_invalid_params() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.py"), "def foo(): pass").unwrap();

        let analyzer = make_analyzer();
        let entries: Vec<traversal::WalkEntry> =
            traversal::walk_directory(dir.path(), None).unwrap_or_default();
        let result = CodeAnalyzer::validate_impl_only(&entries);
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert_eq!(err.code, rmcp::model::ErrorCode::INVALID_PARAMS);
        drop(analyzer); }

    // Directory results must carry the no-cache meta hint.
    #[tokio::test]
    async fn test_no_cache_meta_on_analyze_directory_result() {
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.rs"), "fn main() {}").unwrap();

        let analyzer = make_analyzer();
        let params: AnalyzeDirectoryParams = serde_json::from_value(serde_json::json!({
            "path": dir.path().to_str().unwrap(),
        }))
        .unwrap();
        let ct = tokio_util::sync::CancellationToken::new();
        let (arc_output, _cache_hit) = analyzer.handle_overview_mode(&params, ct).await.unwrap();
        let meta = no_cache_meta();
        assert_eq!(
            meta.0.get("cache_hint").and_then(|v| v.as_str()),
            Some("no-cache"),
        );
        drop(arc_output);
    }

    // Path completion against the workspace root must yield suggestions for
    // a prefix that matches sibling crate directories.
    #[test]
    fn test_complete_path_completions_returns_suggestions() {
        let manifest_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR"));
        let workspace_root = manifest_dir.parent().expect("manifest dir has parent");
        let suggestions = completion::path_completions(workspace_root, "code-");
        assert!(
            !suggestions.is_empty(),
            "expected completions for prefix 'code-' in workspace root"
        );
    }

    // verbose=true must produce the paginated FILES view, never the SUMMARY
    // block (which is reserved for the condensed output path).
    #[tokio::test]
    async fn test_handle_overview_mode_verbose_no_summary_block() {
        use code_analyze_core::pagination::{PaginationMode, paginate_slice};
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let tmp = TempDir::new().unwrap();
        std::fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();

        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        );

        let params: AnalyzeDirectoryParams = serde_json::from_value(serde_json::json!({
            "path": tmp.path().to_str().unwrap(),
            "verbose": true,
        }))
        .unwrap();

        let ct = tokio_util::sync::CancellationToken::new();
        let (output, _cache_hit) = analyzer.handle_overview_mode(&params, ct).await.unwrap();

        // Mirrors the handler's size-based fallback to summary formatting.
        let use_summary = output.formatted.len() > SIZE_LIMIT; let paginated =
            paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
        let verbose = true;
        let formatted = if !use_summary {
            format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(std::path::Path::new(&params.path)),
                verbose,
            )
        } else {
            output.formatted.clone()
        };

        assert!(
            !formatted.contains("SUMMARY:"),
            "verbose=true must not emit SUMMARY: block; got: {}",
            &formatted[..formatted.len().min(300)]
        );
        assert!(
            formatted.contains("PAGINATED:"),
            "verbose=true must emit PAGINATED: header"
        );
        assert!(
            formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
            "verbose=true must emit FILES section header"
        );
    }

    // Second identical directory call must hit the overview cache.
    #[tokio::test]
    async fn test_analyze_directory_cache_hit_metrics() {
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("lib.rs"), "fn foo() {}").unwrap();
        let analyzer = make_analyzer();
        let params: AnalyzeDirectoryParams = serde_json::from_value(serde_json::json!({
            "path": dir.path().to_str().unwrap(),
        }))
        .unwrap();

        let ct1 = tokio_util::sync::CancellationToken::new();
        let (_out1, hit1) = analyzer.handle_overview_mode(&params, ct1).await.unwrap();

        let ct2 = tokio_util::sync::CancellationToken::new();
        let (_out2, hit2) = analyzer.handle_overview_mode(&params, ct2).await.unwrap();

        assert!(!hit1, "first call must be a cache miss");
        assert!(hit2, "second call must be a cache hit");
    }

    // analyze_module shares the FileDetails cache with analyze_file: after
    // handle_file_details_mode, the same (path, mtime) key must be present.
    #[tokio::test]
    async fn test_analyze_module_cache_hit_metrics() {
        use std::io::Write as _;
        use tempfile::NamedTempFile;

        let mut f = NamedTempFile::with_suffix(".rs").unwrap();
        writeln!(f, "fn bar() {{}}").unwrap();
        let path = f.path().to_str().unwrap().to_string();

        let analyzer = make_analyzer();

        let file_params = code_analyze_core::types::AnalyzeFileParams {
            path: path.clone(),
            ast_recursion_limit: None,
            fields: None,
            pagination: code_analyze_core::types::PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: code_analyze_core::types::OutputControlParams {
                summary: None,
                force: None,
                verbose: None,
            },
        };
        let (_cached, _) = analyzer
            .handle_file_details_mode(&file_params)
            .await
            .unwrap();

        let module_params = code_analyze_core::types::AnalyzeModuleParams { path: path.clone() };

        // Rebuild the same cache key analyze_module derives internally.
        let module_cache_key = std::fs::metadata(&path).ok().and_then(|meta| {
            meta.modified()
                .ok()
                .map(|mtime| code_analyze_core::cache::CacheKey {
                    path: std::path::PathBuf::from(&path),
                    modified: mtime,
                    mode: code_analyze_core::types::AnalysisMode::FileDetails,
                })
        });
        let cache_hit = module_cache_key
            .as_ref()
            .and_then(|k| analyzer.cache.get(k))
            .is_some();

        assert!(
            cache_hit,
            "analyze_module should find the file in the shared file cache"
        );
        drop(module_params);
    }

    // import_lookup=true with an empty symbol is invalid.
    #[test]
    fn test_analyze_symbol_import_lookup_invalid_params() {
        let result = CodeAnalyzer::validate_import_lookup(Some(true), "");

        assert!(
            result.is_err(),
            "import_lookup=true with empty symbol must return Err"
        );
        let err = result.unwrap_err();
        assert_eq!(
            err.code,
            rmcp::model::ErrorCode::INVALID_PARAMS,
            "expected INVALID_PARAMS; got {:?}",
            err.code
        );
    }

    // A file importing the module path must be reported as a match.
    #[tokio::test]
    async fn test_analyze_symbol_import_lookup_found() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(
            dir.path().join("main.rs"),
            "use std::collections::HashMap;\nfn main() {}\n",
        )
        .unwrap();

        let entries = traversal::walk_directory(dir.path(), None).unwrap();

        let output =
            analyze::analyze_import_lookup(dir.path(), "std::collections", &entries, None).unwrap();

        assert!(
            output.formatted.contains("MATCHES: 1"),
            "expected 1 match; got: {}",
            output.formatted
        );
        assert!(
            output.formatted.contains("main.rs"),
            "expected main.rs in output; got: {}",
            output.formatted
        );
    }

    // An unknown module path yields zero matches, not an error.
    #[tokio::test]
    async fn test_analyze_symbol_import_lookup_empty() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.rs"), "fn main() {}\n").unwrap();

        let entries = traversal::walk_directory(dir.path(), None).unwrap();

        let output =
            analyze::analyze_import_lookup(dir.path(), "no_such_module", &entries, None).unwrap();

        assert!(
            output.formatted.contains("MATCHES: 0"),
            "expected 0 matches; got: {}",
            output.formatted
        );
    }

    // git_ref filtering outside a git repository must surface a git error.
    #[tokio::test]
    async fn test_analyze_directory_git_ref_non_git_repo() {
        use code_analyze_core::traversal::changed_files_from_git_ref;
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        std::fs::write(dir.path().join("main.rs"), "fn main() {}").unwrap();

        let result = changed_files_from_git_ref(dir.path(), "HEAD~1");

        assert!(result.is_err(), "non-git dir must return an error");
        let err_msg = result.unwrap_err().to_string();
        assert!(
            err_msg.contains("git"),
            "error must mention git; got: {err_msg}"
        );
    }

    // filter_entries_by_git_ref must keep only files present in the changed
    // set (directories are handled separately).
    #[tokio::test]
    async fn test_analyze_directory_git_ref_filters_changed_files() {
        use code_analyze_core::traversal::{changed_files_from_git_ref, filter_entries_by_git_ref};
        use std::collections::HashSet;
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        let changed_file = dir.path().join("changed.rs");
        let unchanged_file = dir.path().join("unchanged.rs");
        std::fs::write(&changed_file, "fn changed() {}").unwrap();
        std::fs::write(&unchanged_file, "fn unchanged() {}").unwrap();

        let entries = traversal::walk_directory(dir.path(), None).unwrap();
        let total_files = entries.iter().filter(|e| !e.is_dir).count();
        assert_eq!(total_files, 2, "sanity: 2 files before filtering");

        let mut changed: HashSet<std::path::PathBuf> = HashSet::new();
        changed.insert(changed_file.clone());

        let filtered = filter_entries_by_git_ref(entries, &changed, dir.path());
        let filtered_files: Vec<_> = filtered.iter().filter(|e| !e.is_dir).collect();

        assert_eq!(
            filtered_files.len(),
            1,
            "only 1 file must remain after git_ref filter"
        );
        assert_eq!(
            filtered_files[0].path, changed_file,
            "the remaining file must be the changed one"
        );

        let _ = changed_files_from_git_ref;
    }

    // End-to-end: a real git repo with two commits; git_ref=HEAD~1 must keep
    // only the file added by the most recent commit.
    #[tokio::test]
    async fn test_handle_overview_mode_git_ref_filters_via_handler() {
        use code_analyze_core::types::{
            AnalyzeDirectoryParams, OutputControlParams, PaginationParams,
        };
        use std::process::Command;
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        let repo = dir.path();

        // Run git with hooks disabled so developer-machine hooks cannot
        // interfere with the test repo.
        let git_no_hook = |repo_path: &std::path::Path, args: &[&str]| {
            let mut cmd = std::process::Command::new("git");
            cmd.args(["-c", "core.hooksPath=/dev/null"]);
            cmd.args(args);
            cmd.current_dir(repo_path);
            let out = cmd.output().unwrap();
            assert!(out.status.success(), "{out:?}");
        };
        git_no_hook(repo, &["init"]);
        git_no_hook(
            repo,
            &[
                "-c",
                "user.email=ci@example.com",
                "-c",
                "user.name=CI",
                "commit",
                "--allow-empty",
                "-m",
                "initial",
            ],
        );

        std::fs::write(repo.join("file_a.rs"), "fn a() {}").unwrap();
        git_no_hook(repo, &["add", "file_a.rs"]);
        git_no_hook(
            repo,
            &[
                "-c",
                "user.email=ci@example.com",
                "-c",
                "user.name=CI",
                "commit",
                "-m",
                "add a",
            ],
        );

        std::fs::write(repo.join("file_b.rs"), "fn b() {}").unwrap();
        git_no_hook(repo, &["add", "file_b.rs"]);
        git_no_hook(
            repo,
            &[
                "-c",
                "user.email=ci@example.com",
                "-c",
                "user.name=CI",
                "commit",
                "-m",
                "add b",
            ],
        );

        // Canonicalize to avoid symlinked tempdir paths confusing git.
        let canon_repo = std::fs::canonicalize(repo).unwrap();
        let analyzer = make_analyzer();
        let params: AnalyzeDirectoryParams = serde_json::from_value(serde_json::json!({
            "path": canon_repo.to_str().unwrap(),
            "git_ref": "HEAD~1",
        }))
        .unwrap();
        let ct = tokio_util::sync::CancellationToken::new();
        let (arc_output, _cache_hit) = analyzer
            .handle_overview_mode(&params, ct)
            .await
            .expect("handle_overview_mode with git_ref must succeed");

        let formatted = &arc_output.formatted;
        assert!(
            formatted.contains("file_b.rs"),
            "git_ref=HEAD~1 output must include file_b.rs; got:\n{formatted}"
        );
        assert!(
            !formatted.contains("file_a.rs"),
            "git_ref=HEAD~1 output must exclude file_a.rs; got:\n{formatted}"
        );
    }
}