1pub mod analyze;
17pub mod cache;
18pub mod completion;
19pub mod formatter;
20pub mod graph;
21pub mod lang;
22pub mod languages;
23pub mod logging;
24pub mod metrics;
25pub mod pagination;
26pub mod parser;
27pub(crate) mod schema_helpers;
28pub mod test_detection;
29pub mod traversal;
30pub mod types;
31
/// Directory names skipped during traversal: dependency folders, VCS metadata,
/// build artifacts, and Python caches/virtualenvs.
/// NOTE(review): not referenced in this chunk — presumably consumed by the
/// traversal module; confirm against `traversal::walk_directory`.
pub(crate) const EXCLUDED_DIRS: &[&str] = &[
    "node_modules",
    "vendor",
    ".git",
    "__pycache__",
    "target",
    "dist",
    "build",
    ".venv",
];
42
43use cache::AnalysisCache;
44use formatter::{
45 format_file_details_paginated, format_file_details_summary, format_focused_paginated,
46 format_module_info, format_structure_paginated, format_summary,
47};
48use logging::LogEvent;
49use pagination::{
50 CursorData, DEFAULT_PAGE_SIZE, PaginationMode, decode_cursor, encode_cursor, paginate_slice,
51};
52use rmcp::handler::server::tool::{ToolRouter, schema_for_type};
53use rmcp::handler::server::wrapper::Parameters;
54use rmcp::model::{
55 CallToolResult, CancelledNotificationParam, CompleteRequestParams, CompleteResult,
56 CompletionInfo, Content, ErrorData, Implementation, InitializeResult, LoggingLevel,
57 LoggingMessageNotificationParam, Meta, Notification, NumberOrString, ProgressNotificationParam,
58 ProgressToken, ServerCapabilities, ServerNotification, SetLevelRequestParams,
59};
60use rmcp::service::{NotificationContext, RequestContext};
61use rmcp::{Peer, RoleServer, ServerHandler, tool, tool_handler, tool_router};
62use serde_json::Value;
63use std::path::Path;
64use std::sync::{Arc, Mutex};
65use tokio::sync::{Mutex as TokioMutex, mpsc};
66use tracing::{instrument, warn};
67use tracing_subscriber::filter::LevelFilter;
68use traversal::walk_directory;
69use types::{
70 AnalysisMode, AnalyzeDirectoryParams, AnalyzeFileParams, AnalyzeModuleParams,
71 AnalyzeSymbolParams,
72};
73
/// Process-wide session counter.
/// NOTE(review): not referenced in this chunk — presumably incremented elsewhere
/// to assign unique session identifiers; confirm at the point of use.
static GLOBAL_SESSION_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
75
/// Output size threshold in characters (50K). Above this, tools either
/// auto-summarize or reject the request unless `force=true` is supplied
/// (see the size-limit branches in the tool handlers below).
const SIZE_LIMIT: usize = 50_000;
77
/// Returns `true` when the caller requested both `summary=true` and a
/// pagination cursor — a combination the tools treat as mutually exclusive.
pub fn summary_cursor_conflict(summary: Option<bool>, cursor: Option<&str>) -> bool {
    matches!((summary, cursor), (Some(true), Some(_)))
}
81
82fn error_meta(
83 category: &'static str,
84 is_retryable: bool,
85 suggested_action: &'static str,
86) -> Option<serde_json::Value> {
87 Some(serde_json::json!({
88 "errorCategory": category,
89 "isRetryable": is_retryable,
90 "suggestedAction": suggested_action,
91 }))
92}
93
94fn err_to_tool_result(e: ErrorData) -> CallToolResult {
95 CallToolResult::error(vec![Content::text(e.message)])
96}
97
98fn no_cache_meta() -> Meta {
99 let mut m = serde_json::Map::new();
100 m.insert(
101 "cache_hint".to_string(),
102 serde_json::Value::String("no-cache".to_string()),
103 );
104 Meta(m)
105}
106
107fn paginate_focus_chains(
110 chains: &[graph::CallChain],
111 mode: PaginationMode,
112 offset: usize,
113 page_size: usize,
114) -> Result<(Vec<graph::CallChain>, Option<String>), ErrorData> {
115 let paginated = paginate_slice(chains, offset, page_size, mode).map_err(|e| {
116 ErrorData::new(
117 rmcp::model::ErrorCode::INTERNAL_ERROR,
118 e.to_string(),
119 error_meta("transient", true, "retry the request"),
120 )
121 })?;
122
123 if paginated.next_cursor.is_none() && offset == 0 {
124 return Ok((paginated.items, None));
125 }
126
127 let next = if let Some(raw_cursor) = paginated.next_cursor {
128 let decoded = decode_cursor(&raw_cursor).map_err(|e| {
129 ErrorData::new(
130 rmcp::model::ErrorCode::INVALID_PARAMS,
131 e.to_string(),
132 error_meta("validation", false, "invalid cursor format"),
133 )
134 })?;
135 Some(
136 encode_cursor(&CursorData {
137 mode,
138 offset: decoded.offset,
139 })
140 .map_err(|e| {
141 ErrorData::new(
142 rmcp::model::ErrorCode::INVALID_PARAMS,
143 e.to_string(),
144 error_meta("validation", false, "invalid cursor format"),
145 )
146 })?,
147 )
148 } else {
149 None
150 };
151
152 Ok((paginated.items, next))
153}
154
/// MCP server handler exposing the code-analysis tools
/// (`analyze_directory`, `analyze_file`, `analyze_symbol`, `analyze_module`).
#[derive(Clone)]
pub struct CodeAnalyzer {
    // Routes incoming tool calls to the #[tool]-annotated methods below.
    tool_router: ToolRouter<Self>,
    // Mtime-keyed cache of per-file analysis results (see handle_file_details_mode).
    cache: AnalysisCache,
    // Connected client peer used to push progress notifications; None until a
    // client is attached.
    peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
    // Shared logging level filter. NOTE(review): not read in this chunk —
    // presumably consumed by the logging setup; confirm.
    log_level_filter: Arc<Mutex<LevelFilter>>,
    // Receiver half of the log-event channel; wrapped in Option so a consumer
    // can take exclusive ownership of it.
    event_rx: Arc<TokioMutex<Option<mpsc::UnboundedReceiver<LogEvent>>>>,
    // Channel on which per-call metric events are emitted.
    metrics_tx: crate::metrics::MetricsSender,
    // Monotonic sequence number assigned to each tool call in this session.
    session_call_seq: Arc<std::sync::atomic::AtomicU32>,
    // Session identifier, if one has been assigned.
    session_id: Arc<TokioMutex<Option<String>>>,
}
166
167#[tool_router]
168impl CodeAnalyzer {
169 pub fn list_tools() -> Vec<rmcp::model::Tool> {
170 Self::tool_router().list_all()
171 }
172
173 pub fn new(
174 peer: Arc<TokioMutex<Option<Peer<RoleServer>>>>,
175 log_level_filter: Arc<Mutex<LevelFilter>>,
176 event_rx: mpsc::UnboundedReceiver<LogEvent>,
177 metrics_tx: crate::metrics::MetricsSender,
178 ) -> Self {
179 CodeAnalyzer {
180 tool_router: Self::tool_router(),
181 cache: AnalysisCache::new(100),
182 peer,
183 log_level_filter,
184 event_rx: Arc::new(TokioMutex::new(Some(event_rx))),
185 metrics_tx,
186 session_call_seq: Arc::new(std::sync::atomic::AtomicU32::new(0)),
187 session_id: Arc::new(TokioMutex::new(None)),
188 }
189 }
190
191 #[instrument(skip(self))]
192 async fn emit_progress(
193 &self,
194 peer: Option<Peer<RoleServer>>,
195 token: &ProgressToken,
196 progress: f64,
197 total: f64,
198 message: String,
199 ) {
200 if let Some(peer) = peer {
201 let notification = ServerNotification::ProgressNotification(Notification::new(
202 ProgressNotificationParam {
203 progress_token: token.clone(),
204 progress,
205 total: Some(total),
206 message: Some(message),
207 },
208 ));
209 if let Err(e) = peer.send_notification(notification).await {
210 warn!("Failed to send progress notification: {}", e);
211 }
212 }
213 }
214
    /// Runs a full directory overview: walks the tree, analyzes every file on a
    /// blocking task, and streams progress notifications while it runs.
    ///
    /// Cancellation is observed via `ct`; a cancelled run surfaces as an
    /// `INTERNAL_ERROR` with a retryable "transient" category.
    #[instrument(skip(self, params, ct))]
    async fn handle_overview_mode(
        &self,
        params: &AnalyzeDirectoryParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::AnalysisOutput, ErrorData> {
        let path = Path::new(&params.path);
        // Shared progress counter, incremented by the blocking analysis task.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let ct_clone = ct.clone();

        // Walk the whole tree up-front (no depth cap) so subtree counts can be
        // computed even when the analyzed depth is limited below.
        let all_entries = walk_directory(path, None).map_err(|e| {
            ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Failed to walk directory: {}", e),
                error_meta("resource", false, "check path permissions and availability"),
            )
        })?;

        // Per-subtree rollup counts are only needed when a positive depth cap
        // hides part of the tree.
        let subtree_counts = if max_depth.is_some_and(|d| d > 0) {
            Some(traversal::subtree_counts_from_entries(path, &all_entries))
        } else {
            None
        };

        // Restrict the analyzed entries to the requested depth, if any.
        let entries: Vec<traversal::WalkEntry> = if let Some(depth) = max_depth
            && depth > 0
        {
            all_entries
                .into_iter()
                .filter(|e| e.depth <= depth as usize)
                .collect()
        } else {
            all_entries
        };

        // Denominator for progress notifications (directories excluded).
        let total_files = entries.iter().filter(|e| !e.is_dir).count();

        // Heavy parsing runs off the async runtime on a blocking thread.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_directory_with_progress(&path_owned, entries, counter_clone, ct_clone)
        });

        // Progress token made unique via the current time in nanoseconds
        // (falls back to 0 if the clock is before the epoch).
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-overview-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll every 100ms: emit progress whenever the counter moved, stop on
        // cancellation or when the blocking task finishes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!("Analyzing {}/{} files", current, total_files),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification, skipped when the run was cancelled.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!("Completed analyzing {} files", total_files),
            )
            .await;
        }

        // Attach the subtree rollup to the successful output; map analysis and
        // join failures to protocol errors with category metadata.
        match handle.await {
            Ok(Ok(mut output)) => {
                output.subtree_counts = subtree_counts;
                Ok(output)
            }
            Ok(Err(analyze::AnalyzeError::Cancelled)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                "Analysis cancelled".to_string(),
                error_meta("transient", true, "analysis was cancelled"),
            )),
            Ok(Err(e)) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Error analyzing directory: {}", e),
                error_meta("resource", false, "check path and file permissions"),
            )),
            Err(e) => Err(ErrorData::new(
                rmcp::model::ErrorCode::INTERNAL_ERROR,
                format!("Task join error: {}", e),
                error_meta("transient", true, "retry the request"),
            )),
        }
    }
338
339 #[instrument(skip(self, params))]
342 async fn handle_file_details_mode(
343 &self,
344 params: &AnalyzeFileParams,
345 ) -> Result<std::sync::Arc<analyze::FileAnalysisOutput>, ErrorData> {
346 let cache_key = std::fs::metadata(¶ms.path).ok().and_then(|meta| {
348 meta.modified().ok().map(|mtime| cache::CacheKey {
349 path: std::path::PathBuf::from(¶ms.path),
350 modified: mtime,
351 mode: AnalysisMode::FileDetails,
352 })
353 });
354
355 if let Some(ref key) = cache_key
357 && let Some(cached) = self.cache.get(key)
358 {
359 return Ok(cached);
360 }
361
362 match analyze::analyze_file(¶ms.path, params.ast_recursion_limit) {
364 Ok(output) => {
365 let arc_output = std::sync::Arc::new(output);
366 if let Some(ref key) = cache_key {
367 self.cache.put(key.clone(), arc_output.clone());
368 }
369 Ok(arc_output)
370 }
371 Err(e) => Err(ErrorData::new(
372 rmcp::model::ErrorCode::INTERNAL_ERROR,
373 format!("Error analyzing file: {}", e),
374 error_meta("resource", false, "check file path and permissions"),
375 )),
376 }
377 }
378
    /// Builds a focused call-graph analysis for a single symbol, running the
    /// scan on a blocking task while streaming progress notifications.
    ///
    /// When the caller set neither `summary` nor `force` and the full output
    /// exceeds `SIZE_LIMIT`, the analysis is re-run in summary form; if that
    /// re-run also fails, the call errors with guidance to use `summary=true`
    /// or `force=true`. An explicit `summary=false` over the limit (without
    /// `force=true`) errors immediately.
    #[instrument(skip(self, params, ct))]
    async fn handle_focused_mode(
        &self,
        params: &AnalyzeSymbolParams,
        ct: tokio_util::sync::CancellationToken,
    ) -> Result<analyze::FocusedAnalysisOutput, ErrorData> {
        let follow_depth = params.follow_depth.unwrap_or(1);
        // Shared progress counter, incremented by the blocking analysis task.
        let counter = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
        let counter_clone = counter.clone();
        let path = Path::new(&params.path);
        let path_owned = path.to_path_buf();
        let max_depth = params.max_depth;
        let symbol_owned = params.symbol.clone();
        let match_mode = params.match_mode.clone().unwrap_or_default();
        let ast_recursion_limit = params.ast_recursion_limit;
        let ct_clone = ct.clone();

        // Summary output is requested for the task only when summary=true and
        // force=true does not override it.
        let use_summary_for_task = params.output_control.force != Some(true)
            && params.output_control.summary == Some(true);

        // File count only scales the progress bar; a failed walk degrades to
        // no progress reporting rather than an error.
        let total_files = match walk_directory(path, max_depth) {
            Ok(entries) => entries.iter().filter(|e| !e.is_dir).count(),
            Err(_) => 0,
        };

        // Heavy parsing runs off the async runtime on a blocking thread.
        let handle = tokio::task::spawn_blocking(move || {
            analyze::analyze_focused_with_progress(
                &path_owned,
                &symbol_owned,
                match_mode,
                follow_depth,
                max_depth,
                ast_recursion_limit,
                counter_clone,
                ct_clone,
                use_summary_for_task,
            )
        });

        // Progress token made unique via the current time in nanoseconds
        // (falls back to 0 if the clock is before the epoch).
        let token = ProgressToken(NumberOrString::String(
            format!(
                "analyze-symbol-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .map(|d| d.as_nanos())
                    .unwrap_or(0)
            )
            .into(),
        ));
        let peer = self.peer.lock().await.clone();
        let mut last_progress = 0usize;
        let mut cancelled = false;
        // Poll every 100ms: emit progress whenever the counter moved, stop on
        // cancellation or when the blocking task finishes.
        loop {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            if ct.is_cancelled() {
                cancelled = true;
                break;
            }
            let current = counter.load(std::sync::atomic::Ordering::Relaxed);
            if current != last_progress && total_files > 0 {
                self.emit_progress(
                    peer.clone(),
                    &token,
                    current as f64,
                    total_files as f64,
                    format!(
                        "Analyzing {}/{} files for symbol '{}'",
                        current, total_files, params.symbol
                    ),
                )
                .await;
                last_progress = current;
            }
            if handle.is_finished() {
                break;
            }
        }

        // Final 100% notification, skipped when the run was cancelled.
        if !cancelled && total_files > 0 {
            self.emit_progress(
                peer.clone(),
                &token,
                total_files as f64,
                total_files as f64,
                format!(
                    "Completed analyzing {} files for symbol '{}'",
                    total_files, params.symbol
                ),
            )
            .await;
        }

        // Map analysis and join failures to protocol errors with category
        // metadata; keep the output mutable for the possible summary re-run.
        let mut output = match handle.await {
            Ok(Ok(output)) => output,
            Ok(Err(analyze::AnalyzeError::Cancelled)) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    "Analysis cancelled".to_string(),
                    error_meta("transient", true, "analysis was cancelled"),
                ));
            }
            Ok(Err(e)) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Error analyzing symbol: {}", e),
                    error_meta("resource", false, "check symbol name and file"),
                ));
            }
            Err(e) => {
                return Err(ErrorData::new(
                    rmcp::model::ErrorCode::INTERNAL_ERROR,
                    format!("Task join error: {}", e),
                    error_meta("transient", true, "retry the request"),
                ));
            }
        };

        // No explicit summary preference and the output is oversized: re-run
        // the whole analysis in summary mode and substitute its formatting.
        if params.output_control.summary.is_none()
            && params.output_control.force != Some(true)
            && output.formatted.len() > SIZE_LIMIT
        {
            let path_owned2 = Path::new(&params.path).to_path_buf();
            let symbol_owned2 = params.symbol.clone();
            let match_mode2 = params.match_mode.clone().unwrap_or_default();
            let follow_depth2 = params.follow_depth.unwrap_or(1);
            let max_depth2 = params.max_depth;
            let ast_recursion_limit2 = params.ast_recursion_limit;
            let counter2 = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
            let ct2 = ct.clone();
            let summary_result = tokio::task::spawn_blocking(move || {
                analyze::analyze_focused_with_progress(
                    &path_owned2,
                    &symbol_owned2,
                    match_mode2,
                    follow_depth2,
                    max_depth2,
                    ast_recursion_limit2,
                    counter2,
                    ct2,
                    true,
                )
            })
            .await;
            match summary_result {
                Ok(Ok(summary_output)) => {
                    output.formatted = summary_output.formatted;
                }
                _ => {
                    // Summary fallback failed: report the size violation.
                    // Rough heuristic: ~4 characters per token.
                    let estimated_tokens = output.formatted.len() / 4;
                    let message = format!(
                        "Output exceeds 50K chars ({} chars, ~{} tokens). Use summary=true or force=true.",
                        output.formatted.len(),
                        estimated_tokens
                    );
                    return Err(ErrorData::new(
                        rmcp::model::ErrorCode::INVALID_PARAMS,
                        message,
                        error_meta("validation", false, "use summary=true or force=true"),
                    ));
                }
            }
        } else if output.formatted.len() > SIZE_LIMIT
            && params.output_control.force != Some(true)
            && params.output_control.summary == Some(false)
        {
            // Caller explicitly refused the summary but the output is oversized
            // and force=true was not given: reject with remediation options.
            let estimated_tokens = output.formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - summary=true to get compact summary\n\
                 - Narrow your scope (smaller directory, specific file)",
                output.formatted.len(),
                estimated_tokens
            );
            return Err(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                error_meta(
                    "validation",
                    false,
                    "use force=true, summary=true, or narrow scope",
                ),
            ));
        }

        Ok(output)
    }
578
579 #[instrument(skip(self, context))]
580 #[tool(
581 name = "analyze_directory",
582 description = "Analyze directory structure and code metrics for multi-file overview. Use this tool for directories; use analyze_file for a single file. Returns a tree with LOC, function count, class count, and test file markers. Respects .gitignore (results may differ from raw filesystem listing because .gitignore rules are applied). For repos with 1000+ files, use max_depth=2-3 and summary=true to stay within token budgets. Note: max_depth controls what is analyzed (traversal depth), while page_size controls how results are returned (chunking); these are independent. Strategy comparison: prefer pagination (page_size=50) over force=true to reduce per-call token overhead; use summary=true when counts and structure are sufficient and no pagination is needed; force=true is an escape hatch for exceptional cases. Empty directories return an empty tree with zero counts. Output auto-summarizes at 50K chars; use summary=true to force compact output. Paginate large results with cursor and page_size. Example queries: Analyze the src/ directory to understand module structure; What files are in the tests/ directory and how large are they? summary=true and cursor are mutually exclusive; passing both returns an error.",
583 output_schema = schema_for_type::<analyze::AnalysisOutput>(),
584 annotations(
585 title = "Analyze Directory",
586 read_only_hint = true,
587 destructive_hint = false,
588 idempotent_hint = true,
589 open_world_hint = false
590 )
591 )]
592 async fn analyze_directory(
593 &self,
594 params: Parameters<AnalyzeDirectoryParams>,
595 context: RequestContext<RoleServer>,
596 ) -> Result<CallToolResult, ErrorData> {
597 let params = params.0;
598 let ct = context.ct.clone();
599 let _t_start = std::time::Instant::now();
600 let _param_path = params.path.clone();
601 let _max_depth_val = params.max_depth;
602 let _seq = self
603 .session_call_seq
604 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
605 let _sid = self.session_id.lock().await.clone();
606
607 let mut output = match self.handle_overview_mode(¶ms, ct).await {
609 Ok(v) => v,
610 Err(e) => return Ok(err_to_tool_result(e)),
611 };
612
613 if summary_cursor_conflict(
616 params.output_control.summary,
617 params.pagination.cursor.as_deref(),
618 ) {
619 return Ok(err_to_tool_result(ErrorData::new(
620 rmcp::model::ErrorCode::INVALID_PARAMS,
621 "summary=true is incompatible with a pagination cursor; use one or the other"
622 .to_string(),
623 error_meta("validation", false, "remove cursor or set summary=false"),
624 )));
625 }
626
627 let use_summary = if params.output_control.force == Some(true) {
629 false
630 } else if params.output_control.summary == Some(true) {
631 true
632 } else if params.output_control.summary == Some(false) {
633 false
634 } else {
635 output.formatted.len() > SIZE_LIMIT
636 };
637
638 if use_summary {
639 output.formatted = format_summary(
640 &output.entries,
641 &output.files,
642 params.max_depth,
643 Some(Path::new(¶ms.path)),
644 output.subtree_counts.as_deref(),
645 );
646 }
647
648 let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
650 let offset = if let Some(ref cursor_str) = params.pagination.cursor {
651 let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
652 ErrorData::new(
653 rmcp::model::ErrorCode::INVALID_PARAMS,
654 e.to_string(),
655 error_meta("validation", false, "invalid cursor format"),
656 )
657 }) {
658 Ok(v) => v,
659 Err(e) => return Ok(err_to_tool_result(e)),
660 };
661 cursor_data.offset
662 } else {
663 0
664 };
665
666 let paginated =
668 match paginate_slice(&output.files, offset, page_size, PaginationMode::Default) {
669 Ok(v) => v,
670 Err(e) => {
671 return Ok(err_to_tool_result(ErrorData::new(
672 rmcp::model::ErrorCode::INTERNAL_ERROR,
673 e.to_string(),
674 error_meta("transient", true, "retry the request"),
675 )));
676 }
677 };
678
679 let verbose = params.output_control.verbose.unwrap_or(false);
680 if !use_summary {
681 output.formatted = format_structure_paginated(
682 &paginated.items,
683 paginated.total,
684 params.max_depth,
685 Some(Path::new(¶ms.path)),
686 verbose,
687 );
688 }
689
690 if use_summary {
692 output.next_cursor = None;
693 } else {
694 output.next_cursor = paginated.next_cursor.clone();
695 }
696
697 let mut final_text = output.formatted.clone();
699 if !use_summary && let Some(cursor) = paginated.next_cursor {
700 final_text.push('\n');
701 final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
702 }
703
704 let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
705 .with_meta(Some(no_cache_meta()));
706 let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
707 result.structured_content = Some(structured);
708 let _dur = _t_start.elapsed().as_millis() as u64;
709 self.metrics_tx.send(crate::metrics::MetricEvent {
710 ts: crate::metrics::unix_ms(),
711 tool: "analyze_directory",
712 duration_ms: _dur,
713 output_chars: final_text.chars().count(),
714 param_path_depth: crate::metrics::path_component_count(&_param_path),
715 max_depth: _max_depth_val,
716 result: "ok",
717 error_type: None,
718 session_id: _sid,
719 seq: Some(_seq),
720 });
721 Ok(result)
722 }
723
    /// Tool entry point: single-file semantic analysis with pagination of
    /// top-level functions and a summary mode for oversized output.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_file",
        description = "Extract semantic structure from a single source file only; pass a directory to analyze_directory instead. Returns functions with signatures, types, and line ranges; class and method definitions with inheritance, fields, and imports. Supported languages: Rust, Go, Java, Python, TypeScript, TSX; unsupported file extensions return an error. Common mistake: passing a directory path returns an error; use analyze_directory for directories. Generated code with deeply nested ASTs may exceed 50K chars; use summary=true to get counts only. Supports pagination for large files via cursor/page_size. Use summary=true for compact output. Example queries: What functions are defined in src/lib.rs?; Show me the classes and their methods in src/analyzer.py",
        output_schema = schema_for_type::<analyze::FileAnalysisOutput>(),
        annotations(
            title = "Analyze File",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_file(
        &self,
        params: Parameters<AnalyzeFileParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let _ct = context.ct.clone();
        let _t_start = std::time::Instant::now();
        let _param_path = params.path.clone();
        let _seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let _sid = self.session_id.lock().await.clone();

        // Cached (or fresh) per-file analysis; errors become tool error results.
        let arc_output = match self.handle_file_details_mode(&params).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        let mut formatted = arc_output.formatted.clone();
        let line_count = arc_output.line_count;

        // Summary decision: force=true always wins, then an explicit summary
        // flag, and finally an automatic fallback above SIZE_LIMIT.
        let use_summary = if params.output_control.force == Some(true) {
            false
        } else if params.output_control.summary == Some(true) {
            true
        } else if params.output_control.summary == Some(false) {
            false
        } else {
            formatted.len() > SIZE_LIMIT
        };

        if use_summary {
            formatted = format_file_details_summary(&arc_output.semantic, &params.path, line_count);
        } else if formatted.len() > SIZE_LIMIT && params.output_control.force != Some(true) {
            // Caller explicitly refused the summary (summary=false) but the
            // output is oversized: reject with remediation options.
            // Rough heuristic: ~4 characters per token.
            let estimated_tokens = formatted.len() / 4;
            let message = format!(
                "Output exceeds 50K chars ({} chars, ~{} tokens). Use one of:\n\
                 - force=true to return full output\n\
                 - Narrow your scope (smaller directory, specific file)\n\
                 - Use analyze_symbol mode for targeted analysis\n\
                 - Reduce max_depth parameter",
                formatted.len(),
                estimated_tokens
            );
            return Ok(err_to_tool_result(ErrorData::new(
                rmcp::model::ErrorCode::INVALID_PARAMS,
                message,
                error_meta("validation", false, "use force=true or narrow scope"),
            )));
        }

        // Resolve the pagination window from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // Pagination covers only top-level functions: any function whose line
        // range falls inside a class's range is treated as a method and excluded.
        let top_level_fns: Vec<crate::types::FunctionInfo> = arc_output
            .semantic
            .functions
            .iter()
            .filter(|func| {
                !arc_output
                    .semantic
                    .classes
                    .iter()
                    .any(|class| func.line >= class.line && func.end_line <= class.end_line)
            })
            .cloned()
            .collect();

        let paginated =
            match paginate_slice(&top_level_fns, offset, page_size, PaginationMode::Default) {
                Ok(v) => v,
                Err(e) => {
                    return Ok(err_to_tool_result(ErrorData::new(
                        rmcp::model::ErrorCode::INTERNAL_ERROR,
                        e.to_string(),
                        error_meta("transient", true, "retry the request"),
                    )));
                }
            };

        let verbose = params.output_control.verbose.unwrap_or(false);
        if !use_summary {
            formatted = format_file_details_paginated(
                &paginated.items,
                paginated.total,
                &arc_output.semantic,
                &params.path,
                line_count,
                offset,
                verbose,
            );
            // The "related files" section is appended only on the first page.
            if offset == 0 {
                formatted.push_str(&crate::formatter::format_related_section(
                    std::path::Path::new(&params.path),
                    None,
                ));
            }
        }

        // Summary output is never paginated.
        let next_cursor = if use_summary {
            None
        } else {
            paginated.next_cursor.clone()
        };

        // Append a human-readable continuation marker when more pages exist.
        let mut final_text = formatted.clone();
        if !use_summary && let Some(ref cursor) = next_cursor {
            final_text.push('\n');
            final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
        }

        // Rebuild the structured output around the (possibly re-formatted) text.
        let response_output = analyze::FileAnalysisOutput {
            formatted,
            semantic: arc_output.semantic.clone(),
            line_count,
            next_cursor,
        };

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&response_output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let _dur = _t_start.elapsed().as_millis() as u64;
        // Record per-call metrics for the success path.
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_file",
            duration_ms: _dur,
            output_chars: final_text.chars().count(),
            param_path_depth: crate::metrics::path_component_count(&_param_path),
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: _sid,
            seq: Some(_seq),
        });
        Ok(result)
    }
902
    /// Tool entry point: call-graph analysis for a named symbol, with
    /// cursor-driven pagination over either the callers or callees chain set.
    #[instrument(skip(self, context))]
    #[tool(
        name = "analyze_symbol",
        description = "Build call graph for a named function or method across all files in a directory to trace a specific function's usage. Returns direct callers and callees. Default symbol lookup is case-sensitive exact-match (match_mode=exact); myFunc and myfunc are different symbols. If exact match fails, retry with match_mode=insensitive for a case-insensitive search. To list candidates matching a prefix, use match_mode=prefix. To find symbols containing a substring, use match_mode=contains. When prefix or contains matches multiple symbols, an error is returned listing all candidates so you can refine to a single match. A symbol unknown to the graph (not defined and not referenced) returns an error; a symbol that is defined but has no callers or callees returns empty chains without error. follow_depth warning: each increment can multiply output size exponentially; use follow_depth=1 for production use; follow_depth=2+ only for targeted deep dives. Use cursor/page_size to paginate call chains when results exceed page_size. Example queries: Find all callers of the parse_config function; Trace the call chain for MyClass.process_request up to 2 levels deep",
        output_schema = schema_for_type::<analyze::FocusedAnalysisOutput>(),
        annotations(
            title = "Analyze Symbol",
            read_only_hint = true,
            destructive_hint = false,
            idempotent_hint = true,
            open_world_hint = false
        )
    )]
    async fn analyze_symbol(
        &self,
        params: Parameters<AnalyzeSymbolParams>,
        context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let params = params.0;
        let ct = context.ct.clone();
        let _t_start = std::time::Instant::now();
        let _param_path = params.path.clone();
        let _max_depth_val = params.follow_depth;
        let _seq = self
            .session_call_seq
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let _sid = self.session_id.lock().await.clone();

        // Run the focused analysis; errors become tool error results.
        let mut output = match self.handle_focused_mode(&params, ct).await {
            Ok(v) => v,
            Err(e) => return Ok(err_to_tool_result(e)),
        };

        // Resolve the pagination window from the cursor, if one was supplied.
        let page_size = params.pagination.page_size.unwrap_or(DEFAULT_PAGE_SIZE);
        let offset = if let Some(ref cursor_str) = params.pagination.cursor {
            let cursor_data = match decode_cursor(cursor_str).map_err(|e| {
                ErrorData::new(
                    rmcp::model::ErrorCode::INVALID_PARAMS,
                    e.to_string(),
                    error_meta("validation", false, "invalid cursor format"),
                )
            }) {
                Ok(v) => v,
                Err(e) => return Ok(err_to_tool_result(e)),
            };
            cursor_data.offset
        } else {
            0
        };

        // The cursor also carries which chain set is being paged; a missing or
        // undecodable cursor defaults to the callers set.
        let cursor_mode = if let Some(ref cursor_str) = params.pagination.cursor {
            decode_cursor(cursor_str)
                .map(|c| c.mode)
                .unwrap_or(PaginationMode::Callers)
        } else {
            PaginationMode::Callers
        };

        let paginated_next_cursor = match cursor_mode {
            PaginationMode::Callers => {
                let (paginated_items, paginated_next) = match paginate_focus_chains(
                    &output.prod_chains,
                    PaginationMode::Callers,
                    offset,
                    page_size,
                ) {
                    Ok(v) => v,
                    Err(e) => return Ok(err_to_tool_result(e)),
                };

                // NOTE(review): when verbose=true with no pagination in play
                // (no next page, offset 0), the preformatted output from
                // handle_focused_mode is kept as-is — confirm intended.
                let verbose = params.output_control.verbose.unwrap_or(false);
                if paginated_next.is_some() || offset > 0 || !verbose {
                    let base_path = Path::new(&params.path);
                    output.formatted = format_focused_paginated(
                        &paginated_items,
                        output.prod_chains.len(),
                        PaginationMode::Callers,
                        &params.symbol,
                        &output.prod_chains,
                        &output.test_chains,
                        &output.outgoing_chains,
                        output.def_count,
                        offset,
                        Some(base_path),
                        verbose,
                    );
                    paginated_next
                } else {
                    None
                }
            }
            PaginationMode::Callees => {
                let (paginated_items, paginated_next) = match paginate_focus_chains(
                    &output.outgoing_chains,
                    PaginationMode::Callees,
                    offset,
                    page_size,
                ) {
                    Ok(v) => v,
                    Err(e) => return Ok(err_to_tool_result(e)),
                };

                let verbose = params.output_control.verbose.unwrap_or(false);
                if paginated_next.is_some() || offset > 0 || !verbose {
                    let base_path = Path::new(&params.path);
                    output.formatted = format_focused_paginated(
                        &paginated_items,
                        output.outgoing_chains.len(),
                        PaginationMode::Callees,
                        &params.symbol,
                        &output.prod_chains,
                        &output.test_chains,
                        &output.outgoing_chains,
                        output.def_count,
                        offset,
                        Some(base_path),
                        verbose,
                    );
                    paginated_next
                } else {
                    None
                }
            }
            PaginationMode::Default => {
                unreachable!("SymbolFocus should only use Callers or Callees modes")
            }
        };

        // Append a human-readable continuation marker when more pages exist.
        let mut final_text = output.formatted.clone();
        if let Some(cursor) = paginated_next_cursor {
            final_text.push('\n');
            final_text.push_str(&format!("NEXT_CURSOR: {}", cursor));
        }

        let mut result = CallToolResult::success(vec![Content::text(final_text.clone())])
            .with_meta(Some(no_cache_meta()));
        let structured = serde_json::to_value(&output).unwrap_or(Value::Null);
        result.structured_content = Some(structured);
        let _dur = _t_start.elapsed().as_millis() as u64;
        // Record per-call metrics for the success path.
        self.metrics_tx.send(crate::metrics::MetricEvent {
            ts: crate::metrics::unix_ms(),
            tool: "analyze_symbol",
            duration_ms: _dur,
            output_chars: final_text.chars().count(),
            param_path_depth: crate::metrics::path_component_count(&_param_path),
            max_depth: _max_depth_val,
            result: "ok",
            error_type: None,
            session_id: _sid,
            seq: Some(_seq),
        });
        Ok(result)
    }
1060
1061 #[instrument(skip(self))]
1062 #[tool(
1063 name = "analyze_module",
1064 description = "Index functions and imports in a single source file with minimal token cost. Returns name, line_count, language, function names with line numbers, and import list only -- no signatures, no types, no call graphs, no references. ~75% smaller output than analyze_file. Use analyze_file when you need function signatures, types, or class details; use analyze_module when you only need a function/import index to orient in a file or survey many files in sequence. Use analyze_directory for multi-file overviews; use analyze_symbol to trace call graphs for a specific function. Supported languages: Rust, Go, Java, Python, TypeScript, TSX; unsupported extensions return an error. Example queries: What functions are defined in src/analyze.rs?; List all imports in src/lib.rs. Pagination, summary, force, and verbose parameters are not supported by this tool.",
1065 output_schema = schema_for_type::<types::ModuleInfo>(),
1066 annotations(
1067 title = "Analyze Module",
1068 read_only_hint = true,
1069 destructive_hint = false,
1070 idempotent_hint = true,
1071 open_world_hint = false
1072 )
1073 )]
1074 async fn analyze_module(
1075 &self,
1076 params: Parameters<AnalyzeModuleParams>,
1077 _context: RequestContext<RoleServer>,
1078 ) -> Result<CallToolResult, ErrorData> {
1079 let params = params.0;
1080 let _t_start = std::time::Instant::now();
1081 let _param_path = params.path.clone();
1082 let _seq = self
1083 .session_call_seq
1084 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
1085 let _sid = self.session_id.lock().await.clone();
1086
1087 if std::fs::metadata(¶ms.path)
1089 .map(|m| m.is_dir())
1090 .unwrap_or(false)
1091 {
1092 let _dur = _t_start.elapsed().as_millis() as u64;
1093 self.metrics_tx.send(crate::metrics::MetricEvent {
1094 ts: crate::metrics::unix_ms(),
1095 tool: "analyze_module",
1096 duration_ms: _dur,
1097 output_chars: 0,
1098 param_path_depth: crate::metrics::path_component_count(&_param_path),
1099 max_depth: None,
1100 result: "error",
1101 error_type: Some("invalid_params".to_string()),
1102 session_id: _sid.clone(),
1103 seq: Some(_seq),
1104 });
1105 return Ok(err_to_tool_result(ErrorData::new(
1106 rmcp::model::ErrorCode::INVALID_PARAMS,
1107 format!(
1108 "'{}' is a directory. Use analyze_directory to analyze a directory, or pass a specific file path to analyze_module.",
1109 params.path
1110 ),
1111 error_meta("validation", false, "use analyze_directory for directories"),
1112 )));
1113 }
1114
1115 let module_info = match analyze::analyze_module_file(¶ms.path).map_err(|e| {
1116 ErrorData::new(
1117 rmcp::model::ErrorCode::INVALID_PARAMS,
1118 format!("Failed to analyze module: {}", e),
1119 error_meta(
1120 "validation",
1121 false,
1122 "ensure file exists, is readable, and has a supported extension",
1123 ),
1124 )
1125 }) {
1126 Ok(v) => v,
1127 Err(e) => return Ok(err_to_tool_result(e)),
1128 };
1129
1130 let text = format_module_info(&module_info);
1131 let mut result = CallToolResult::success(vec![Content::text(text.clone())])
1132 .with_meta(Some(no_cache_meta()));
1133 let structured = match serde_json::to_value(&module_info).map_err(|e| {
1134 ErrorData::new(
1135 rmcp::model::ErrorCode::INTERNAL_ERROR,
1136 format!("serialization failed: {}", e),
1137 error_meta("internal", false, "report this as a bug"),
1138 )
1139 }) {
1140 Ok(v) => v,
1141 Err(e) => return Ok(err_to_tool_result(e)),
1142 };
1143 result.structured_content = Some(structured);
1144 let _dur = _t_start.elapsed().as_millis() as u64;
1145 self.metrics_tx.send(crate::metrics::MetricEvent {
1146 ts: crate::metrics::unix_ms(),
1147 tool: "analyze_module",
1148 duration_ms: _dur,
1149 output_chars: text.chars().count(),
1150 param_path_depth: crate::metrics::path_component_count(&_param_path),
1151 max_depth: None,
1152 result: "ok",
1153 error_type: None,
1154 session_id: _sid,
1155 seq: Some(_seq),
1156 });
1157 Ok(result)
1158 }
1159}
1160
// `#[tool_handler]` generates the tool-dispatch plumbing (call_tool /
// list_tools) from the `#[tool]` methods registered on `CodeAnalyzer`;
// the methods below hand-implement the remaining MCP server callbacks.
#[tool_handler]
impl ServerHandler for CodeAnalyzer {
    /// Builds the `initialize` response: capabilities, server identity,
    /// and the client-facing workflow instructions (which embed the
    /// crate-wide `EXCLUDED_DIRS` list).
    fn get_info(&self) -> InitializeResult {
        let excluded = crate::EXCLUDED_DIRS.join(", ");
        let instructions = format!(
            "Recommended workflow for unknown repositories:\n\
            1. Start with analyze_directory(path=<repo_root>, max_depth=2, summary=true) to identify the source package directory \
            (typically the largest directory by file count; exclude {excluded}).\n\
            2. Re-run analyze_directory(path=<source_package>, max_depth=2, summary=true) for a module map with per-package class and function counts.\n\
            3. Use analyze_file on key files identified in step 2 (prefer files with high class counts for framework entry points).\n\
            4. Use analyze_symbol to trace call graphs for specific functions found in step 3.\n\
            Use analyze_module for a minimal schema (name, line count, functions, imports) when token budget is critical. \
            Prefer summary=true on large directories (1000+ files). Set max_depth=2 for the first call; increase only if packages are too large to differentiate. \
            Paginate with cursor/page_size. For subagents: DISABLE_PROMPT_CACHING=1."
        );
        // Advertised capabilities: logging, tools (with list-changed
        // notifications), and argument completions.
        let capabilities = ServerCapabilities::builder()
            .enable_logging()
            .enable_tools()
            .enable_tool_list_changed()
            .enable_completions()
            .build();
        let server_info = Implementation::new("code-analyze-mcp", env!("CARGO_PKG_VERSION"))
            .with_title("Code Analyze MCP")
            .with_description("MCP server for code structure analysis using tree-sitter");
        InitializeResult::new(capabilities)
            .with_server_info(server_info)
            .with_instructions(&instructions)
    }

    /// Runs once the client finishes initialization: stores the peer
    /// handle, assigns a fresh session id, resets the per-session call
    /// counter, and spawns the log-forwarding background task.
    async fn on_initialized(&self, context: NotificationContext<RoleServer>) {
        let mut peer_lock = self.peer.lock().await;
        *peer_lock = Some(context.peer.clone());
        drop(peer_lock);

        // Session id = "<unix-millis>-<global-counter>"; the process-wide
        // counter keeps ids unique even when two sessions initialize
        // within the same millisecond.
        let millis = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis() as u64;
        let counter = GLOBAL_SESSION_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        let sid = format!("{}-{}", millis, counter);
        {
            let mut session_id_lock = self.session_id.lock().await;
            *session_id_lock = Some(sid);
        }
        self.session_call_seq
            .store(0, std::sync::atomic::Ordering::Relaxed);

        let peer = self.peer.clone();
        let event_rx = self.event_rx.clone();

        // Background task: drain LogEvents from the channel and forward
        // each one to the client as an MCP logging notification.
        tokio::spawn(async move {
            let rx = {
                // take() means only the first initialization gets the
                // receiver; a later spawn finds None and exits at once.
                let mut rx_lock = event_rx.lock().await;
                rx_lock.take()
            };

            if let Some(mut receiver) = rx {
                let mut buffer = Vec::with_capacity(64);
                loop {
                    // Appends up to 64 queued events to `buffer`.
                    receiver.recv_many(&mut buffer, 64).await;

                    // Empty buffer after recv_many => channel closed and
                    // fully drained, so the task can terminate.
                    if buffer.is_empty() {
                        break;
                    }

                    let peer_lock = peer.lock().await;
                    if let Some(peer) = peer_lock.as_ref() {
                        for log_event in buffer.drain(..) {
                            let notification = ServerNotification::LoggingMessageNotification(
                                Notification::new(LoggingMessageNotificationParam {
                                    level: log_event.level,
                                    logger: Some(log_event.logger),
                                    data: log_event.data,
                                }),
                            );
                            // Delivery failures are logged, not retried.
                            if let Err(e) = peer.send_notification(notification).await {
                                warn!("Failed to send logging notification: {}", e);
                            }
                        }
                    }
                    // NOTE(review): if `peer` is None, `buffer` is kept and
                    // retried next iteration; should the channel close while
                    // the buffer is non-empty and the peer absent, this loop
                    // would spin. The peer is set before the spawn above, so
                    // this appears unreachable -- confirm it is never cleared.
                }
            }
        });
    }

    /// Request cancellation is acknowledged by logging only; in-flight
    /// work is not interrupted here.
    #[instrument(skip(self, _context))]
    async fn on_cancelled(
        &self,
        notification: CancelledNotificationParam,
        _context: NotificationContext<RoleServer>,
    ) {
        tracing::info!(
            request_id = ?notification.request_id,
            reason = ?notification.reason,
            "Received cancellation notification"
        );
    }

    /// Argument completion: "path" completes filesystem paths relative to
    /// the server's working directory; "symbol" completes symbols from the
    /// analysis cache and requires a "path" argument in the completion
    /// context. Any other argument name yields no completions.
    #[instrument(skip(self, _context))]
    async fn complete(
        &self,
        request: CompleteRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<CompleteResult, ErrorData> {
        let argument_name = &request.argument.name;
        let argument_value = &request.argument.value;

        let completions = match argument_name.as_str() {
            "path" => {
                let root = Path::new(".");
                completion::path_completions(root, argument_value)
            }
            "symbol" => {
                // Symbol completion needs to know which file to look in.
                let path_arg = request
                    .context
                    .as_ref()
                    .and_then(|ctx| ctx.get_argument("path"));

                match path_arg {
                    Some(path_str) => {
                        let path = Path::new(path_str);
                        completion::symbol_completions(&self.cache, path, argument_value)
                    }
                    None => Vec::new(),
                }
            }
            _ => Vec::new(),
        };

        // Cap the response at 100 values but report the true total and a
        // has_more flag so the client knows the list was truncated.
        let total_count = completions.len() as u32;
        let (values, has_more) = if completions.len() > 100 {
            (completions.into_iter().take(100).collect(), true)
        } else {
            (completions, false)
        };

        // Fall back to an empty completion set if pagination metadata is
        // rejected, rather than failing the whole request.
        let completion_info =
            match CompletionInfo::with_pagination(values, Some(total_count), has_more) {
                Ok(info) => info,
                Err(_) => {
                    CompletionInfo::with_all_values(Vec::new())
                        .unwrap_or_else(|_| CompletionInfo::new(Vec::new()).unwrap())
                }
            };

        Ok(CompleteResult::new(completion_info))
    }

    /// Maps the client-requested MCP logging level onto tracing's coarser
    /// `LevelFilter` (Notice folds into INFO; Critical/Alert/Emergency
    /// fold into ERROR) and stores it in the shared filter.
    async fn set_level(
        &self,
        params: SetLevelRequestParams,
        _context: RequestContext<RoleServer>,
    ) -> Result<(), ErrorData> {
        let level_filter = match params.level {
            LoggingLevel::Debug => LevelFilter::DEBUG,
            LoggingLevel::Info => LevelFilter::INFO,
            LoggingLevel::Notice => LevelFilter::INFO,
            LoggingLevel::Warning => LevelFilter::WARN,
            LoggingLevel::Error => LevelFilter::ERROR,
            LoggingLevel::Critical => LevelFilter::ERROR,
            LoggingLevel::Alert => LevelFilter::ERROR,
            LoggingLevel::Emergency => LevelFilter::ERROR,
        };

        // std Mutex (not tokio): held only for this assignment, never
        // across an await. unwrap() panics only if a writer panicked while
        // holding the lock (poisoning).
        let mut filter_lock = self.log_level_filter.lock().unwrap();
        *filter_lock = level_filter;
        Ok(())
    }
}
1340
#[cfg(test)]
mod tests {
    use super::*;

    /// `emit_progress` must be a silent no-op (no panic, no send) when no
    /// peer has been attached to the analyzer yet.
    #[tokio::test]
    async fn test_emit_progress_none_peer_is_noop() {
        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        );
        let token = ProgressToken(NumberOrString::String("test".into()));
        analyzer
            .emit_progress(None, &token, 0.0, 10.0, "test".to_string())
            .await;
    }

    /// verbose=true must render the full paginated FILES listing and must
    /// never emit the compact SUMMARY: block.
    #[tokio::test]
    async fn test_handle_overview_mode_verbose_no_summary_block() {
        use crate::pagination::{PaginationMode, paginate_slice};
        use crate::types::{AnalyzeDirectoryParams, OutputControlParams, PaginationParams};
        use tempfile::TempDir;

        let tmp = TempDir::new().unwrap();
        std::fs::write(tmp.path().join("main.rs"), "fn main() {}").unwrap();

        let peer = Arc::new(TokioMutex::new(None));
        let log_level_filter = Arc::new(Mutex::new(LevelFilter::INFO));
        let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let (metrics_tx, _metrics_rx) = tokio::sync::mpsc::unbounded_channel();
        let analyzer = CodeAnalyzer::new(
            peer,
            log_level_filter,
            rx,
            crate::metrics::MetricsSender(metrics_tx),
        );

        let params = AnalyzeDirectoryParams {
            path: tmp.path().to_str().unwrap().to_string(),
            max_depth: None,
            pagination: PaginationParams {
                cursor: None,
                page_size: None,
            },
            output_control: OutputControlParams {
                summary: None,
                force: None,
                verbose: Some(true),
            },
        };

        let ct = tokio_util::sync::CancellationToken::new();
        let output = analyzer.handle_overview_mode(&params, ct).await.unwrap();

        // Mirror the production decision: fall back to the precomputed
        // summary text only when the full rendering would exceed SIZE_LIMIT.
        let use_summary = output.formatted.len() > SIZE_LIMIT;
        let paginated =
            paginate_slice(&output.files, 0, DEFAULT_PAGE_SIZE, PaginationMode::Default).unwrap();
        let verbose = true;
        let formatted = if !use_summary {
            format_structure_paginated(
                &paginated.items,
                paginated.total,
                params.max_depth,
                Some(std::path::Path::new(&params.path)),
                verbose,
            )
        } else {
            output.formatted.clone()
        };

        // Char-boundary-safe preview for the failure message: byte slicing
        // (`&formatted[..formatted.len().min(300)]`) would panic if byte 300
        // fell inside a multi-byte UTF-8 character (e.g. a non-ASCII path).
        let preview: String = formatted.chars().take(300).collect();
        assert!(
            !formatted.contains("SUMMARY:"),
            "verbose=true must not emit SUMMARY: block; got: {}",
            preview
        );
        assert!(
            formatted.contains("PAGINATED:"),
            "verbose=true must emit PAGINATED: header"
        );
        assert!(
            formatted.contains("FILES [LOC, FUNCTIONS, CLASSES]"),
            "verbose=true must emit FILES section header"
        );
    }
}