1use crate::project_settings::ProjectSettings;
18use crate::tools::warm_guard::WarmGuard;
19use async_trait::async_trait;
20use limit_agent::AgentError;
21use limit_agent::Tool;
22use limit_tldr::{Config as TldrConfig, Language, TLDR};
23use serde::{Deserialize, Serialize};
24use serde_json::{json, Value};
25use std::path::{Path, PathBuf};
26use std::sync::atomic::{AtomicBool, Ordering};
27use std::sync::Arc;
28use tokio::sync::{Notify, OnceCell};
29use tracing::{info, trace, warn};
30
/// The kind of analysis `tldr_analyze` should perform.
///
/// Serialized as snake_case strings (e.g. `dead_code`) to match the
/// tool-schema enum in [`tldr_tool_definition`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AnalysisType {
    /// Dependencies and callers around one function, up to `depth` hops.
    Context,
    /// Source text of a function or struct (truncated to 80 lines).
    Source,
    /// Reverse call graph: who calls the given function.
    Impact,
    /// Control-flow graph metrics (complexity, block count) for one function.
    Cfg,
    /// Data-flow graph metrics (variables, flows) for one function.
    Dfg,
    /// Functions unreachable from the given entry points.
    DeadCode,
    /// High-level layering of the project (entry / middle / leaf functions).
    Architecture,
    /// Semantic search across functions, constants, and structs.
    Search,
}
52
/// Parameters accepted by the `tldr_analyze` tool.
///
/// Which fields are required depends on `analysis_type`; the `analyze`
/// method validates per-type and returns a `ToolError` when a required
/// field is missing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TldrParams {
    /// Which analysis to run.
    pub analysis_type: AnalysisType,

    /// Function or struct name (required for context, source, impact, cfg, dfg).
    pub function: Option<String>,

    /// File path relative to the project root (required for cfg/dfg,
    /// optional disambiguator for source).
    pub file: Option<String>,

    /// Traversal depth for context analysis (serde default: 2).
    #[serde(default = "default_depth")]
    pub depth: usize,

    /// Entry points for dead-code detection (serde default: ["main"]).
    #[serde(default = "default_entries")]
    pub entries: Vec<String>,

    /// Query string for semantic search; falls back to `function` when absent.
    pub query: Option<String>,

    /// Maximum number of search results (serde default: 10).
    #[serde(default = "default_limit")]
    pub limit: usize,

    /// Project root directory; defaults to the tool's configured project.
    pub project_path: Option<String>,
}
83
/// Serde default for `TldrParams::depth`.
fn default_depth() -> usize {
    2
}
/// Serde default for `TldrParams::entries`.
fn default_entries() -> Vec<String> {
    vec!["main".to_string()]
}
/// Serde default for `TldrParams::limit`.
fn default_limit() -> usize {
    10
}
93
/// Agent tool exposing TLDR code analysis with a lazily warmed,
/// process-wide cached index.
pub struct TldrTool {
    /// Single cached (project path, warmed TLDR) pair; set at most once.
    cache: Arc<OnceCell<(PathBuf, Arc<TLDR>)>>,
    /// Project used when a request carries no `project_path`.
    default_project: PathBuf,
    /// Wakes `get_tldr` waiters when the background warm finishes or fails.
    warm_notify: Arc<Notify>,
    /// Ensures the background warm task is spawned at most once.
    warm_started: Arc<AtomicBool>,
}
105
106impl TldrTool {
107 pub fn new() -> Self {
109 let default_project = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
110 Self {
111 cache: Arc::new(OnceCell::new()),
112 default_project,
113 warm_notify: Arc::new(Notify::new()),
114 warm_started: Arc::new(AtomicBool::new(false)),
115 }
116 }
117
    /// Create a tool rooted at an explicit project directory.
    pub fn with_project<P: Into<PathBuf>>(project: P) -> Self {
        Self {
            cache: Arc::new(OnceCell::new()),
            default_project: project.into(),
            warm_notify: Arc::new(Notify::new()),
            warm_started: Arc::new(AtomicBool::new(false)),
        }
    }
127
128 fn ensure_pre_warm_started(&self) {
130 if self
131 .warm_started
132 .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
133 .is_ok()
134 {
135 let project = self.default_project.clone();
136 let cache = Arc::clone(&self.cache);
137 let notify = Arc::clone(&self.warm_notify);
138
139 tokio::spawn(async move {
140 Self::pre_warm(project, cache, notify).await;
141 });
142 }
143 }
144
145 async fn pre_warm(
147 project_path: PathBuf,
148 cache: Arc<OnceCell<(PathBuf, Arc<TLDR>)>>,
149 notify: Arc<Notify>,
150 ) {
151 let cache_dir = match Self::get_cache_dir(&project_path) {
152 Ok(dir) => dir,
153 Err(e) => {
154 warn!("pre_warm: failed to get cache dir: {}", e);
155 notify.notify_waiters();
156 return;
157 }
158 };
159
160 let guard = WarmGuard::new(&cache_dir);
161 if guard.is_fresh(&project_path) {
162 info!("pre_warm: cache files fresh, loading without re-warming");
163 let config = TldrConfig {
167 language: Language::Auto,
168 max_depth: 3,
169 cache_dir: Some(cache_dir),
170 };
171 match TLDR::new(&project_path, config).await {
172 Ok(mut tldr) => match tldr.warm().await {
173 Ok(()) => {
174 let _ = cache.set((project_path, Arc::new(tldr))).map_err(|_| {
175 trace!("pre_warm: OnceCell already set (race with get_tldr)");
176 });
177 info!("pre_warm: warm from cache complete");
178 }
179 Err(e) => warn!("pre_warm: warm from cache failed: {}", e),
180 },
181 Err(e) => warn!("pre_warm: TLDR::new failed: {}", e),
182 }
183 notify.notify_waiters();
184 return;
185 }
186
187 info!("pre_warm: warming TLDR for {:?}", project_path);
188 let config = TldrConfig {
189 language: Language::Auto,
190 max_depth: 3,
191 cache_dir: Some(cache_dir),
192 };
193
194 match TLDR::new(&project_path, config).await {
195 Ok(mut tldr) => match tldr.warm().await {
196 Ok(()) => {
197 guard.save(&project_path);
198 info!("pre_warm: warm complete");
199 let _ = cache.set((project_path, Arc::new(tldr))).map_err(|_| {
200 trace!("pre_warm: OnceCell already set (race with get_tldr)");
201 });
202 notify.notify_waiters();
203 }
204 Err(e) => warn!("pre_warm: warm failed: {}", e),
205 },
206 Err(e) => warn!("pre_warm: TLDR::new failed: {}", e),
207 }
208 notify.notify_waiters();
209 }
210
211 async fn get_tldr(&self, project_path: &Path) -> Result<Arc<TLDR>, AgentError> {
216 let project_path = project_path.to_path_buf();
217 let project_path_for_check = project_path.clone();
218
219 self.ensure_pre_warm_started();
221
222 if let Some((cached_path, tldr)) = self.cache.get() {
224 if *cached_path == project_path_for_check {
225 trace!("TLDR cache hit for project: {:?}", project_path_for_check);
226 return Ok(Arc::clone(tldr));
227 }
228 warn!(
229 "get_tldr: ignoring project_path {:?}, using cached {:?}",
230 project_path_for_check, cached_path
231 );
232 return Ok(Arc::clone(tldr));
233 }
234
235 info!("get_tldr: waiting for pre_warm...");
237 tokio::select! {
238 _ = self.warm_notify.notified() => {
239 if let Some((cached_path, tldr)) = self.cache.get() {
241 if *cached_path == project_path_for_check {
242 trace!("TLDR cache hit after pre_warm for: {:?}", project_path_for_check);
243 return Ok(Arc::clone(tldr));
244 }
245 warn!(
246 "get_tldr: ignoring project_path {:?}, using cached {:?}",
247 project_path_for_check, cached_path
248 );
249 return Ok(Arc::clone(tldr));
250 }
251 warn!("get_tldr: pre_warm did not populate cache, falling back to lazy");
253 }
254 _ = tokio::time::sleep(std::time::Duration::from_secs(30)) => {
255 warn!("get_tldr: pre_warm timed out after 30s");
256 if let Some((cached_path, tldr)) = self.cache.get() {
258 if *cached_path == project_path_for_check {
259 info!("get_tldr: pre_warm completed during timeout, using cached result");
260 return Ok(Arc::clone(tldr));
261 }
262 warn!(
263 "get_tldr: ignoring project_path {:?}, using cached {:?}",
264 project_path_for_check, cached_path
265 );
266 return Ok(Arc::clone(tldr));
267 }
268 info!("get_tldr: falling back to lazy creation");
269 }
270 }
271
272 let cache = Arc::clone(&self.cache);
274 let result: Result<&(PathBuf, Arc<TLDR>), AgentError> = cache
275 .get_or_try_init(|| async {
276 info!(
277 "Lazy creating TLDR instance for project: {:?}",
278 project_path
279 );
280 let config = TldrConfig {
281 language: Language::Auto,
282 max_depth: 3,
283 cache_dir: Some(Self::get_cache_dir(&project_path)?),
284 };
285
286 let mut tldr = TLDR::new(&project_path, config)
287 .await
288 .map_err(|e| AgentError::ToolError(format!("Failed to create TLDR: {}", e)))?;
289
290 info!("Warming TLDR indexes...");
291 tldr.warm()
292 .await
293 .map_err(|e| AgentError::ToolError(format!("Failed to warm TLDR: {}", e)))?;
294
295 Ok((project_path, Arc::new(tldr)))
296 })
297 .await;
298
299 let (_cached_path, tldr) = result?;
300 trace!(
301 "TLDR cache hit (lazy) for project: {:?}",
302 project_path_for_check
303 );
304 Ok(Arc::clone(tldr))
305 }
306
307 fn get_cache_dir(project_path: &Path) -> Result<PathBuf, AgentError> {
309 let home = dirs::home_dir()
310 .ok_or_else(|| AgentError::ToolError("Cannot find home directory".into()))?;
311
312 let project_id = project_path
314 .canonicalize()
315 .map_err(|e| AgentError::ToolError(format!("Cannot canonicalize path: {}", e)))?
316 .to_string_lossy()
317 .to_string();
318
319 use std::collections::hash_map::DefaultHasher;
321 use std::hash::{Hash, Hasher};
322 let mut hasher = DefaultHasher::new();
323 project_id.hash(&mut hasher);
324 let hash = format!("{:x}", hasher.finish());
325
326 Ok(home
327 .join(".limit")
328 .join("projects")
329 .join(&hash)
330 .join("tldr"))
331 }
332
333 async fn build_source_result(
335 &self,
336 function: &str,
337 source_file: PathBuf,
338 start_line: usize,
339 end_line: usize,
340 project_path: &Path,
341 ) -> Result<Value, AgentError> {
342 let relative_file = source_file
343 .strip_prefix(project_path)
344 .unwrap_or(&source_file)
345 .to_path_buf();
346
347 let file_path = project_path.join(&source_file);
348 let source = tokio::fs::read_to_string(&file_path)
349 .await
350 .map_err(|e| AgentError::ToolError(format!("Failed to read file: {}", e)))?;
351
352 let lines: Vec<&str> = source.lines().collect();
353 let start = start_line.saturating_sub(1);
354 let end = end_line.min(lines.len());
355 let max_lines = 80;
356 let truncated = (end - start) > max_lines;
357 let actual_end = if truncated { start + max_lines } else { end };
358
359 let function_source = lines[start..actual_end].join("\n");
360
361 let mut result = json!({
362 "type": "source",
363 "function": function,
364 "file": relative_file.display().to_string(),
365 "line": start_line,
366 "end_line": actual_end,
367 "source": function_source
368 });
369 if truncated {
370 result["truncated"] = json!(true);
371 result["total_lines"] = json!(end - start);
372 }
373
374 Ok(result)
375 }
376
    /// Dispatch a single analysis request to the appropriate TLDR query and
    /// shape the answer as a JSON payload whose `"type"` field mirrors the
    /// analysis type.
    ///
    /// `Source` may instead return `"disambiguation_needed"` when a bare
    /// function name matches in multiple files. Errors are wrapped as
    /// `AgentError::ToolError` with a per-analysis message.
    async fn analyze(&self, params: TldrParams) -> Result<Value, AgentError> {
        let project_path = params
            .project_path
            .map(PathBuf::from)
            .unwrap_or_else(|| self.default_project.clone());

        let tldr = self.get_tldr(&project_path).await?;

        let result = match params.analysis_type {
            // Dependencies and callers around one function, `depth` hops out.
            AnalysisType::Context => {
                let function = params.function.ok_or_else(|| {
                    AgentError::ToolError("function parameter required for context analysis".into())
                })?;

                let context = tldr
                    .get_context(&function, params.depth)
                    .await
                    .map_err(|e| {
                        AgentError::ToolError(format!("Context analysis failed: {}", e))
                    })?;

                Ok(json!({
                    "type": "context",
                    "function": function,
                    "depth": params.depth,
                    "context": context
                }))
            }

            AnalysisType::Source => {
                let function = params.function.ok_or_else(|| {
                    AgentError::ToolError("function parameter required for source analysis".into())
                })?;

                // Resolve `Class::method` names: look up the class to find the
                // file it lives in, then search for the bare method name in
                // that file. If the class cannot be resolved, fall through
                // with the original name unchanged.
                let (function, file_override) = if !function.starts_with("struct ") {
                    if let Some(pos) = function.find("::") {
                        let class_name = &function[..pos];
                        let method_name = &function[pos + 2..];
                        if !method_name.is_empty() {
                            let class_info = if let Some(ref file) = params.file {
                                let file_path = project_path.join(file);
                                tldr.find_class_in(class_name, &file_path).unwrap_or(None)
                            } else {
                                tldr.find_class(class_name).unwrap_or(None)
                            };
                            if let Some(info) = class_info {
                                let resolved_file = info
                                    .file
                                    .strip_prefix(&project_path)
                                    .unwrap_or(&info.file)
                                    .to_string_lossy()
                                    .to_string();
                                (method_name.to_string(), Some(resolved_file))
                            } else {
                                (function, None)
                            }
                        } else {
                            (function, None)
                        }
                    } else {
                        (function, None)
                    }
                } else {
                    (function, None)
                };
                // A file resolved from the class takes precedence over the
                // caller-supplied `file` parameter.
                let effective_file = file_override.or(params.file.clone());

                // `struct Foo` requests go through class lookup rather than
                // function lookup.
                let is_struct = function.starts_with("struct ");
                let lookup_name = if is_struct {
                    function.strip_prefix("struct ").unwrap()
                } else {
                    &function
                };

                let (source_file, start_line, end_line) = if is_struct {
                    let class_info = if let Some(ref file) = effective_file {
                        let file_path = project_path.join(file);
                        tldr.find_class_in(lookup_name, &file_path)
                            .map_err(|e| {
                                AgentError::ToolError(format!("Source analysis failed: {}", e))
                            })?
                            .ok_or_else(|| {
                                AgentError::ToolError(format!(
                                    "Struct '{}' not found in '{}'",
                                    lookup_name, file
                                ))
                            })?
                    } else {
                        tldr.find_class(lookup_name)
                            .map_err(|e| {
                                AgentError::ToolError(format!("Source analysis failed: {}", e))
                            })?
                            .ok_or_else(|| {
                                AgentError::ToolError(format!("Struct not found: {}", lookup_name))
                            })?
                    };
                    (class_info.file, class_info.line, class_info.end_line)
                } else {
                    let func_info = if let Some(ref file) = effective_file {
                        let file_path = project_path.join(file);
                        // Within a specific file, try a function first, then
                        // fall back to a class of the same name (returns the
                        // class source directly).
                        if let Some(func) =
                            tldr.find_function_in(&function, &file_path).map_err(|e| {
                                AgentError::ToolError(format!("Source analysis failed: {}", e))
                            })?
                        {
                            func
                        } else if let Some(cls) =
                            tldr.find_class_in(&function, &file_path).map_err(|e| {
                                AgentError::ToolError(format!("Source analysis failed: {}", e))
                            })?
                        {
                            return self
                                .build_source_result(
                                    &function,
                                    cls.file,
                                    cls.line,
                                    cls.end_line,
                                    &project_path,
                                )
                                .await;
                        } else {
                            return Err(AgentError::ToolError(format!(
                                "Function or struct '{}' not found in '{}'",
                                function, file
                            )));
                        }
                    } else {
                        // No file given: when the bare name is ambiguous,
                        // return a disambiguation payload (top 5 matches)
                        // instead of guessing.
                        let all_matches = tldr.find_all_functions(&function);
                        if all_matches.len() > 1 {
                            let match_list: Vec<String> = all_matches
                                .iter()
                                .take(5)
                                .map(|f| {
                                    let relative =
                                        f.file.strip_prefix(&project_path).unwrap_or(&f.file);
                                    format!("{} ({}:{})", f.name, relative.display(), f.line)
                                })
                                .collect();
                            return Ok(json!({
                                "type": "disambiguation_needed",
                                "function": function,
                                "match_count": all_matches.len(),
                                "matches": match_list,
                                "hint": format!(
                                    "Use file parameter to disambiguate, e.g.: {{\"analysis_type\": \"source\", \"function\": \"{}\", \"file\": \"path/to/file.rs\"}}",
                                    function
                                )
                            }));
                        }
                        tldr.find_function(&function)
                            .await
                            .map_err(|e| {
                                AgentError::ToolError(format!("Source analysis failed: {}", e))
                            })?
                            .ok_or_else(|| {
                                AgentError::ToolError(format!("Function not found: {}", function))
                            })?
                    };
                    (func_info.file, func_info.line, func_info.end_line)
                };

                self.build_source_result(
                    &function,
                    source_file,
                    start_line,
                    end_line,
                    &project_path,
                )
                .await
            }

            // Reverse call graph: everything that calls `function`.
            AnalysisType::Impact => {
                let function = params.function.ok_or_else(|| {
                    AgentError::ToolError("function parameter required for impact analysis".into())
                })?;

                let callers = tldr
                    .get_impact(&function)
                    .map_err(|e| AgentError::ToolError(format!("Impact analysis failed: {}", e)))?;

                Ok(json!({
                    "type": "impact",
                    "function": function,
                    "callers": callers.iter().map(|c| json!({
                        "function": c.function,
                        "file": c.file.display().to_string(),
                        "line": c.line
                    })).collect::<Vec<_>>(),
                    "caller_count": callers.len()
                }))
            }

            // Control-flow graph summary for one function in one file.
            AnalysisType::Cfg => {
                let file = params.file.ok_or_else(|| {
                    AgentError::ToolError("file parameter required for CFG analysis".into())
                })?;
                let function = params.function.ok_or_else(|| {
                    AgentError::ToolError("function parameter required for CFG analysis".into())
                })?;

                let file_path = project_path.join(&file);
                let cfg = tldr
                    .get_cfg(&file_path, &function)
                    .map_err(|e| AgentError::ToolError(format!("CFG analysis failed: {}", e)))?;

                Ok(json!({
                    "type": "cfg",
                    "function": function,
                    "file": file,
                    "complexity": cfg.complexity,
                    "blocks": cfg.blocks.len()
                }))
            }

            // Data-flow graph summary for one function in one file.
            AnalysisType::Dfg => {
                let file = params.file.ok_or_else(|| {
                    AgentError::ToolError("file parameter required for DFG analysis".into())
                })?;
                let function = params.function.ok_or_else(|| {
                    AgentError::ToolError("function parameter required for DFG analysis".into())
                })?;

                let file_path = project_path.join(&file);
                let dfg = tldr
                    .get_dfg(&file_path, &function)
                    .map_err(|e| AgentError::ToolError(format!("DFG analysis failed: {}", e)))?;

                Ok(json!({
                    "type": "dfg",
                    "function": function,
                    "file": file,
                    "variables": dfg.variables,
                    "flows": dfg.flows.len()
                }))
            }

            // Functions unreachable from the configured entry points.
            AnalysisType::DeadCode => {
                let entries: Vec<&str> = params.entries.iter().map(|s| s.as_str()).collect();
                let dead = tldr.find_dead_code(&entries).map_err(|e| {
                    AgentError::ToolError(format!("Dead code analysis failed: {}", e))
                })?;

                Ok(json!({
                    "type": "dead_code",
                    "entries": params.entries,
                    "dead_functions": dead.iter().map(|f| json!({
                        "name": f.name,
                        "file": f.file.display().to_string(),
                        "line": f.line
                    })).collect::<Vec<_>>(),
                    "dead_count": dead.len()
                }))
            }

            // Layered architecture summary, sampled to keep the payload small.
            AnalysisType::Architecture => {
                let arch = tldr.detect_architecture().map_err(|e| {
                    AgentError::ToolError(format!("Architecture detection failed: {}", e))
                })?;

                // Cap each layer at 10 examples; full counts go in "summary".
                let entry_sample: Vec<_> = arch.entry.iter().take(10).collect();
                let middle_sample: Vec<_> = arch.middle.iter().take(10).collect();
                let leaf_sample: Vec<_> = arch.leaf.iter().take(10).collect();

                Ok(json!({
                    "type": "architecture",
                    "summary": {
                        "entry_points_count": arch.entry.len(),
                        "middle_layer_count": arch.middle.len(),
                        "leaf_functions_count": arch.leaf.len()
                    },
                    "sample_entry_points": entry_sample,
                    "sample_middle_layer": middle_sample,
                    "sample_leaf_functions": leaf_sample,
                    "note": "Showing top 10 of each category. Use Search analysis for specific functions."
                }))
            }

            // Semantic search; falls back to the `function` parameter (or
            // empty string) when no explicit query is supplied.
            AnalysisType::Search => {
                let query = params
                    .query
                    .unwrap_or_else(|| params.function.clone().unwrap_or_default());

                let results = tldr
                    .semantic_search(&query, params.limit)
                    .await
                    .map_err(|e| AgentError::ToolError(format!("Search failed: {}", e)))?;

                Ok(json!({
                    "type": "search",
                    "query": query,
                    "results": results.iter().map(|r| {
                        let relative = r
                            .file
                            .strip_prefix(&project_path)
                            .unwrap_or(&r.file);
                        json!({
                            "function": r.function,
                            "file": relative.display().to_string(),
                            "line": r.line,
                            "score": r.score,
                            "signature": r.signature
                        })
                    }).collect::<Vec<_>>()
                }))
            }
        };

        trace!("Analysis complete for: {:?}", params.analysis_type);
        result
    }
698}
699
impl Default for TldrTool {
    /// Equivalent to [`TldrTool::new`]: rooted at the current directory.
    fn default() -> Self {
        Self::new()
    }
}
705
706#[async_trait]
707impl Tool for TldrTool {
708 fn name(&self) -> &str {
709 "tldr_analyze"
710 }
711
712 async fn execute(&self, args: Value) -> Result<Value, AgentError> {
713 let params: TldrParams = serde_json::from_value(args)
714 .map_err(|e| AgentError::ToolError(format!("Invalid parameters: {}", e)))?;
715
716 let project_path = params
717 .project_path
718 .as_ref()
719 .map(PathBuf::from)
720 .unwrap_or_else(|| self.default_project.clone());
721
722 let settings = ProjectSettings::new().map_err(|e| {
723 AgentError::ToolError(format!("Failed to check project settings: {}", e))
724 })?;
725
726 if !settings.is_warm_enabled(&project_path) {
727 info!("tldr_analyze: warm not enabled for project, requesting permission");
728 return Ok(json!({
729 "type": "warm_permission_required",
730 "message": "Code analysis (TLDR) is not enabled for this project.",
731 "instruction": "Ask the user if they want to enable code analysis. If yes, tell them to run: /tldr",
732 "benefit": "Enabling allows fast code search, context analysis, and impact tracking with 95% token savings."
733 }));
734 }
735
736 info!("tldr_analyze invoked: type={:?}", params.analysis_type);
737 if let Some(ref f) = ¶ms.function {
738 trace!(" function: {}", f);
739 }
740 if let Some(ref q) = ¶ms.query {
741 trace!(" query: {}", q);
742 }
743
744 let result = match self.analyze(params).await {
745 Ok(r) => r,
746 Err(e) => {
747 tracing::warn!("tldr_analyze failed: {}", e);
748 return Err(e);
749 }
750 };
751 let result_str =
752 serde_json::to_string(&result).unwrap_or_else(|_| "serialize error".to_string());
753 info!(
754 "tldr_analyze result: {} chars, {} bytes",
755 result_str.chars().count(),
756 result_str.len()
757 );
758 Ok(result)
759 }
760}
761
/// JSON schema definition for the `tldr_analyze` tool, in the shape the
/// agent runtime expects (`name`, `description`, `parameters`).
///
/// The `analysis_type` enum here must stay in sync with [`AnalysisType`]'s
/// snake_case serde names, and the parameter docs with [`TldrParams`].
pub fn tldr_tool_definition() -> Value {
    json!({
        "name": "tldr_analyze",
        "description": "Token-efficient code analysis. ALWAYS USE THIS when the user asks: 'what does X do', 'how does X work', 'explain X', 'tell me about X', 'what is X'. Saves 95% tokens vs reading raw code. Do NOT combine with file_read or bash — this tool provides all needed context. STRATEGY: (1) search to find functions/constants/structs, (2) source for 1-3 key items only, (3) write answer. Do NOT read every function. Analysis types: search=find by keyword (functions, constants, structs), context=dependencies, source=function code, impact=callers, architecture=layers. NOTE: If this tool returns 'warm_permission_required', ask the user if they want to enable code analysis for this project.",
        "parameters": {
            "type": "object",
            "properties": {
                "analysis_type": {
                    "type": "string",
                    "enum": ["search", "context", "source", "impact", "cfg", "dfg", "dead_code", "architecture"],
                    "description": "Type: search=find by keyword, context=dependencies+callers, source=function code (use instead of file_read), impact=who calls this, cfg=control flow, dfg=data flow, dead_code=unreachable, architecture=module layers"
                },
                "function": {
                    "type": "string",
                    "description": "Function or struct name (required for context, source, impact, cfg, dfg). For structs, prefix with 'struct ' (e.g., 'struct AppConfig')"
                },
                "file": {
                    "type": "string",
                    "description": "File path relative to project root. Required for cfg, dfg. Optional for source (use to disambiguate when function name exists in multiple files)"
                },
                "depth": {
                    "type": "integer",
                    "description": "Depth for context traversal (default: 2)",
                    "default": 2
                },
                "entries": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "Entry points for dead code detection (default: [\"main\"])",
                    "default": ["main"]
                },
                "query": {
                    "type": "string",
                    "description": "Search query for finding functions, constants, or structs (supports patterns like 'daemon', 'SYSTEM_PROMPT', 'handle_*')"
                },
                "limit": {
                    "type": "integer",
                    "description": "Maximum results for search (default: 10)",
                    "default": 10
                },
                "project_path": {
                    "type": "string",
                    "description": "Project root directory (defaults to current directory). Do NOT use file paths here — use 'file' parameter for file paths."
                }
            },
            "required": ["analysis_type"]
        }
    })
}
812
#[cfg(test)]
mod tests {
    use super::*;

    /// The published tool definition exposes the expected name and an enum
    /// of analysis types.
    #[test]
    fn test_tool_definition() {
        let def = tldr_tool_definition();
        assert_eq!(def["name"], "tldr_analyze");
        assert!(def["parameters"]["properties"]["analysis_type"]["enum"].is_array());
    }

    /// Snake-case JSON deserializes into `TldrParams`, with explicit values
    /// overriding the serde defaults.
    #[test]
    fn test_params_deserialization() {
        let json = json!({
            "analysis_type": "context",
            "function": "main",
            "depth": 3
        });

        let params: TldrParams = serde_json::from_value(json).unwrap();
        assert!(matches!(params.analysis_type, AnalysisType::Context));
        assert_eq!(params.function, Some("main".to_string()));
        assert_eq!(params.depth, 3);
    }

    /// Two `get_tldr` calls for the same path must return pointer-equal
    /// Arcs, proving the OnceCell cache was hit rather than re-warmed.
    #[tokio::test]
    #[ignore = "requires fastembed model download — run with: cargo test -- --ignored test_cache_returns_cached_instance"]
    async fn test_cache_returns_cached_instance() {
        let tool = TldrTool::new();
        let test_path = std::env::current_dir().unwrap();

        let tldr1 = tool.get_tldr(&test_path).await.unwrap();
        let tldr2 = tool.get_tldr(&test_path).await.unwrap();

        let addr1 = Arc::as_ptr(&tldr1) as usize;
        let addr2 = Arc::as_ptr(&tldr2) as usize;
        assert_eq!(
            addr1, addr2,
            "Second call should return cached instance (same memory address)"
        );
    }
}