1use std::collections::hash_map::DefaultHasher;
15use std::collections::{HashMap, HashSet};
16use std::hash::{Hash, Hasher};
17use std::path::PathBuf;
18use std::sync::atomic::{AtomicBool, Ordering};
19use std::sync::Arc;
20use std::time::Instant;
21
22use dashmap::DashMap;
23use tokio::sync::{watch, RwLock};
24
25use super::error::{DaemonError, DaemonResult};
26use super::ipc::{read_command, send_response, IpcListener, IpcStream};
27use super::salsa::{QueryCache, QueryKey};
28use super::types::{
29 AllSessionsSummary, DaemonCommand, DaemonConfig, DaemonResponse, DaemonStatus, HookStats,
30 SalsaCacheStats, SessionStats, HOOK_FLUSH_THRESHOLD,
31};
32
33use tldr_core::{
34 architecture_analysis, build_project_call_graph, change_impact, collect_all_functions,
35 dead_code_analysis, detect_or_parse_language, extract_file, find_importers, get_cfg_context,
36 get_code_structure, get_dfg_context, get_file_tree, get_imports, get_relevant_context,
37 get_slice, impact_analysis, search as tldr_search, FileTree, Language, NodeType, SliceDirection,
38};
39#[cfg(feature = "semantic")]
40use tldr_core::semantic::{BuildOptions, CacheConfig, IndexSearchOptions, SemanticIndex};
41#[cfg(test)]
42use super::types::DEFAULT_REINDEX_THRESHOLD;
43
/// Fold an ordered slice of string parts into a single 64-bit hash.
///
/// Uses the std `DefaultHasher`; parts are hashed in order, so the result
/// is order-sensitive and stable within a single build of the std library.
fn hash_str_args(parts: &[&str]) -> u64 {
    let mut state = DefaultHasher::new();
    parts.iter().for_each(|part| part.hash(&mut state));
    state.finish()
}
56
57fn count_tree_files(tree: &FileTree) -> usize {
59 match tree.node_type {
60 NodeType::File => 1,
61 NodeType::Dir => tree.children.iter().map(count_tree_files).sum(),
62 }
63}
64
/// Long-running analysis daemon serving a single project over IPC.
///
/// Shared across connection-handler tasks via `Arc`; interior mutability
/// is provided by `RwLock`s, atomics, and `DashMap`s.
pub struct TLDRDaemon {
    /// Root directory of the project this daemon serves.
    project: PathBuf,
    /// Runtime configuration (idle timeout, auto-reindex threshold, ...).
    config: DaemonConfig,
    /// Daemon creation time; drives uptime reporting.
    start_time: Instant,
    /// Lifecycle state: Initializing -> Ready -> ShuttingDown -> Stopped.
    status: Arc<RwLock<DaemonStatus>>,
    /// Memoized query cache keyed by `QueryKey`, invalidated per file hash.
    cache: QueryCache,
    /// Per-session token/request statistics, keyed by session id.
    sessions: DashMap<String, SessionStats>,
    /// Per-hook invocation statistics, keyed by hook name.
    hooks: DashMap<String, HookStats>,
    /// Files reported dirty via Notify since the last threshold clear.
    dirty_files: Arc<RwLock<HashSet<PathBuf>>>,
    /// Broadcast channel that signals shutdown to background tasks.
    shutdown_tx: watch::Sender<bool>,
    /// Set once shutdown is requested; polled by the accept loop.
    stopping: AtomicBool,
    /// Time of the last accepted client connection (drives idle timeout).
    last_activity: Arc<RwLock<Instant>>,
    /// File count from the last file-tree warm.
    indexed_files: Arc<RwLock<usize>>,
    /// Lazily built semantic search index (feature-gated).
    #[cfg(feature = "semantic")]
    semantic_index: Arc<RwLock<Option<SemanticIndex>>>,
}
106
107impl TLDRDaemon {
108 pub fn new(project: PathBuf, config: DaemonConfig) -> Self {
113 let (shutdown_tx, _shutdown_rx) = watch::channel(false);
114
115 Self {
116 project,
117 config,
118 start_time: Instant::now(),
119 status: Arc::new(RwLock::new(DaemonStatus::Initializing)),
120 cache: QueryCache::with_defaults(),
121 sessions: DashMap::new(),
122 hooks: DashMap::new(),
123 dirty_files: Arc::new(RwLock::new(HashSet::new())),
124 shutdown_tx,
125 stopping: AtomicBool::new(false),
126 last_activity: Arc::new(RwLock::new(Instant::now())),
127 indexed_files: Arc::new(RwLock::new(0)),
128 #[cfg(feature = "semantic")]
129 semantic_index: Arc::new(RwLock::new(None)),
130 }
131 }
132
    /// Current lifecycle state of the daemon.
    pub async fn status(&self) -> DaemonStatus {
        *self.status.read().await
    }
137
    /// Seconds elapsed since the daemon was constructed, as a float.
    pub fn uptime(&self) -> f64 {
        self.start_time.elapsed().as_secs_f64()
    }
142
143 pub fn uptime_human(&self) -> String {
145 let secs = self.start_time.elapsed().as_secs();
146 let hours = secs / 3600;
147 let minutes = (secs % 3600) / 60;
148 let seconds = secs % 60;
149 format!("{}h {}m {}s", hours, minutes, seconds)
150 }
151
    /// Snapshot of query-cache statistics.
    pub fn cache_stats(&self) -> SalsaCacheStats {
        self.cache.stats()
    }
156
    /// Root directory of the project this daemon serves.
    // NOTE(review): returning `&Path` would be more idiomatic than
    // `&PathBuf`, but callers may depend on the current return type.
    pub fn project(&self) -> &PathBuf {
        &self.project
    }
161
    /// Number of files counted during the last file-tree warm (0 until then).
    pub async fn indexed_files(&self) -> usize {
        *self.indexed_files.read().await
    }
166
167 pub fn all_sessions_summary(&self) -> AllSessionsSummary {
169 let mut summary = AllSessionsSummary {
170 active_sessions: self.sessions.len(),
171 ..AllSessionsSummary::default()
172 };
173
174 for entry in self.sessions.iter() {
175 let stats = entry.value();
176 summary.total_raw_tokens += stats.raw_tokens;
177 summary.total_tldr_tokens += stats.tldr_tokens;
178 summary.total_requests += stats.requests;
179 }
180
181 summary
182 }
183
184 pub fn hook_stats(&self) -> HashMap<String, HookStats> {
186 self.hooks
187 .iter()
188 .map(|e| (e.key().clone(), e.value().clone()))
189 .collect()
190 }
191
    /// Request a graceful shutdown: set the stop flag (polled by the accept
    /// loop) and notify any watch-channel subscribers.
    pub fn shutdown(&self) {
        self.stopping.store(true, Ordering::SeqCst);
        // send fails only when no receivers exist; safe to ignore.
        let _ = self.shutdown_tx.send(true);
    }
197
    /// Main accept loop: serve connections until shutdown is requested, the
    /// idle timeout elapses, or the project directory disappears.
    ///
    /// Each accepted connection is handled on its own spawned task. On exit
    /// the daemon transitions through `ShuttingDown`, persists stats, and
    /// ends in `Stopped`.
    ///
    /// # Errors
    /// Propagates failures from `persist_stats` during teardown.
    pub async fn run(self: Arc<Self>, listener: IpcListener) -> DaemonResult<()> {
        // Mark ourselves ready before accepting any connections.
        {
            let mut status = self.status.write().await;
            *status = DaemonStatus::Ready;
        }

        // Translate SIGTERM/SIGINT into a graceful shutdown request.
        #[cfg(unix)]
        {
            let daemon = Arc::clone(&self);
            tokio::spawn(async move {
                let mut sigterm =
                    tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
                        .expect("Failed to register SIGTERM handler");
                let mut sigint =
                    tokio::signal::unix::signal(tokio::signal::unix::SignalKind::interrupt())
                        .expect("Failed to register SIGINT handler");

                tokio::select! {
                    _ = sigterm.recv() => {
                        daemon.shutdown();
                    }
                    _ = sigint.recv() => {
                        daemon.shutdown();
                    }
                }
            });
        }

        let idle_timeout = std::time::Duration::from_secs(self.config.idle_timeout_secs);

        loop {
            // Shutdown requested via command or signal.
            if self.stopping.load(Ordering::SeqCst) {
                break;
            }

            // Exit if the project directory was deleted out from under us.
            if !self.project.exists() {
                eprintln!(
                    "Project directory {} no longer exists, shutting down",
                    self.project.display()
                );
                break;
            }

            // Idle-timeout check against the last accepted connection.
            {
                let last = self.last_activity.read().await;
                if last.elapsed() > idle_timeout {
                    eprintln!(
                        "No client activity for {}s, shutting down",
                        self.config.idle_timeout_secs
                    );
                    break;
                }
            }

            // Bounded accept so the loop re-evaluates its exit conditions
            // at least every 100ms.
            let accept_future = listener.accept();
            let timeout = tokio::time::Duration::from_millis(100);

            match tokio::time::timeout(timeout, accept_future).await {
                Ok(Ok(mut stream)) => {
                    *self.last_activity.write().await = Instant::now();

                    // One task per connection; errors are logged, not fatal.
                    let daemon = Arc::clone(&self);
                    tokio::spawn(async move {
                        if let Err(e) = daemon.handle_connection(&mut stream).await {
                            eprintln!("Connection error: {}", e);
                        }
                    });
                }
                Ok(Err(e)) => {
                    eprintln!("Accept error: {}", e);
                }
                Err(_) => {
                    // Accept timed out; loop to re-check exit conditions.
                    continue;
                }
            }
        }

        // Graceful teardown: announce, flush stats, then report Stopped.
        {
            let mut status = self.status.write().await;
            *status = DaemonStatus::ShuttingDown;
        }

        self.persist_stats().await?;

        {
            let mut status = self.status.write().await;
            *status = DaemonStatus::Stopped;
        }

        Ok(())
    }
309
    /// Serve one client connection: read a single command, dispatch it, and
    /// write back a single response.
    ///
    /// # Errors
    /// Returns an error if reading the command or writing the response fails.
    async fn handle_connection(self: &Arc<Self>, stream: &mut IpcStream) -> DaemonResult<()> {
        let cmd = read_command(stream).await?;

        let response = self.handle_command(cmd).await;

        send_response(stream, &response).await?;

        Ok(())
    }
323
    /// Dispatch one client command and build its response.
    ///
    /// Query-style commands share a pattern: derive a `QueryKey`, return a
    /// cache hit immediately, otherwise compute via `tldr_core`, cache the
    /// serialized result, and return it. Single-file queries register the
    /// file's path hash as a cache input so `handle_notify` can invalidate
    /// them; project-wide queries register no inputs (`vec![]`).
    pub async fn handle_command(&self, cmd: DaemonCommand) -> DaemonResponse {
        match cmd {
            // Liveness probe.
            DaemonCommand::Ping => DaemonResponse::Status {
                status: "ok".to_string(),
                message: Some("pong".to_string()),
            },

            DaemonCommand::Status { session } => self.handle_status(session).await,

            DaemonCommand::Shutdown => {
                self.shutdown();
                DaemonResponse::Status {
                    status: "shutting_down".to_string(),
                    message: Some("Daemon is shutting down".to_string()),
                }
            }

            DaemonCommand::Notify { file } => self.handle_notify(file).await,

            DaemonCommand::Track {
                hook,
                success,
                metrics,
            } => self.handle_track(hook, success, metrics).await,

            // Pre-populate the most expensive caches: call graph, structure,
            // file tree, and (when enabled) the semantic index.
            DaemonCommand::Warm { language } => {
                // NOTE(review): an unknown or missing language falls back to
                // Python -- confirm this default is intended.
                let lang = language
                    .as_deref()
                    .and_then(|l| l.parse::<Language>().ok())
                    .unwrap_or(Language::Python);

                let mut warmed = Vec::new();
                let mut errors = Vec::new();

                // Project-wide call graph (same key as `Calls` on the root).
                let calls_key = QueryKey::new(
                    "calls",
                    hash_str_args(&[&self.project.to_string_lossy()]),
                );
                if self.cache.get::<serde_json::Value>(&calls_key).is_some() {
                    warmed.push("call_graph (cached)");
                } else {
                    match build_project_call_graph(&self.project, lang, None, true) {
                        Ok(result) => {
                            let val = serde_json::to_value(&result).unwrap_or_default();
                            self.cache.insert(calls_key, &val, vec![]);
                            warmed.push("call_graph");
                        }
                        Err(e) => errors.push(format!("call_graph: {}", e)),
                    }
                }

                // Code structure (key matches `Structure` with empty lang).
                let struct_key = QueryKey::new(
                    "structure",
                    hash_str_args(&[&self.project.to_string_lossy(), ""]),
                );
                if self.cache.get::<serde_json::Value>(&struct_key).is_some() {
                    warmed.push("structure (cached)");
                } else {
                    match get_code_structure(&self.project, lang, 0, None) {
                        Ok(result) => {
                            let val = serde_json::to_value(&result).unwrap_or_default();
                            self.cache.insert(struct_key, &val, vec![]);
                            warmed.push("structure");
                        }
                        Err(e) => errors.push(format!("structure: {}", e)),
                    }
                }

                // File tree; also refreshes the indexed-files counter.
                let tree_key = QueryKey::new(
                    "tree",
                    hash_str_args(&[&self.project.to_string_lossy()]),
                );
                if self.cache.get::<serde_json::Value>(&tree_key).is_some() {
                    warmed.push("file_tree (cached)");
                } else {
                    match get_file_tree(&self.project, None, true, None) {
                        Ok(result) => {
                            let file_count = count_tree_files(&result);
                            let val = serde_json::to_value(&result).unwrap_or_default();
                            self.cache.insert(tree_key, &val, vec![]);
                            *self.indexed_files.write().await = file_count;
                            warmed.push("file_tree");
                        }
                        Err(e) => errors.push(format!("file_tree: {}", e)),
                    }
                }

                // Build the semantic index eagerly; it is reused by
                // `Semantic` until invalidated in `handle_notify`.
                #[cfg(feature = "semantic")]
                {
                    let mut index_guard = self.semantic_index.write().await;
                    if index_guard.is_some() {
                        warmed.push("semantic_index (cached)");
                    } else {
                        let build_opts = BuildOptions {
                            show_progress: false,
                            use_cache: true,
                            ..Default::default()
                        };
                        match SemanticIndex::build(
                            &self.project,
                            build_opts,
                            Some(CacheConfig::default()),
                        ) {
                            Ok(idx) => {
                                *index_guard = Some(idx);
                                warmed.push("semantic_index");
                            }
                            Err(e) => errors.push(format!("semantic_index: {}", e)),
                        }
                    }
                }

                // Partial failures are reported alongside successes.
                let message = if errors.is_empty() {
                    format!("Warmed: {}", warmed.join(", "))
                } else {
                    format!(
                        "Warmed: {}. Errors: {}",
                        warmed.join(", "),
                        errors.join("; ")
                    )
                };

                DaemonResponse::Status {
                    status: "ok".to_string(),
                    message: Some(message),
                }
            }

            // Semantic vector search; builds the index lazily on first use.
            #[cfg(feature = "semantic")]
            DaemonCommand::Semantic { query, top_k } => {
                let mut index_guard = self.semantic_index.write().await;

                if index_guard.is_none() {
                    let build_opts = BuildOptions {
                        show_progress: false,
                        use_cache: true,
                        ..Default::default()
                    };
                    let cache_config = Some(CacheConfig::default());

                    match SemanticIndex::build(&self.project, build_opts, cache_config) {
                        Ok(idx) => {
                            *index_guard = Some(idx);
                        }
                        Err(e) => {
                            return DaemonResponse::Error {
                                status: "error".to_string(),
                                error: format!("Failed to build semantic index: {}", e),
                            };
                        }
                    }
                }

                // unwrap is safe: the index is either pre-existing or was
                // just built above (errors returned early).
                let index = index_guard.as_mut().unwrap();
                let search_opts = IndexSearchOptions {
                    top_k,
                    threshold: 0.5,
                    include_snippet: true,
                    snippet_lines: 5,
                };

                match index.search(&query, &search_opts) {
                    Ok(report) => match serde_json::to_value(&report) {
                        Ok(value) => DaemonResponse::Result(value),
                        Err(e) => DaemonResponse::Error {
                            status: "error".to_string(),
                            error: format!("Serialization error: {}", e),
                        },
                    },
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: format!("Semantic search failed: {}", e),
                    },
                }
            }

            #[cfg(not(feature = "semantic"))]
            DaemonCommand::Semantic { .. } => DaemonResponse::Error {
                status: "error".to_string(),
                error: "Semantic search requires the 'semantic' feature".to_string(),
            },

            // Text search, cached on (pattern, max_results).
            DaemonCommand::Search {
                pattern,
                max_results,
            } => {
                let max = max_results.unwrap_or(100);
                let key = QueryKey::new(
                    "search",
                    hash_str_args(&[&pattern, &max.to_string()]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                match tldr_search(
                    &pattern,
                    &self.project,
                    None,
                    2,
                    max,
                    1000,
                    None,
                ) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Single-file extraction; invalidated when the file is notified
            // dirty (dep on the file's path hash).
            DaemonCommand::Extract { file, session: _ } => {
                let file_str = file.to_string_lossy().to_string();
                let key = QueryKey::new("extract", hash_str_args(&[&file_str]));
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let file_hash = super::salsa::hash_path(&file);
                match extract_file(&file, Some(&self.project)) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![file_hash]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // File tree rooted at `path` (defaults to the project root).
            DaemonCommand::Tree { path } => {
                let root = path.unwrap_or_else(|| self.project.clone());
                let root_str = root.to_string_lossy().to_string();
                let key = QueryKey::new("tree", hash_str_args(&[&root_str]));
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                match get_file_tree(&root, None, true, None) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Code structure for an explicit path and optional language.
            DaemonCommand::Structure { path, lang } => {
                let path_str = path.to_string_lossy().to_string();
                let lang_str = lang.as_deref().unwrap_or("");
                let key = QueryKey::new(
                    "structure",
                    hash_str_args(&[&path_str, lang_str]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let language = match detect_or_parse_language(lang.as_deref(), &path) {
                    Ok(l) => l,
                    Err(e) => {
                        return DaemonResponse::Error {
                            status: "error".to_string(),
                            error: e.to_string(),
                        }
                    }
                };
                match get_code_structure(&path, language, 0, None) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Relevant context around an entry point, to `depth` hops
            // (default 2). NOTE(review): language is hard-coded to Python.
            DaemonCommand::Context { entry, depth } => {
                let d = depth.unwrap_or(2);
                let key = QueryKey::new(
                    "context",
                    hash_str_args(&[&entry, &d.to_string()]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                match get_relevant_context(
                    &self.project,
                    &entry,
                    d,
                    Language::Python,
                    true,
                    None,
                ) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Control-flow graph for one function in one file.
            DaemonCommand::Cfg { file, function } => {
                let file_str = file.to_string_lossy().to_string();
                let key = QueryKey::new(
                    "cfg",
                    hash_str_args(&[&file_str, &function]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let language = match detect_or_parse_language(None, &file) {
                    Ok(l) => l,
                    Err(e) => {
                        return DaemonResponse::Error {
                            status: "error".to_string(),
                            error: e.to_string(),
                        }
                    }
                };
                let file_hash = super::salsa::hash_path(&file);
                match get_cfg_context(&file_str, &function, language) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![file_hash]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Data-flow graph for one function in one file.
            DaemonCommand::Dfg { file, function } => {
                let file_str = file.to_string_lossy().to_string();
                let key = QueryKey::new(
                    "dfg",
                    hash_str_args(&[&file_str, &function]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let language = match detect_or_parse_language(None, &file) {
                    Ok(l) => l,
                    Err(e) => {
                        return DaemonResponse::Error {
                            status: "error".to_string(),
                            error: e.to_string(),
                        }
                    }
                };
                let file_hash = super::salsa::hash_path(&file);
                match get_dfg_context(&file_str, &function, language) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![file_hash]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Backward program slice from `line` within `function`.
            DaemonCommand::Slice {
                file,
                function,
                line,
            } => {
                let file_str = file.to_string_lossy().to_string();
                let key = QueryKey::new(
                    "slice",
                    hash_str_args(&[&file_str, &function, &line.to_string()]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let language = match detect_or_parse_language(None, &file) {
                    Ok(l) => l,
                    Err(e) => {
                        return DaemonResponse::Error {
                            status: "error".to_string(),
                            error: e.to_string(),
                        }
                    }
                };
                let file_hash = super::salsa::hash_path(&file);
                match get_slice(
                    &file_str,
                    &function,
                    line as u32,
                    SliceDirection::Backward,
                    None,
                    language,
                ) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![file_hash]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Project call graph. NOTE(review): hard-coded to Python here,
            // unlike `Warm` which honors the requested language.
            DaemonCommand::Calls { path } => {
                let root = path.unwrap_or_else(|| self.project.clone());
                let root_str = root.to_string_lossy().to_string();
                let key = QueryKey::new("calls", hash_str_args(&[&root_str]));
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                match build_project_call_graph(&root, Language::Python, None, true) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Callers affected by changing `func`, to `depth` hops (default 3).
            DaemonCommand::Impact { func, depth } => {
                let d = depth.unwrap_or(3);
                let key = QueryKey::new(
                    "impact",
                    hash_str_args(&[&func, &d.to_string()]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let graph =
                    match build_project_call_graph(&self.project, Language::Python, None, true) {
                        Ok(g) => g,
                        Err(e) => {
                            return DaemonResponse::Error {
                                status: "error".to_string(),
                                error: e.to_string(),
                            }
                        }
                    };
                match impact_analysis(&graph, &func, d, None) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Dead-code analysis: call graph + all function definitions,
            // with optional explicit entry points.
            DaemonCommand::Dead { path, entry } => {
                let root = path.unwrap_or_else(|| self.project.clone());
                let root_str = root.to_string_lossy().to_string();
                let entry_str = entry
                    .as_ref()
                    .map(|v| v.join(","))
                    .unwrap_or_default();
                let key = QueryKey::new(
                    "dead",
                    hash_str_args(&[&root_str, &entry_str]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let graph =
                    match build_project_call_graph(&root, Language::Python, None, true) {
                        Ok(g) => g,
                        Err(e) => {
                            return DaemonResponse::Error {
                                status: "error".to_string(),
                                error: e.to_string(),
                            }
                        }
                    };
                let extensions: HashSet<String> =
                    Language::Python.extensions().iter().map(|s| s.to_string()).collect();
                let file_tree = match get_file_tree(&root, Some(&extensions), true, None) {
                    Ok(t) => t,
                    Err(e) => {
                        return DaemonResponse::Error {
                            status: "error".to_string(),
                            error: e.to_string(),
                        }
                    }
                };
                let files = tldr_core::fs::tree::collect_files(&file_tree, &root);
                let mut module_infos = Vec::new();
                for file_path in files {
                    // Files that fail to parse are skipped, not fatal.
                    if let Ok(info) = extract_file(&file_path, Some(&root)) {
                        module_infos.push((file_path, info));
                    }
                }
                let all_functions = collect_all_functions(&module_infos);
                let entry_strings: Option<Vec<String>> = entry;
                let entry_refs: Option<&[String]> = entry_strings.as_deref();
                match dead_code_analysis(&graph, &all_functions, entry_refs) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Architecture analysis over the project call graph.
            DaemonCommand::Arch { path } => {
                let root = path.unwrap_or_else(|| self.project.clone());
                let root_str = root.to_string_lossy().to_string();
                let key = QueryKey::new("arch", hash_str_args(&[&root_str]));
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let graph =
                    match build_project_call_graph(&root, Language::Python, None, true) {
                        Ok(g) => g,
                        Err(e) => {
                            return DaemonResponse::Error {
                                status: "error".to_string(),
                                error: e.to_string(),
                            }
                        }
                    };
                match architecture_analysis(&graph) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Imports declared by a single file.
            DaemonCommand::Imports { file } => {
                let file_str = file.to_string_lossy().to_string();
                let key = QueryKey::new("imports", hash_str_args(&[&file_str]));
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let language = match detect_or_parse_language(None, &file) {
                    Ok(l) => l,
                    Err(e) => {
                        return DaemonResponse::Error {
                            status: "error".to_string(),
                            error: e.to_string(),
                        }
                    }
                };
                let file_hash = super::salsa::hash_path(&file);
                match get_imports(&file, language) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![file_hash]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Reverse-import lookup: which files import `module`.
            DaemonCommand::Importers { module, path } => {
                let root = path.unwrap_or_else(|| self.project.clone());
                let root_str = root.to_string_lossy().to_string();
                let key = QueryKey::new(
                    "importers",
                    hash_str_args(&[&module, &root_str]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                match find_importers(&root, &module, Language::Python) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }

            // Not supported in-daemon; directs the client to the CLI.
            DaemonCommand::Diagnostics { path, project: _ } => {
                DaemonResponse::Error {
                    status: "error".to_string(),
                    error: format!(
                        "Diagnostics requires external tool orchestration; \
                         use CLI directly: tldr diagnostics {}",
                        path.display()
                    ),
                }
            }

            // Impact of a set of changed files (None lets the core decide).
            DaemonCommand::ChangeImpact {
                files,
                session: _,
                git: _,
            } => {
                let files_str = files
                    .as_ref()
                    .map(|v| {
                        v.iter()
                            .map(|p| p.to_string_lossy().to_string())
                            .collect::<Vec<_>>()
                            .join(",")
                    })
                    .unwrap_or_default();
                let key = QueryKey::new(
                    "change_impact",
                    hash_str_args(&[&files_str]),
                );
                if let Some(cached) = self.cache.get::<serde_json::Value>(&key) {
                    return DaemonResponse::Result(cached);
                }
                let changed: Option<Vec<PathBuf>> = files;
                match change_impact(
                    &self.project,
                    changed.as_deref(),
                    Language::Python,
                ) {
                    Ok(result) => {
                        let val = serde_json::to_value(&result).unwrap_or_default();
                        self.cache.insert(key, &val, vec![]);
                        DaemonResponse::Result(val)
                    }
                    Err(e) => DaemonResponse::Error {
                        status: "error".to_string(),
                        error: e.to_string(),
                    },
                }
            }
        }
    }
1000
1001 async fn handle_status(&self, session: Option<String>) -> DaemonResponse {
1003 let status = self.status().await;
1004 let uptime = self.uptime();
1005 let files = self.indexed_files().await;
1006 let salsa_stats = self.cache_stats();
1007 let all_sessions = Some(self.all_sessions_summary());
1008 let hook_stats = Some(self.hook_stats());
1009
1010 let session_stats =
1012 session.and_then(|id| self.sessions.get(&id).map(|entry| entry.value().clone()));
1013
1014 DaemonResponse::FullStatus {
1015 status,
1016 uptime,
1017 files,
1018 project: self.project.clone(),
1019 salsa_stats,
1020 dedup_stats: None,
1021 session_stats,
1022 all_sessions,
1023 hook_stats,
1024 }
1025 }
1026
    /// Record `file` as dirty and invalidate anything cached against it.
    ///
    /// Reports the current dirty count, the configured threshold, and
    /// whether that threshold was reached by this notification.
    async fn handle_notify(&self, file: PathBuf) -> DaemonResponse {
        let dirty_count = {
            let mut dirty = self.dirty_files.write().await;
            dirty.insert(file.clone());
            dirty.len()
        };

        // Drop cached query results that listed this file as an input.
        let file_hash = super::salsa::hash_path(&file);
        self.cache.invalidate_by_input(file_hash);

        // Any change invalidates the whole semantic index; it is rebuilt
        // lazily on the next Warm/Semantic command.
        #[cfg(feature = "semantic")]
        {
            let mut idx = self.semantic_index.write().await;
            *idx = None;
        }

        let threshold = self.config.auto_reindex_threshold;
        let reindex_triggered = dirty_count >= threshold;

        if reindex_triggered {
            // NOTE(review): only the dirty set is cleared here; no actual
            // re-index happens in-daemon -- presumably the client acts on
            // `reindex_triggered` in the response. Confirm.
            let mut dirty = self.dirty_files.write().await;
            dirty.clear();

        }

        DaemonResponse::NotifyResponse {
            status: "ok".to_string(),
            dirty_count,
            threshold,
            reindex_triggered,
        }
    }
1067
    /// Record one hook invocation (success flag plus optional metrics) and
    /// report the running totals for that hook.
    async fn handle_track(
        &self,
        hook: String,
        success: bool,
        metrics: HashMap<String, f64>,
    ) -> DaemonResponse {
        // DashMap entry guard; held only across synchronous code (no await
        // occurs while it is live).
        let mut entry = self
            .hooks
            .entry(hook.clone())
            .or_insert_with(|| HookStats::new(hook.clone()));

        // An empty metrics map is normalized to None.
        let metrics_opt = if metrics.is_empty() {
            None
        } else {
            Some(metrics)
        };
        entry.record_invocation(success, metrics_opt);

        let total_invocations = entry.invocations;
        // "Flush" every HOOK_FLUSH_THRESHOLD invocations.
        let flushed = total_invocations.is_multiple_of(HOOK_FLUSH_THRESHOLD as u64);

        if flushed {
            // NOTE(review): flush is reported to the client but nothing is
            // persisted here -- confirm stats are written out elsewhere
            // (e.g. persist_stats at shutdown).
        }

        DaemonResponse::TrackResponse {
            status: "ok".to_string(),
            hook,
            total_invocations,
            flushed,
        }
    }
1105
1106 async fn persist_stats(&self) -> DaemonResult<()> {
1108 let cache_dir = self.project.join(".tldr/cache");
1110 if !cache_dir.exists() {
1111 std::fs::create_dir_all(&cache_dir)?;
1112 }
1113
1114 let salsa_stats_path = cache_dir.join("salsa_stats.json");
1116 let stats = self.cache_stats();
1117 let json = serde_json::to_string_pretty(&stats)?;
1118 std::fs::write(salsa_stats_path, json)?;
1119
1120 let cache_path = cache_dir.join("query_cache.bin");
1122 self.cache.save_to_file(&cache_path)?;
1123
1124 Ok(())
1125 }
1126}
1127
/// Spawn a fully detached daemon process for `project` and return its PID.
///
/// Re-executes the current binary with `daemon start --project <dir>
/// --foreground`, with stdin/stdout/stderr closed and the child detached
/// from this process's session (unix) or console (windows).
///
/// # Errors
/// Returns `DaemonError::Io` if the current executable path cannot be
/// resolved or the child process fails to spawn.
pub async fn start_daemon_background(project: &std::path::Path) -> DaemonResult<u32> {
    use std::process::Command;

    let exe_path = std::env::current_exe().map_err(DaemonError::Io)?;

    #[cfg(unix)]
    {
        use std::os::unix::process::CommandExt;

        // SAFETY: the pre_exec closure runs between fork and exec and only
        // calls `setsid`, which is async-signal-safe and touches no locks
        // or allocator state.
        let child = unsafe {
            Command::new(&exe_path)
                .args(["daemon", "start", "--project"])
                .arg(project.as_os_str())
                .arg("--foreground")
                .stdin(std::process::Stdio::null())
                .stdout(std::process::Stdio::null())
                .stderr(std::process::Stdio::null())
                .pre_exec(|| {
                    // Detach from the controlling terminal / process group.
                    // The return value is ignored: detachment is best-effort.
                    libc::setsid();
                    Ok(())
                })
                .spawn()
                .map_err(DaemonError::Io)?
        };

        Ok(child.id())
    }

    #[cfg(windows)]
    {
        use std::os::windows::process::CommandExt;
        // Detach from the parent console and suppress any new window.
        const DETACHED_PROCESS: u32 = 0x00000008;
        const CREATE_NO_WINDOW: u32 = 0x08000000;

        let child = Command::new(&exe_path)
            .args(["daemon", "start", "--project"])
            .arg(project.as_os_str())
            .arg("--foreground")
            .stdin(std::process::Stdio::null())
            .stdout(std::process::Stdio::null())
            .stderr(std::process::Stdio::null())
            .creation_flags(DETACHED_PROCESS | CREATE_NO_WINDOW)
            .spawn()
            .map_err(DaemonError::Io)?;

        Ok(child.id())
    }
}
1186
1187pub async fn wait_for_daemon(project: &std::path::Path, timeout_secs: u64) -> DaemonResult<()> {
1191 let start = Instant::now();
1192 let timeout = std::time::Duration::from_secs(timeout_secs);
1193
1194 while start.elapsed() < timeout {
1195 if super::ipc::check_socket_alive(project).await {
1197 return Ok(());
1198 }
1199
1200 tokio::time::sleep(std::time::Duration::from_millis(100)).await;
1202 }
1203
1204 Err(DaemonError::ConnectionTimeout { timeout_secs })
1205}
1206
1207#[cfg(test)]
1212mod tests {
1213 use super::*;
1214 use tempfile::TempDir;
1215
    // Constructor sanity: project path is stored and uptime starts near zero.
    #[test]
    fn test_daemon_new() {
        let temp = TempDir::new().unwrap();
        let config = DaemonConfig::default();
        let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);

        assert_eq!(daemon.project(), temp.path());
        assert!(daemon.uptime() < 1.0);
    }
1225
    // A freshly constructed daemon reports `Initializing` until `run` starts.
    #[tokio::test]
    async fn test_daemon_status_initial() {
        let temp = TempDir::new().unwrap();
        let config = DaemonConfig::default();
        let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);

        assert_eq!(daemon.status().await, DaemonStatus::Initializing);
    }
1234
    // Human-readable uptime always carries all three h/m/s components.
    #[tokio::test]
    async fn test_daemon_uptime_human() {
        let temp = TempDir::new().unwrap();
        let config = DaemonConfig::default();
        let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);

        let uptime = daemon.uptime_human();
        assert!(uptime.contains("h"));
        assert!(uptime.contains("m"));
        assert!(uptime.contains("s"));
    }
1246
    // Ping must answer with an "ok" status and a "pong" message.
    #[tokio::test]
    async fn test_daemon_handle_ping() {
        let temp = TempDir::new().unwrap();
        let config = DaemonConfig::default();
        let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);

        let response = daemon.handle_command(DaemonCommand::Ping).await;

        match response {
            DaemonResponse::Status { status, message } => {
                assert_eq!(status, "ok");
                assert_eq!(message, Some("pong".to_string()));
            }
            _ => panic!("Expected Status response"),
        }
    }
1263
    // Shutdown command must acknowledge and set the internal stop flag.
    #[tokio::test]
    async fn test_daemon_handle_shutdown() {
        let temp = TempDir::new().unwrap();
        let config = DaemonConfig::default();
        let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);

        let response = daemon.handle_command(DaemonCommand::Shutdown).await;

        match response {
            DaemonResponse::Status { status, .. } => {
                assert_eq!(status, "shutting_down");
            }
            _ => panic!("Expected Status response"),
        }

        // The accept loop polls this flag to exit.
        assert!(daemon.stopping.load(Ordering::SeqCst));
    }
1282
1283 #[tokio::test]
1284 async fn test_daemon_handle_notify() {
1285 let temp = TempDir::new().unwrap();
1286 let config = DaemonConfig::default();
1287 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1288
1289 let file = temp.path().join("test.rs");
1290 let response = daemon.handle_command(DaemonCommand::Notify { file }).await;
1291
1292 match response {
1293 DaemonResponse::NotifyResponse {
1294 dirty_count,
1295 threshold,
1296 reindex_triggered,
1297 ..
1298 } => {
1299 assert_eq!(dirty_count, 1);
1300 assert_eq!(threshold, DEFAULT_REINDEX_THRESHOLD);
1301 assert!(!reindex_triggered);
1302 }
1303 _ => panic!("Expected NotifyResponse"),
1304 }
1305 }
1306
1307 #[tokio::test]
1308 async fn test_daemon_handle_notify_threshold() {
1309 let temp = TempDir::new().unwrap();
1310 let config = DaemonConfig {
1311 auto_reindex_threshold: 3, ..DaemonConfig::default()
1313 };
1314 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1315
1316 for i in 0..3 {
1318 let file = temp.path().join(format!("test{}.rs", i));
1319 daemon.handle_command(DaemonCommand::Notify { file }).await;
1320 }
1321
1322 let file = temp.path().join("test3.rs");
1324 let response = daemon.handle_command(DaemonCommand::Notify { file }).await;
1325
1326 match response {
1327 DaemonResponse::NotifyResponse {
1328 reindex_triggered: _, ..
1329 } => {
1330 }
1333 _ => panic!("Expected NotifyResponse"),
1334 }
1335 }
1336
1337 #[tokio::test]
1338 async fn test_daemon_handle_track() {
1339 let temp = TempDir::new().unwrap();
1340 let config = DaemonConfig::default();
1341 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1342
1343 let response = daemon
1344 .handle_command(DaemonCommand::Track {
1345 hook: "test-hook".to_string(),
1346 success: true,
1347 metrics: HashMap::new(),
1348 })
1349 .await;
1350
1351 match response {
1352 DaemonResponse::TrackResponse {
1353 hook,
1354 total_invocations,
1355 ..
1356 } => {
1357 assert_eq!(hook, "test-hook");
1358 assert_eq!(total_invocations, 1);
1359 }
1360 _ => panic!("Expected TrackResponse"),
1361 }
1362 }
1363
1364 #[tokio::test]
1365 async fn test_daemon_all_sessions_summary() {
1366 let temp = TempDir::new().unwrap();
1367 let config = DaemonConfig::default();
1368 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1369
1370 daemon.sessions.insert(
1372 "test-session".to_string(),
1373 SessionStats {
1374 session_id: "test-session".to_string(),
1375 raw_tokens: 1000,
1376 tldr_tokens: 100,
1377 requests: 10,
1378 started_at: None,
1379 },
1380 );
1381
1382 let summary = daemon.all_sessions_summary();
1383 assert_eq!(summary.active_sessions, 1);
1384 assert_eq!(summary.total_raw_tokens, 1000);
1385 assert_eq!(summary.total_tldr_tokens, 100);
1386 assert_eq!(summary.total_requests, 10);
1387 }
1388
1389 fn create_test_project() -> TempDir {
1395 let temp = TempDir::new().unwrap();
1396 let py_file = temp.path().join("main.py");
1397 std::fs::write(
1398 &py_file,
1399 "def hello():\n \"\"\"Say hello.\"\"\"\n return 'hello'\n\ndef main():\n hello()\n",
1400 )
1401 .unwrap();
1402 temp
1403 }
1404
1405 #[test]
1406 fn test_hash_str_args_deterministic() {
1407 let h1 = hash_str_args(&["search", "pattern", "100"]);
1408 let h2 = hash_str_args(&["search", "pattern", "100"]);
1409 assert_eq!(h1, h2);
1410 }
1411
1412 #[test]
1413 fn test_hash_str_args_different_inputs() {
1414 let h1 = hash_str_args(&["search", "pattern_a"]);
1415 let h2 = hash_str_args(&["search", "pattern_b"]);
1416 assert_ne!(h1, h2);
1417 }
1418
1419 #[tokio::test]
1420 async fn test_daemon_search_returns_result() {
1421 let temp = create_test_project();
1422 let config = DaemonConfig::default();
1423 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1424
1425 let response = daemon
1426 .handle_command(DaemonCommand::Search {
1427 pattern: "def hello".to_string(),
1428 max_results: Some(10),
1429 })
1430 .await;
1431
1432 match response {
1433 DaemonResponse::Result(val) => {
1434 assert!(val.is_array(), "Search should return an array of matches");
1435 let arr = val.as_array().unwrap();
1436 assert!(!arr.is_empty(), "Should find at least one match for 'def hello'");
1437 }
1438 DaemonResponse::Error { error, .. } => {
1439 panic!("Search returned error: {}", error);
1440 }
1441 other => panic!("Expected Result response, got {:?}", other),
1442 }
1443 }
1444
1445 #[tokio::test]
1446 async fn test_daemon_search_caches_result() {
1447 let temp = create_test_project();
1448 let config = DaemonConfig::default();
1449 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1450
1451 let _r1 = daemon
1453 .handle_command(DaemonCommand::Search {
1454 pattern: "def hello".to_string(),
1455 max_results: Some(10),
1456 })
1457 .await;
1458
1459 let _r2 = daemon
1461 .handle_command(DaemonCommand::Search {
1462 pattern: "def hello".to_string(),
1463 max_results: Some(10),
1464 })
1465 .await;
1466
1467 let stats = daemon.cache_stats();
1468 assert!(stats.hits >= 1, "Second call should hit cache");
1469 }
1470
1471 #[tokio::test]
1472 async fn test_daemon_extract_returns_result() {
1473 let temp = create_test_project();
1474 let config = DaemonConfig::default();
1475 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1476
1477 let response = daemon
1478 .handle_command(DaemonCommand::Extract {
1479 file: temp.path().join("main.py"),
1480 session: None,
1481 })
1482 .await;
1483
1484 match response {
1485 DaemonResponse::Result(val) => {
1486 assert!(val.is_object(), "Extract should return a module info object");
1487 assert!(val.get("functions").is_some(), "Should have 'functions' field");
1489 }
1490 DaemonResponse::Error { error, .. } => {
1491 panic!("Extract returned error: {}", error);
1492 }
1493 other => panic!("Expected Result response, got {:?}", other),
1494 }
1495 }
1496
1497 #[tokio::test]
1498 async fn test_daemon_extract_nonexistent_file() {
1499 let temp = TempDir::new().unwrap();
1500 let config = DaemonConfig::default();
1501 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1502
1503 let response = daemon
1504 .handle_command(DaemonCommand::Extract {
1505 file: temp.path().join("nonexistent.py"),
1506 session: None,
1507 })
1508 .await;
1509
1510 match response {
1511 DaemonResponse::Error { error, .. } => {
1512 assert!(
1513 !error.is_empty(),
1514 "Should return an error for nonexistent file"
1515 );
1516 }
1517 _ => panic!("Expected Error response for nonexistent file"),
1518 }
1519 }
1520
1521 #[tokio::test]
1522 async fn test_daemon_tree_returns_result() {
1523 let temp = create_test_project();
1524 let config = DaemonConfig::default();
1525 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1526
1527 let response = daemon
1528 .handle_command(DaemonCommand::Tree { path: None })
1529 .await;
1530
1531 match response {
1532 DaemonResponse::Result(val) => {
1533 assert!(val.is_object(), "Tree should return a FileTree object");
1534 }
1535 DaemonResponse::Error { error, .. } => {
1536 panic!("Tree returned error: {}", error);
1537 }
1538 other => panic!("Expected Result response, got {:?}", other),
1539 }
1540 }
1541
1542 #[tokio::test]
1543 async fn test_daemon_structure_returns_result() {
1544 let temp = create_test_project();
1545 let config = DaemonConfig::default();
1546 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1547
1548 let response = daemon
1549 .handle_command(DaemonCommand::Structure {
1550 path: temp.path().to_path_buf(),
1551 lang: Some("python".to_string()),
1552 })
1553 .await;
1554
1555 match response {
1556 DaemonResponse::Result(val) => {
1557 assert!(val.is_object(), "Structure should return a CodeStructure object");
1558 }
1559 DaemonResponse::Error { error, .. } => {
1560 panic!("Structure returned error: {}", error);
1561 }
1562 other => panic!("Expected Result response, got {:?}", other),
1563 }
1564 }
1565
1566 #[tokio::test]
1567 async fn test_daemon_imports_returns_result() {
1568 let temp = create_test_project();
1569 let config = DaemonConfig::default();
1570 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1571
1572 let response = daemon
1573 .handle_command(DaemonCommand::Imports {
1574 file: temp.path().join("main.py"),
1575 })
1576 .await;
1577
1578 match response {
1579 DaemonResponse::Result(val) => {
1580 assert!(val.is_array(), "Imports should return an array");
1581 }
1582 DaemonResponse::Error { error, .. } => {
1583 panic!("Imports returned error: {}", error);
1584 }
1585 other => panic!("Expected Result response, got {:?}", other),
1586 }
1587 }
1588
1589 #[tokio::test]
1590 async fn test_daemon_cfg_returns_result() {
1591 let temp = create_test_project();
1592 let config = DaemonConfig::default();
1593 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1594
1595 let file = temp.path().join("main.py");
1596 let response = daemon
1597 .handle_command(DaemonCommand::Cfg {
1598 file,
1599 function: "hello".to_string(),
1600 })
1601 .await;
1602
1603 match response {
1604 DaemonResponse::Result(val) => {
1605 assert!(val.is_object(), "Cfg should return a CfgInfo object");
1606 assert!(val.get("function").is_some(), "Should have 'function' field");
1607 }
1608 DaemonResponse::Error { error, .. } => {
1609 panic!("Cfg returned error: {}", error);
1610 }
1611 other => panic!("Expected Result response, got {:?}", other),
1612 }
1613 }
1614
1615 #[tokio::test]
1616 async fn test_daemon_dfg_returns_result() {
1617 let temp = create_test_project();
1618 let config = DaemonConfig::default();
1619 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1620
1621 let file = temp.path().join("main.py");
1622 let response = daemon
1623 .handle_command(DaemonCommand::Dfg {
1624 file,
1625 function: "hello".to_string(),
1626 })
1627 .await;
1628
1629 match response {
1630 DaemonResponse::Result(val) => {
1631 assert!(val.is_object(), "Dfg should return a DfgInfo object");
1632 assert!(val.get("function").is_some(), "Should have 'function' field");
1633 }
1634 DaemonResponse::Error { error, .. } => {
1635 panic!("Dfg returned error: {}", error);
1636 }
1637 other => panic!("Expected Result response, got {:?}", other),
1638 }
1639 }
1640
1641 #[tokio::test]
1642 async fn test_daemon_calls_returns_result() {
1643 let temp = create_test_project();
1644 let config = DaemonConfig::default();
1645 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1646
1647 let response = daemon
1648 .handle_command(DaemonCommand::Calls { path: None })
1649 .await;
1650
1651 match response {
1652 DaemonResponse::Result(val) => {
1653 assert!(val.is_object(), "Calls should return a ProjectCallGraph object");
1654 }
1655 DaemonResponse::Error { error, .. } => {
1656 panic!("Calls returned error: {}", error);
1657 }
1658 other => panic!("Expected Result response, got {:?}", other),
1659 }
1660 }
1661
1662 #[tokio::test]
1663 async fn test_daemon_arch_returns_result() {
1664 let temp = create_test_project();
1665 let config = DaemonConfig::default();
1666 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1667
1668 let response = daemon
1669 .handle_command(DaemonCommand::Arch { path: None })
1670 .await;
1671
1672 match response {
1673 DaemonResponse::Result(val) => {
1674 assert!(val.is_object(), "Arch should return an ArchitectureReport object");
1675 }
1676 DaemonResponse::Error { error, .. } => {
1677 panic!("Arch returned error: {}", error);
1678 }
1679 other => panic!("Expected Result response, got {:?}", other),
1680 }
1681 }
1682
1683 #[tokio::test]
1684 async fn test_daemon_diagnostics_returns_error_with_guidance() {
1685 let temp = TempDir::new().unwrap();
1686 let config = DaemonConfig::default();
1687 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1688
1689 let path = temp.path().join("src");
1690 let response = daemon
1691 .handle_command(DaemonCommand::Diagnostics {
1692 path: path.clone(),
1693 project: None,
1694 })
1695 .await;
1696
1697 match response {
1698 DaemonResponse::Error { error, .. } => {
1699 assert!(
1700 error.contains("Diagnostics requires external tool orchestration"),
1701 "Error should explain that diagnostics needs CLI: {}",
1702 error
1703 );
1704 assert!(
1705 error.contains("tldr diagnostics"),
1706 "Error should suggest CLI usage"
1707 );
1708 }
1709 other => panic!("Expected Error response, got {:?}", other),
1710 }
1711 }
1712
1713 #[tokio::test]
1714 async fn test_daemon_importers_returns_result() {
1715 let temp = create_test_project();
1716 let config = DaemonConfig::default();
1717 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1718
1719 let response = daemon
1720 .handle_command(DaemonCommand::Importers {
1721 module: "os".to_string(),
1722 path: None,
1723 })
1724 .await;
1725
1726 match response {
1727 DaemonResponse::Result(val) => {
1728 assert!(val.is_object(), "Importers should return an ImportersReport object");
1729 }
1730 DaemonResponse::Error { error, .. } => {
1731 panic!("Importers returned error: {}", error);
1732 }
1733 other => panic!("Expected Result response, got {:?}", other),
1734 }
1735 }
1736
1737 #[tokio::test]
1738 async fn test_daemon_dead_returns_result() {
1739 let temp = create_test_project();
1740 let config = DaemonConfig::default();
1741 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1742
1743 let response = daemon
1744 .handle_command(DaemonCommand::Dead {
1745 path: None,
1746 entry: None,
1747 })
1748 .await;
1749
1750 match response {
1751 DaemonResponse::Result(val) => {
1752 assert!(val.is_object(), "Dead should return a DeadCodeReport object");
1753 }
1754 DaemonResponse::Error { error, .. } => {
1755 panic!("Dead returned error: {}", error);
1756 }
1757 other => panic!("Expected Result response, got {:?}", other),
1758 }
1759 }
1760
1761 #[tokio::test]
1762 async fn test_daemon_change_impact_returns_result() {
1763 let temp = create_test_project();
1764 let config = DaemonConfig::default();
1765 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1766
1767 let response = daemon
1768 .handle_command(DaemonCommand::ChangeImpact {
1769 files: Some(vec![temp.path().join("main.py")]),
1770 session: None,
1771 git: None,
1772 })
1773 .await;
1774
1775 match response {
1776 DaemonResponse::Result(val) => {
1777 assert!(
1778 val.is_object(),
1779 "ChangeImpact should return a ChangeImpactReport object"
1780 );
1781 }
1782 DaemonResponse::Error { error, .. } => {
1783 panic!("ChangeImpact returned error: {}", error);
1784 }
1785 other => panic!("Expected Result response, got {:?}", other),
1786 }
1787 }
1788
1789 #[tokio::test]
1790 async fn test_daemon_extract_cache_invalidation() {
1791 let temp = create_test_project();
1792 let config = DaemonConfig::default();
1793 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1794
1795 let file = temp.path().join("main.py");
1796
1797 let r1 = daemon
1799 .handle_command(DaemonCommand::Extract {
1800 file: file.clone(),
1801 session: None,
1802 })
1803 .await;
1804 assert!(matches!(r1, DaemonResponse::Result(_)));
1805
1806 daemon
1808 .handle_command(DaemonCommand::Notify { file: file.clone() })
1809 .await;
1810
1811 let _r2 = daemon
1813 .handle_command(DaemonCommand::Extract {
1814 file,
1815 session: None,
1816 })
1817 .await;
1818
1819 let stats = daemon.cache_stats();
1820 assert!(
1822 stats.invalidations >= 1,
1823 "File notify should have caused invalidation"
1824 );
1825 }
1826
1827 #[tokio::test]
1828 async fn test_daemon_slice_returns_result() {
1829 let temp = create_test_project();
1830 let config = DaemonConfig::default();
1831 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1832
1833 let file = temp.path().join("main.py");
1834 let response = daemon
1835 .handle_command(DaemonCommand::Slice {
1836 file,
1837 function: "hello".to_string(),
1838 line: 3,
1839 })
1840 .await;
1841
1842 match response {
1843 DaemonResponse::Result(val) => {
1844 assert!(val.is_array(), "Slice should return an array of line numbers");
1845 }
1846 DaemonResponse::Error { error, .. } => {
1847 panic!("Slice returned error: {}", error);
1848 }
1849 other => panic!("Expected Result response, got {:?}", other),
1850 }
1851 }
1852
1853 #[tokio::test]
1854 async fn test_daemon_context_returns_result_or_error() {
1855 let temp = create_test_project();
1856 let config = DaemonConfig::default();
1857 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1858
1859 let response = daemon
1860 .handle_command(DaemonCommand::Context {
1861 entry: "main".to_string(),
1862 depth: Some(1),
1863 })
1864 .await;
1865
1866 match response {
1869 DaemonResponse::Result(val) => {
1870 assert!(val.is_object(), "Context should return a RelevantContext object");
1871 }
1872 DaemonResponse::Error { .. } => {
1873 }
1875 other => panic!("Expected Result or Error response, got {:?}", other),
1876 }
1877 }
1878
1879 #[tokio::test]
1880 async fn test_daemon_impact_returns_result_or_error() {
1881 let temp = create_test_project();
1882 let config = DaemonConfig::default();
1883 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1884
1885 let response = daemon
1886 .handle_command(DaemonCommand::Impact {
1887 func: "hello".to_string(),
1888 depth: Some(2),
1889 })
1890 .await;
1891
1892 match response {
1894 DaemonResponse::Result(val) => {
1895 assert!(val.is_object(), "Impact should return an ImpactReport object");
1896 }
1897 DaemonResponse::Error { .. } => {
1898 }
1900 other => panic!("Expected Result or Error response, got {:?}", other),
1901 }
1902 }
1903
1904 #[cfg(feature = "semantic")]
1905 #[tokio::test]
1906 async fn test_semantic_search_builds_index() {
1907 let temp = tempfile::tempdir().unwrap();
1909 let py_file = temp.path().join("hello.py");
1910 std::fs::write(
1911 &py_file,
1912 "def greet(name):\n return f'Hello, {name}!'\n\ndef farewell(name):\n return f'Goodbye, {name}!'\n",
1913 )
1914 .unwrap();
1915
1916 let config = DaemonConfig::default();
1917 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1918
1919 let response = daemon
1920 .handle_command(DaemonCommand::Semantic {
1921 query: "greeting function".to_string(),
1922 top_k: 5,
1923 })
1924 .await;
1925
1926 match &response {
1928 DaemonResponse::Result(value) => {
1929 assert!(value.get("query").is_some());
1930 assert!(value.get("results").is_some());
1931 }
1932 DaemonResponse::Error { error, .. } => {
1933 assert!(
1936 !error.contains("not yet implemented"),
1937 "Semantic search should be wired, got: {}",
1938 error
1939 );
1940 }
1941 other => panic!("Unexpected response: {:?}", other),
1942 }
1943 }
1944
1945 #[cfg(feature = "semantic")]
1946 #[tokio::test]
1947 async fn test_semantic_index_invalidated_on_notify() {
1948 let temp = tempfile::tempdir().unwrap();
1949 let py_file = temp.path().join("example.py");
1950 std::fs::write(&py_file, "def compute(x):\n return x * 2\n").unwrap();
1951
1952 let config = DaemonConfig::default();
1953 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
1954
1955 let _ = daemon
1957 .handle_command(DaemonCommand::Semantic {
1958 query: "computation".to_string(),
1959 top_k: 5,
1960 })
1961 .await;
1962
1963 {
1965 let idx = daemon.semantic_index.read().await;
1966 let _ = idx.is_some();
1969 }
1970
1971 let _ = daemon
1973 .handle_command(DaemonCommand::Notify {
1974 file: py_file.clone(),
1975 })
1976 .await;
1977
1978 {
1980 let idx = daemon.semantic_index.read().await;
1981 assert!(
1982 idx.is_none(),
1983 "Semantic index should be invalidated after Notify"
1984 );
1985 }
1986 }
1987
1988 #[tokio::test]
1989 async fn test_daemon_warm_wires_caches() {
1990 let temp = tempfile::tempdir().unwrap();
1991 let py_file = temp.path().join("example.py");
1992 std::fs::write(
1993 &py_file,
1994 "def add(a, b):\n return a + b\n\ndef multiply(x, y):\n return x * y\n",
1995 )
1996 .unwrap();
1997
1998 let config = DaemonConfig::default();
1999 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
2000
2001 let response = daemon
2002 .handle_command(DaemonCommand::Warm { language: None })
2003 .await;
2004
2005 match &response {
2006 DaemonResponse::Status { status, message } => {
2007 assert_eq!(status, "ok");
2008 let msg = message.as_deref().unwrap_or("");
2009 assert!(
2011 msg.contains("Warmed"),
2012 "Expected warm details, got: {}",
2013 msg
2014 );
2015 }
2016 other => panic!("Expected Status response, got {:?}", other),
2017 }
2018 }
2019
2020 #[tokio::test]
2021 async fn test_daemon_warm_with_language() {
2022 let temp = tempfile::tempdir().unwrap();
2023 let rs_file = temp.path().join("lib.rs");
2024 std::fs::write(
2025 &rs_file,
2026 "pub fn hello() -> String {\n \"hello\".to_string()\n}\n",
2027 )
2028 .unwrap();
2029
2030 let config = DaemonConfig::default();
2031 let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);
2032
2033 let response = daemon
2034 .handle_command(DaemonCommand::Warm {
2035 language: Some("rust".to_string()),
2036 })
2037 .await;
2038
2039 match &response {
2040 DaemonResponse::Status { status, .. } => {
2041 assert_eq!(status, "ok");
2042 }
2043 other => panic!("Expected Status response, got {:?}", other),
2044 }
2045 }
2046
    // NOTE(review): the name claims last_activity is updated per command, yet
    // the assertion below checks it is UNCHANGED after handle_command(Ping).
    // Either handle_command intentionally leaves last_activity to the IPC
    // accept/read loop (then this test name is misleading and should say
    // "not_updated"), or the daemon is missing the update (then the assertion
    // should be `after > before`). Confirm against TLDRDaemon::handle_command
    // before changing either side.
    #[tokio::test]
    async fn test_daemon_last_activity_updated_on_command() {
        let temp = tempfile::tempdir().unwrap();
        let config = DaemonConfig::default();
        let daemon = TLDRDaemon::new(temp.path().to_path_buf(), config);

        // Snapshot the timestamp before any command is dispatched.
        let before = *daemon.last_activity.read().await;

        // Sleep long enough that any refresh of the Instant would be visible.
        tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;

        let _ = daemon
            .handle_command(DaemonCommand::Ping)
            .await;

        let after = *daemon.last_activity.read().await;
        assert_eq!(before, after);
    }
2070
2071 #[tokio::test]
2072 async fn test_daemon_created_with_nonexistent_project() {
2073 let fake_path = PathBuf::from("/tmp/nonexistent-project-dir-12345");
2076 let config = DaemonConfig::default();
2077 let daemon = TLDRDaemon::new(fake_path.clone(), config);
2078
2079 assert_eq!(daemon.project(), &fake_path);
2080 assert!(!fake_path.exists());
2083 }
2084}