1use async_trait::async_trait;
5use chrono::{DateTime, Utc};
6use rustant_core::error::ToolError;
7use rustant_core::types::{RiskLevel, ToolOutput};
8use serde::{Deserialize, Serialize};
9use serde_json::{Value, json};
10use std::collections::HashMap;
11use std::path::PathBuf;
12use std::time::Duration;
13
14use crate::registry::Tool;
15
/// Aggregated per-language statistics collected during an architecture scan.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct LanguageStats {
    /// Display name of the language (e.g. "Rust", "Python").
    language: String,
    /// Number of scanned files attributed to this language.
    files: usize,
    /// Total line count across those files.
    lines: usize,
    /// File extensions (sorted) that mapped to this language during the scan.
    extensions: Vec<String>,
}
27
/// A scanned directory and its coarse role within the project.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct DirectoryInfo {
    /// Directory path, relative to the scan root where possible.
    path: String,
    /// Role bucket: "source", "test", "docs", "build", "vendor", or "config".
    classification: String,
    /// Number of scanned (non-binary) files whose parent is this directory.
    file_count: usize,
}
34
/// Result of a full architecture scan; persisted in the on-disk cache and
/// returned (pretty-printed JSON) by `analyze_architecture`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ArchitectureSnapshot {
    /// Absolute (lossy-converted) path of the scanned root.
    project_root: String,
    /// Per-language stats, sorted by file count descending.
    languages: Vec<LanguageStats>,
    /// Scanned directories, sorted by contained-file count descending.
    directories: Vec<DirectoryInfo>,
    /// Relative paths of recognized entry-point files (sorted).
    entry_points: Vec<String>,
    /// Relative paths of recognized configuration files (sorted).
    config_files: Vec<String>,
    /// Count of scanned non-binary files (capped by the scan's file limit).
    total_files: usize,
    /// Total lines across readable scanned files.
    total_lines: usize,
    /// When the snapshot was produced.
    analyzed_at: DateTime<Utc>,
}
46
/// A single hit from `detect_patterns`: either a marker comment (TODO/FIXME/
/// HACK) or a keyword-based design-pattern heuristic.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PatternMatch {
    /// Name of the matched pattern (e.g. "TODO", "Singleton").
    pattern_name: String,
    /// File containing the match, relative to the scan root.
    file_path: String,
    /// 1-based line number of the match.
    line_number: usize,
    /// The trimmed matching line.
    snippet: String,
    /// 1.0 for exact marker matches; lower for keyword heuristics.
    confidence: f64,
}
55
/// One finding from `tech_debt_report`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TechDebtItem {
    /// File containing the finding, relative to the scan root.
    file_path: String,
    /// 1-based line number of the finding.
    line_number: usize,
    /// Category: "todo", "fixme", "hack", "deep_nesting", or "long_function".
    category: String,
    /// Human-readable description (often the offending line itself).
    description: String,
    /// "medium" for markers/nesting, "high" for long functions.
    severity: String,
}
64
/// One public item discovered while scanning a file's API surface.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ApiEntry {
    /// Identifier of the item.
    name: String,
    /// Item kind (e.g. "function", "struct", "trait", "enum").
    kind: String,
    /// File containing the item, relative to the scan root.
    file_path: String,
    /// 1-based line number of the declaration.
    line_number: usize,
    /// The trimmed declaration line.
    signature: String,
    /// Visibility label (e.g. "public").
    visibility: String,
}
74
/// A declared project dependency parsed from a manifest file.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct DependencyEntry {
    /// Dependency name as declared in the manifest.
    name: String,
    /// Declared version string.
    version: String,
    // NOTE(review): values for `dep_type` are not visible in this chunk —
    // presumably something like "normal"/"dev"; confirm at the usage site.
    dep_type: String,
    /// Manifest file the dependency was read from.
    source_file: String,
}
82
/// On-disk cache persisted under `.rustant/code_intel/cache.json`; currently
/// holds only the most recent architecture snapshot.
#[derive(Debug, Default, Serialize, Deserialize)]
struct CodeIntelCache {
    /// Last snapshot produced by `analyze_architecture`, if any.
    last_snapshot: Option<ArchitectureSnapshot>,
}
87
/// Workspace-scoped code analysis tool: architecture snapshots, pattern
/// detection, tech-debt scanning, implementation comparison, and snippet
/// translation prompts.
pub struct CodeIntelligenceTool {
    /// Root directory against which relative paths and the cache resolve.
    workspace: PathBuf,
}
95
96impl CodeIntelligenceTool {
    /// Create a tool rooted at `workspace`; relative paths resolve against it.
    pub fn new(workspace: PathBuf) -> Self {
        Self { workspace }
    }
100
101 fn state_path(&self) -> PathBuf {
102 self.workspace
103 .join(".rustant")
104 .join("code_intel")
105 .join("cache.json")
106 }
107
108 fn load_cache(&self) -> CodeIntelCache {
109 let path = self.state_path();
110 if path.exists() {
111 std::fs::read_to_string(&path)
112 .ok()
113 .and_then(|s| serde_json::from_str(&s).ok())
114 .unwrap_or_default()
115 } else {
116 CodeIntelCache::default()
117 }
118 }
119
    /// Persist the cache atomically: serialize to pretty JSON, write to a
    /// `.json.tmp` sibling, then rename over the real path so concurrent
    /// readers never observe a partially written file.
    fn save_cache(&self, cache: &CodeIntelCache) -> Result<(), ToolError> {
        let path = self.state_path();
        // Ensure `.rustant/code_intel/` exists before writing.
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent).map_err(|e| ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Failed to create cache dir: {}", e),
            })?;
        }
        let json = serde_json::to_string_pretty(cache).map_err(|e| ToolError::ExecutionFailed {
            name: "code_intelligence".to_string(),
            message: format!("Failed to serialize cache: {}", e),
        })?;
        // `with_extension` swaps the "json" suffix, giving `cache.json.tmp`
        // alongside the target file (same directory, so the rename is atomic
        // on POSIX filesystems).
        let tmp = path.with_extension("json.tmp");
        std::fs::write(&tmp, &json).map_err(|e| ToolError::ExecutionFailed {
            name: "code_intelligence".to_string(),
            message: format!("Failed to write cache: {}", e),
        })?;
        std::fs::rename(&tmp, &path).map_err(|e| ToolError::ExecutionFailed {
            name: "code_intelligence".to_string(),
            message: format!("Failed to rename cache file: {}", e),
        })?;
        Ok(())
    }
143
144 fn resolve_path(&self, args: &Value) -> PathBuf {
150 args.get("path")
151 .and_then(|v| v.as_str())
152 .map(|p| {
153 let pb = PathBuf::from(p);
154 if pb.is_absolute() {
155 pb
156 } else {
157 self.workspace.join(pb)
158 }
159 })
160 .unwrap_or_else(|| self.workspace.clone())
161 }
162
163 fn ext_to_language(ext: &str) -> Option<&'static str> {
165 match ext {
166 "rs" => Some("Rust"),
167 "py" | "pyi" => Some("Python"),
168 "js" | "mjs" | "cjs" => Some("JavaScript"),
169 "ts" | "mts" | "cts" => Some("TypeScript"),
170 "jsx" => Some("JavaScript (JSX)"),
171 "tsx" => Some("TypeScript (TSX)"),
172 "go" => Some("Go"),
173 "java" => Some("Java"),
174 "rb" => Some("Ruby"),
175 "c" | "h" => Some("C"),
176 "cpp" | "cc" | "cxx" | "hpp" | "hxx" => Some("C++"),
177 "cs" => Some("C#"),
178 "swift" => Some("Swift"),
179 "kt" | "kts" => Some("Kotlin"),
180 "sh" | "bash" | "zsh" => Some("Shell"),
181 "html" | "htm" => Some("HTML"),
182 "css" | "scss" | "sass" => Some("CSS"),
183 "json" => Some("JSON"),
184 "toml" => Some("TOML"),
185 "yaml" | "yml" => Some("YAML"),
186 "xml" => Some("XML"),
187 "md" | "markdown" => Some("Markdown"),
188 "sql" => Some("SQL"),
189 _ => None,
190 }
191 }
192
193 fn is_binary_ext(ext: &str) -> bool {
195 matches!(
196 ext,
197 "png"
198 | "jpg"
199 | "jpeg"
200 | "gif"
201 | "bmp"
202 | "ico"
203 | "svg"
204 | "woff"
205 | "woff2"
206 | "ttf"
207 | "otf"
208 | "eot"
209 | "pdf"
210 | "zip"
211 | "tar"
212 | "gz"
213 | "bz2"
214 | "xz"
215 | "7z"
216 | "rar"
217 | "exe"
218 | "dll"
219 | "so"
220 | "dylib"
221 | "o"
222 | "a"
223 | "class"
224 | "jar"
225 | "war"
226 | "pyc"
227 | "pyo"
228 | "wasm"
229 | "db"
230 | "sqlite"
231 | "lock"
232 )
233 }
234
235 fn classify_dir(name: &str) -> &'static str {
237 match name {
238 "src" | "lib" | "app" | "pkg" | "internal" | "cmd" => "source",
239 "test" | "tests" | "spec" | "specs" | "__tests__" | "test_data" | "testdata" => "test",
240 "doc" | "docs" | "documentation" => "docs",
241 "build" | "target" | "dist" | "out" | "output" | "bin" | "obj" => "build",
242 "vendor" | "node_modules" | "third_party" | "external" | "deps" => "vendor",
243 "config" | "configs" | "conf" | "etc" | "settings" | ".github" | ".vscode" => "config",
244 _ => "source",
245 }
246 }
247
248 fn is_entry_point(name: &str) -> bool {
250 matches!(
251 name,
252 "main.rs"
253 | "main.py"
254 | "__main__.py"
255 | "index.js"
256 | "index.ts"
257 | "index.tsx"
258 | "index.jsx"
259 | "main.go"
260 | "Main.java"
261 | "Program.cs"
262 | "main.c"
263 | "main.cpp"
264 | "main.rb"
265 | "app.py"
266 | "app.js"
267 | "app.ts"
268 | "server.js"
269 | "server.ts"
270 | "manage.py"
271 )
272 }
273
274 fn is_config_file(name: &str) -> bool {
276 matches!(
277 name,
278 "Cargo.toml"
279 | "package.json"
280 | "tsconfig.json"
281 | "pyproject.toml"
282 | "setup.py"
283 | "setup.cfg"
284 | "requirements.txt"
285 | "go.mod"
286 | "go.sum"
287 | "Gemfile"
288 | "Makefile"
289 | "CMakeLists.txt"
290 | "Dockerfile"
291 | "docker-compose.yml"
292 | "docker-compose.yaml"
293 | ".gitignore"
294 | ".editorconfig"
295 | "jest.config.js"
296 | "jest.config.ts"
297 | "webpack.config.js"
298 | "vite.config.ts"
299 | "vite.config.js"
300 | "babel.config.js"
301 | ".eslintrc.json"
302 | ".eslintrc.js"
303 | ".prettierrc"
304 | "tox.ini"
305 | "Pipfile"
306 | "poetry.lock"
307 | ".env.example"
308 )
309 }
310
311 fn count_lines(content: &str) -> usize {
313 if content.is_empty() {
314 0
315 } else {
316 content.lines().count()
317 }
318 }
319
    /// Build an architecture snapshot of the project: per-language file/line
    /// stats, directory classification, entry points, and config files.
    ///
    /// Args (all optional): `path` — directory to scan (defaults to the
    /// workspace root); `force` — when true, rescan instead of returning the
    /// cached snapshot. A fresh snapshot is persisted via `save_cache`.
    fn analyze_architecture(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let force = args.get("force").and_then(|v| v.as_bool()).unwrap_or(false);

        // Fast path: reuse the last snapshot unless a rescan is forced.
        // NOTE(review): the cache ignores `path`, so a cached snapshot of one
        // subtree can be served for a request about another — confirm intended.
        if !force {
            let cache = self.load_cache();
            if let Some(snapshot) = &cache.last_snapshot {
                let out = serde_json::to_string_pretty(snapshot).unwrap_or_default();
                return Ok(ToolOutput::text(format!(
                    "Architecture snapshot (cached):\n{}",
                    out
                )));
            }
        }

        let root = self.resolve_path(args);
        if !root.exists() {
            return Err(ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Path does not exist: {}", root.display()),
            });
        }

        // language -> (file count, line count, extensions seen)
        let mut lang_map: HashMap<String, (usize, usize, std::collections::HashSet<String>)> =
            HashMap::new();
        // relative dir path -> (classification, contained-file count)
        let mut dir_map: HashMap<String, (String, usize)> = HashMap::new();
        let mut entry_points: Vec<String> = Vec::new();
        let mut config_files: Vec<String> = Vec::new();
        let mut total_files: usize = 0;
        let mut total_lines: usize = 0;
        // Hard cap so very large repositories don't stall the scan.
        let max_files: usize = 5000;

        // Walk the tree honoring .gitignore; hidden files are included.
        let walker = ignore::WalkBuilder::new(&root)
            .hidden(false)
            .git_ignore(true)
            .build();

        for entry in walker {
            let entry = match entry {
                Ok(e) => e,
                Err(_) => continue, // unreadable entries are skipped silently
            };

            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                continue;
            }

            if total_files >= max_files {
                break;
            }

            let path = entry.path();
            // Report paths relative to the scan root where possible.
            let rel = path
                .strip_prefix(&root)
                .unwrap_or(path)
                .to_string_lossy()
                .to_string();

            let file_name = path
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_default();

            let ext = path
                .extension()
                .map(|e| e.to_string_lossy().to_string())
                .unwrap_or_default();

            // Binary artifacts are excluded from every count.
            if Self::is_binary_ext(&ext) {
                continue;
            }

            total_files += 1;

            if Self::is_entry_point(&file_name) {
                entry_points.push(rel.clone());
            }

            if Self::is_config_file(&file_name) {
                config_files.push(rel.clone());
            }

            // Unreadable (e.g. non-UTF-8) files count as zero lines.
            let lines = std::fs::read_to_string(path)
                .map(|c| Self::count_lines(&c))
                .unwrap_or(0);
            total_lines += lines;

            if let Some(lang) = Self::ext_to_language(&ext) {
                let entry = lang_map
                    .entry(lang.to_string())
                    .or_insert_with(|| (0, 0, std::collections::HashSet::new()));
                entry.0 += 1;
                entry.1 += lines;
                entry.2.insert(ext.clone());
            }

            if let Some(parent) = path.parent() {
                let dir_rel = parent
                    .strip_prefix(&root)
                    .unwrap_or(parent)
                    .to_string_lossy()
                    .to_string();
                let dir_name = parent
                    .file_name()
                    .map(|n| n.to_string_lossy().to_string())
                    .unwrap_or_else(|| dir_rel.clone());
                // Classification is decided once per directory, from its name,
                // the first time a file inside it is seen.
                let classification = Self::classify_dir(&dir_name).to_string();
                let dir_entry = dir_map
                    .entry(dir_rel)
                    .or_insert_with(|| (classification, 0));
                dir_entry.1 += 1;
            }
        }

        // Languages sorted by file count, descending.
        let mut languages: Vec<LanguageStats> = lang_map
            .into_iter()
            .map(|(lang, (files, lines, exts))| {
                let mut ext_vec: Vec<String> = exts.into_iter().collect();
                ext_vec.sort();
                LanguageStats {
                    language: lang,
                    files,
                    lines,
                    extensions: ext_vec,
                }
            })
            .collect();
        languages.sort_by(|a, b| b.files.cmp(&a.files));

        // Directories sorted by contained-file count, descending.
        let mut directories: Vec<DirectoryInfo> = dir_map
            .into_iter()
            .map(|(path, (classification, file_count))| DirectoryInfo {
                path,
                classification,
                file_count,
            })
            .collect();
        directories.sort_by(|a, b| b.file_count.cmp(&a.file_count));

        entry_points.sort();
        config_files.sort();

        let snapshot = ArchitectureSnapshot {
            project_root: root.to_string_lossy().to_string(),
            languages,
            directories,
            entry_points,
            config_files,
            total_files,
            total_lines,
            analyzed_at: Utc::now(),
        };

        // Persist so subsequent non-forced calls can reuse this snapshot.
        let cache = CodeIntelCache {
            last_snapshot: Some(snapshot.clone()),
        };
        self.save_cache(&cache)?;

        let out = serde_json::to_string_pretty(&snapshot).unwrap_or_default();
        Ok(ToolOutput::text(format!("Architecture snapshot:\n{}", out)))
    }
496
    /// Scan source files for marker comments and design-pattern heuristics.
    ///
    /// Args (optional): `path` — directory to scan (defaults to workspace);
    /// `pattern` — "todo"/"fixme"/"hack" for marker comments, or "singleton"/
    /// "factory"/"builder"/"observer"/"repository" for keyword-based pattern
    /// detection. With no filter, only marker comments are reported. All
    /// detection is line-based keyword matching, so confidence values below
    /// 1.0 signal expected false positives.
    fn detect_patterns(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let root = self.resolve_path(args);
        if !root.exists() {
            return Err(ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Path does not exist: {}", root.display()),
            });
        }

        let pattern_filter = args.get("pattern").and_then(|v| v.as_str());
        let mut matches: Vec<PatternMatch> = Vec::new();
        // Cap the number of scanned files to bound runtime.
        let max_files: usize = 1000;
        let mut file_count: usize = 0;

        let walker = ignore::WalkBuilder::new(&root)
            .hidden(false)
            .git_ignore(true)
            .build();

        for entry in walker {
            let entry = match entry {
                Ok(e) => e,
                Err(_) => continue,
            };

            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                continue;
            }

            if file_count >= max_files {
                break;
            }

            let path = entry.path();
            let ext = path
                .extension()
                .map(|e| e.to_string_lossy().to_string())
                .unwrap_or_default();

            if Self::is_binary_ext(&ext) {
                continue;
            }

            // Only files whose extension maps to a known language are scanned.
            if Self::ext_to_language(&ext).is_none() {
                continue;
            }

            let content = match std::fs::read_to_string(path) {
                Ok(c) => c,
                Err(_) => continue,
            };

            // Counted only after a successful read.
            file_count += 1;

            let rel = path
                .strip_prefix(&root)
                .unwrap_or(path)
                .to_string_lossy()
                .to_string();

            for (line_num, line) in content.lines().enumerate() {
                let trimmed = line.trim();

                // Marker comments run for no filter or any marker filter.
                // NOTE(review): filtering by e.g. "todo" still reports FIXME
                // and HACK lines as well — confirm that is intended.
                if pattern_filter.is_none()
                    || pattern_filter == Some("todo")
                    || pattern_filter == Some("fixme")
                    || pattern_filter == Some("hack")
                {
                    if trimmed.contains("TODO") || trimmed.contains("FIXME") {
                        // A line containing both markers is reported as TODO.
                        let pname = if trimmed.contains("TODO") {
                            "TODO"
                        } else {
                            "FIXME"
                        };
                        matches.push(PatternMatch {
                            pattern_name: pname.to_string(),
                            file_path: rel.clone(),
                            line_number: line_num + 1,
                            snippet: trimmed.to_string(),
                            confidence: 1.0,
                        });
                    }
                    if trimmed.contains("HACK") {
                        matches.push(PatternMatch {
                            pattern_name: "HACK".to_string(),
                            file_path: rel.clone(),
                            line_number: line_num + 1,
                            snippet: trimmed.to_string(),
                            confidence: 1.0,
                        });
                    }
                }

                // Design-pattern heuristics only run for an explicit filter.
                if let Some(filter) = pattern_filter {
                    match filter {
                        "singleton" => {
                            if (trimmed.contains("static")
                                && (trimmed.contains("instance") || trimmed.contains("INSTANCE")))
                                || trimmed.contains("get_instance")
                                || trimmed.contains("getInstance")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Singleton".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.8,
                                });
                            }
                        }
                        "factory" => {
                            if trimmed.contains("fn create_")
                                || trimmed.contains("fn new_")
                                || trimmed.contains("def create_")
                                || trimmed.contains("function create")
                                || trimmed.contains("Factory")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Factory".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.7,
                                });
                            }
                        }
                        "builder" => {
                            if trimmed.contains("fn builder(")
                                || trimmed.contains(".builder()")
                                || trimmed.contains(".build()")
                                || trimmed.contains("Builder")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Builder".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.7,
                                });
                            }
                        }
                        "observer" => {
                            if trimmed.contains("on_event")
                                || trimmed.contains("addEventListener")
                                || trimmed.contains("subscribe")
                                || trimmed.contains("notify")
                                || trimmed.contains("Observer")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Observer".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.6,
                                });
                            }
                        }
                        "repository" => {
                            if trimmed.contains("find_by")
                                || trimmed.contains("findBy")
                                || trimmed.contains("get_all")
                                || trimmed.contains("getAll")
                                || trimmed.contains("Repository")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Repository".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.7,
                                });
                            }
                        }
                        // Unknown filters fall through: only the marker block
                        // above can still match.
                        _ => {}
                    }
                }
            }
        }

        let out = serde_json::to_string_pretty(&matches).unwrap_or_default();
        Ok(ToolOutput::text(format!(
            "Detected {} pattern matches:\n{}",
            matches.len(),
            out
        )))
    }
686
    /// Build an LLM prompt requesting a translation of `code` between two
    /// languages. No translation happens here — the returned text is a prompt
    /// annotated with per-language semantics notes for a downstream model.
    ///
    /// Required args: `code`, `from_language`, `to_language`.
    fn translate_snippet(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let code = args.get("code").and_then(|v| v.as_str()).ok_or_else(|| {
            ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'code'".to_string(),
            }
        })?;
        let from_lang = args
            .get("from_language")
            .and_then(|v| v.as_str())
            .ok_or_else(|| ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'from_language'".to_string(),
            })?;
        let to_lang = args
            .get("to_language")
            .and_then(|v| v.as_str())
            .ok_or_else(|| ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'to_language'".to_string(),
            })?;

        // Semantic cheat-sheets give the model language-specific context;
        // unrecognized languages get a generic fallback.
        let semantics_from = Self::language_semantics_notes(from_lang);
        let semantics_to = Self::language_semantics_notes(to_lang);

        // NOTE(review): the code-fence tag is just the lowercased language
        // name (e.g. "c++"), which may not be a recognized fence identifier.
        let prompt = format!(
            "Translate the following {from_lang} code to {to_lang}.\n\n\
             ## Source Code ({from_lang})\n```{from_ext}\n{code}\n```\n\n\
             ## {from_lang} Semantics\n{semantics_from}\n\n\
             ## {to_lang} Semantics\n{semantics_to}\n\n\
             ## Instructions\n\
             - Produce idiomatic {to_lang} code.\n\
             - Preserve the original logic and behavior.\n\
             - Use {to_lang} conventions for naming, error handling, and structure.\n\
             - Add brief comments where the translation involves non-obvious choices.",
            from_lang = from_lang,
            to_lang = to_lang,
            from_ext = from_lang.to_lowercase(),
            code = code,
            semantics_from = semantics_from,
            semantics_to = semantics_to,
        );

        Ok(ToolOutput::text(prompt))
    }
732
733 fn language_semantics_notes(lang: &str) -> &'static str {
734 match lang.to_lowercase().as_str() {
735 "rust" => {
736 "Ownership & borrowing, no GC, Result/Option for errors, pattern matching, traits for polymorphism, lifetimes."
737 }
738 "python" => {
739 "Dynamic typing, GC, exceptions for errors, duck typing, indentation-based blocks, list comprehensions."
740 }
741 "javascript" | "js" => {
742 "Dynamic typing, prototype-based OOP, async/await with Promises, closures, event loop concurrency."
743 }
744 "typescript" | "ts" => {
745 "Structural typing over JavaScript, interfaces, generics, union/intersection types, async/await."
746 }
747 "go" => {
748 "Static typing, GC, error values (not exceptions), goroutines/channels for concurrency, interfaces (implicit), no generics (pre-1.18)."
749 }
750 "java" => {
751 "Static typing, GC, checked exceptions, class-based OOP, interfaces, generics with type erasure."
752 }
753 "c" => "Manual memory management, pointers, no OOP, preprocessor macros, header files.",
754 "c++" | "cpp" => {
755 "Manual memory + RAII/smart pointers, templates, OOP with multiple inheritance, operator overloading."
756 }
757 "ruby" => {
758 "Dynamic typing, GC, everything is an object, blocks/procs/lambdas, mixins via modules."
759 }
760 _ => "General-purpose programming language.",
761 }
762 }
763
    /// Compare two files side by side: language (from extension), line count,
    /// heuristic function count, and a 50-line preview of each, rendered as a
    /// Markdown table plus fenced code blocks.
    ///
    /// Required args: `file_a`, `file_b` — absolute paths, or paths relative
    /// to the workspace root.
    fn compare_implementations(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let file_a = args.get("file_a").and_then(|v| v.as_str()).ok_or_else(|| {
            ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'file_a'".to_string(),
            }
        })?;
        let file_b = args.get("file_b").and_then(|v| v.as_str()).ok_or_else(|| {
            ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'file_b'".to_string(),
            }
        })?;

        // Resolve each path against the workspace unless it is absolute.
        let path_a = {
            let p = PathBuf::from(file_a);
            if p.is_absolute() {
                p
            } else {
                self.workspace.join(p)
            }
        };
        let path_b = {
            let p = PathBuf::from(file_b);
            if p.is_absolute() {
                p
            } else {
                self.workspace.join(p)
            }
        };

        let content_a =
            std::fs::read_to_string(&path_a).map_err(|e| ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Failed to read file_a '{}': {}", path_a.display(), e),
            })?;
        let content_b =
            std::fs::read_to_string(&path_b).map_err(|e| ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Failed to read file_b '{}': {}", path_b.display(), e),
            })?;

        // Language is inferred from the file extension only.
        let lang_a = path_a
            .extension()
            .and_then(|e| Self::ext_to_language(&e.to_string_lossy()))
            .unwrap_or("Unknown");
        let lang_b = path_b
            .extension()
            .and_then(|e| Self::ext_to_language(&e.to_string_lossy()))
            .unwrap_or("Unknown");

        let lines_a = Self::count_lines(&content_a);
        let lines_b = Self::count_lines(&content_b);

        // Function counts are heuristic, line-based pattern matches.
        let fn_count_a = Self::count_functions(&content_a, lang_a);
        let fn_count_b = Self::count_functions(&content_b, lang_b);

        // NOTE(review): fence tags are lowercased display names; "c++"/"c#"
        // may not be recognized code-fence identifiers in all renderers.
        let output = format!(
            "## Implementation Comparison\n\n\
             | Metric | File A | File B |\n\
             |--------|--------|--------|\n\
             | Path | {file_a} | {file_b} |\n\
             | Language | {lang_a} | {lang_b} |\n\
             | Lines | {lines_a} | {lines_b} |\n\
             | Functions | {fn_count_a} | {fn_count_b} |\n\n\
             ### File A: {file_a}\n```{ext_a}\n{preview_a}\n```\n\n\
             ### File B: {file_b}\n```{ext_b}\n{preview_b}\n```",
            file_a = file_a,
            file_b = file_b,
            lang_a = lang_a,
            lang_b = lang_b,
            lines_a = lines_a,
            lines_b = lines_b,
            fn_count_a = fn_count_a,
            fn_count_b = fn_count_b,
            ext_a = lang_a.to_lowercase(),
            ext_b = lang_b.to_lowercase(),
            preview_a = Self::preview_content(&content_a, 50),
            preview_b = Self::preview_content(&content_b, 50),
        );

        Ok(ToolOutput::text(output))
    }
848
849 fn count_functions(content: &str, language: &str) -> usize {
851 let mut count = 0;
852 for line in content.lines() {
853 let trimmed = line.trim();
854 match language {
855 "Rust" => {
856 if (trimmed.starts_with("fn ")
857 || trimmed.starts_with("pub fn ")
858 || trimmed.starts_with("pub(crate) fn ")
859 || trimmed.starts_with("async fn ")
860 || trimmed.starts_with("pub async fn "))
861 && trimmed.contains('(')
862 {
863 count += 1;
864 }
865 }
866 "Python" => {
867 if trimmed.starts_with("def ") && trimmed.contains('(') {
868 count += 1;
869 }
870 }
871 "JavaScript" | "JavaScript (JSX)" | "TypeScript" | "TypeScript (TSX)" => {
872 if (trimmed.starts_with("function ")
873 || trimmed.starts_with("async function ")
874 || trimmed.starts_with("export function ")
875 || trimmed.starts_with("export async function "))
876 && trimmed.contains('(')
877 {
878 count += 1;
879 }
880 }
881 "Go" => {
882 if trimmed.starts_with("func ") && trimmed.contains('(') {
883 count += 1;
884 }
885 }
886 "Java" | "C#" => {
887 if (trimmed.contains("public ")
888 || trimmed.contains("private ")
889 || trimmed.contains("protected "))
890 && trimmed.contains('(')
891 && !trimmed.contains("class ")
892 && !trimmed.contains("interface ")
893 {
894 count += 1;
895 }
896 }
897 "Ruby" => {
898 if trimmed.starts_with("def ") {
899 count += 1;
900 }
901 }
902 "C" | "C++" => {
903 if trimmed.contains('(')
904 && trimmed.contains(')')
905 && (trimmed.ends_with('{') || trimmed.ends_with(") {"))
906 && !trimmed.starts_with("if ")
907 && !trimmed.starts_with("for ")
908 && !trimmed.starts_with("while ")
909 && !trimmed.starts_with("switch ")
910 && !trimmed.starts_with("//")
911 && !trimmed.starts_with('#')
912 {
913 count += 1;
914 }
915 }
916 _ => {}
917 }
918 }
919 count
920 }
921
922 fn preview_content(content: &str, max_lines: usize) -> String {
924 let lines: Vec<&str> = content.lines().take(max_lines).collect();
925 let preview = lines.join("\n");
926 let total = content.lines().count();
927 if total > max_lines {
928 format!("{}\n\n... ({} more lines)", preview, total - max_lines)
929 } else {
930 preview
931 }
932 }
933
    /// Scan for tech-debt indicators: TODO/FIXME/HACK markers (severity
    /// "medium"), deeply nested code (>4 indent levels, "medium"), and
    /// functions longer than 100 lines ("high").
    ///
    /// Args (optional): `path` — directory to scan (defaults to workspace);
    /// `severity` — only emit items of exactly this severity ("medium" or
    /// "high").
    fn tech_debt_report(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let root = self.resolve_path(args);
        if !root.exists() {
            return Err(ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Path does not exist: {}", root.display()),
            });
        }

        let severity_filter = args.get("severity").and_then(|v| v.as_str());

        let mut items: Vec<TechDebtItem> = Vec::new();
        // Cap scanned files to bound runtime.
        let max_files: usize = 1000;
        let mut file_count: usize = 0;

        let walker = ignore::WalkBuilder::new(&root)
            .hidden(false)
            .git_ignore(true)
            .build();

        for entry in walker {
            let entry = match entry {
                Ok(e) => e,
                Err(_) => continue,
            };

            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                continue;
            }

            if file_count >= max_files {
                break;
            }

            let path = entry.path();
            let ext = path
                .extension()
                .map(|e| e.to_string_lossy().to_string())
                .unwrap_or_default();

            // Skip binaries and files of unrecognized languages.
            if Self::is_binary_ext(&ext) || Self::ext_to_language(&ext).is_none() {
                continue;
            }

            let content = match std::fs::read_to_string(path) {
                Ok(c) => c,
                Err(_) => continue,
            };

            file_count += 1;

            let rel = path
                .strip_prefix(&root)
                .unwrap_or(path)
                .to_string_lossy()
                .to_string();

            // --- Long-function tracking state (per file) ---
            // NOTE(review): `0_usize.into()` is `Some(0)` even though no
            // function has been seen yet; `in_function` gates the first use,
            // but a plain `None` would state the intent more clearly.
            let mut fn_start_line: Option<usize> = 0_usize.into();
            let mut fn_name = String::new();
            // Net `{`/`}` balance since the current function started; braces
            // inside strings/comments are counted too (heuristic).
            let mut brace_depth: i32 = 0;
            let mut in_function = false;

            for (line_num, line) in content.lines().enumerate() {
                let trimmed = line.trim();

                // Marker comments → "medium" severity items.
                if trimmed.contains("TODO") {
                    let item = TechDebtItem {
                        file_path: rel.clone(),
                        line_number: line_num + 1,
                        category: "todo".to_string(),
                        description: trimmed.to_string(),
                        severity: "medium".to_string(),
                    };
                    if severity_filter.is_none() || severity_filter == Some("medium") {
                        items.push(item);
                    }
                }
                if trimmed.contains("FIXME") {
                    let item = TechDebtItem {
                        file_path: rel.clone(),
                        line_number: line_num + 1,
                        category: "fixme".to_string(),
                        description: trimmed.to_string(),
                        severity: "medium".to_string(),
                    };
                    if severity_filter.is_none() || severity_filter == Some("medium") {
                        items.push(item);
                    }
                }
                if trimmed.contains("HACK") {
                    let item = TechDebtItem {
                        file_path: rel.clone(),
                        line_number: line_num + 1,
                        category: "hack".to_string(),
                        description: trimmed.to_string(),
                        severity: "medium".to_string(),
                    };
                    if severity_filter.is_none() || severity_filter == Some("medium") {
                        items.push(item);
                    }
                }

                // Nesting deeper than 4 indent levels (tabs, or 4-space units).
                let indent_level = Self::measure_indent(line);
                if indent_level > 4 {
                    let item = TechDebtItem {
                        file_path: rel.clone(),
                        line_number: line_num + 1,
                        category: "deep_nesting".to_string(),
                        description: format!(
                            "Deeply nested code ({} levels): {}",
                            indent_level,
                            Self::truncate_str(trimmed, 80)
                        ),
                        severity: "medium".to_string(),
                    };
                    if severity_filter.is_none() || severity_filter == Some("medium") {
                        items.push(item);
                    }
                }

                // Lines that look like a function definition in any of the
                // supported languages.
                let is_fn_start = trimmed.starts_with("fn ")
                    || trimmed.starts_with("pub fn ")
                    || trimmed.starts_with("pub(crate) fn ")
                    || trimmed.starts_with("async fn ")
                    || trimmed.starts_with("pub async fn ")
                    || trimmed.starts_with("def ")
                    || trimmed.starts_with("function ")
                    || trimmed.starts_with("async function ")
                    || trimmed.starts_with("export function ")
                    || trimmed.starts_with("export async function ")
                    || trimmed.starts_with("func ");

                if is_fn_start && trimmed.contains('(') {
                    // A new definition while still "inside" a function closes
                    // the previous one (its braces never balanced).
                    if in_function && let Some(start) = fn_start_line {
                        let length = line_num - start;
                        if length > 100 {
                            let item = TechDebtItem {
                                file_path: rel.clone(),
                                line_number: start + 1,
                                category: "long_function".to_string(),
                                description: format!(
                                    "Function '{}' is {} lines long (>100)",
                                    fn_name, length
                                ),
                                severity: "high".to_string(),
                            };
                            if severity_filter.is_none() || severity_filter == Some("high") {
                                items.push(item);
                            }
                        }
                    }

                    fn_start_line = Some(line_num);
                    fn_name = Self::extract_fn_name(trimmed);
                    brace_depth = 0;
                    in_function = true;
                }

                if in_function {
                    // Track brace balance to find where the function ends.
                    for ch in trimmed.chars() {
                        if ch == '{' {
                            brace_depth += 1;
                        } else if ch == '}' {
                            brace_depth -= 1;
                        }
                    }

                    // NOTE(review): for brace-less languages (Python, Ruby)
                    // depth stays 0, so the function "ends" on the line right
                    // after its definition and long functions are never
                    // flagged there — confirm whether that is acceptable.
                    if brace_depth <= 0
                        && fn_start_line.is_some()
                        && line_num > fn_start_line.unwrap_or(0)
                    {
                        if let Some(start) = fn_start_line {
                            let length = line_num - start + 1;
                            if length > 100 {
                                let item = TechDebtItem {
                                    file_path: rel.clone(),
                                    line_number: start + 1,
                                    category: "long_function".to_string(),
                                    description: format!(
                                        "Function '{}' is {} lines long (>100)",
                                        fn_name, length
                                    ),
                                    severity: "high".to_string(),
                                };
                                if severity_filter.is_none() || severity_filter == Some("high") {
                                    items.push(item);
                                }
                            }
                        }
                        in_function = false;
                        fn_start_line = None;
                    }
                }
            }
        }

        // Summarize counts per category for the report header.
        let mut by_category: HashMap<String, usize> = HashMap::new();
        for item in &items {
            *by_category.entry(item.category.clone()).or_insert(0) += 1;
        }

        let summary = by_category
            .iter()
            .map(|(k, v)| format!("  {}: {}", k, v))
            .collect::<Vec<_>>()
            .join("\n");

        let detail = serde_json::to_string_pretty(&items).unwrap_or_default();
        Ok(ToolOutput::text(format!(
            "Tech debt report: {} items found\n\nSummary:\n{}\n\nDetails:\n{}",
            items.len(),
            summary,
            detail
        )))
    }
1155
1156 fn measure_indent(line: &str) -> usize {
1158 let leading_spaces = line.len() - line.trim_start().len();
1159 let tab_count = line.chars().take_while(|c| *c == '\t').count();
1161 if tab_count > 0 {
1162 tab_count
1163 } else {
1164 leading_spaces / 4
1165 }
1166 }
1167
1168 fn extract_fn_name(line: &str) -> String {
1170 let prefixes = [
1172 "pub async fn ",
1173 "pub(crate) fn ",
1174 "pub fn ",
1175 "async fn ",
1176 "fn ",
1177 "export async function ",
1178 "export function ",
1179 "async function ",
1180 "function ",
1181 "func ",
1182 "def ",
1183 ];
1184 for prefix in &prefixes {
1185 if let Some(rest) = line.trim().strip_prefix(prefix)
1186 && let Some(paren_pos) = rest.find('(')
1187 {
1188 let name = rest[..paren_pos].trim();
1189 if !name.is_empty() {
1190 return name.to_string();
1191 }
1192 }
1193 }
1194 "<anonymous>".to_string()
1195 }
1196
1197 fn truncate_str(s: &str, max: usize) -> String {
1198 if s.len() <= max {
1199 s.to_string()
1200 } else {
1201 format!("{}...", &s[..max])
1202 }
1203 }
1204
1205 fn api_surface(&self, args: &Value) -> Result<ToolOutput, ToolError> {
1206 let root = self.resolve_path(args);
1207 if !root.exists() {
1208 return Err(ToolError::ExecutionFailed {
1209 name: "code_intelligence".to_string(),
1210 message: format!("Path does not exist: {}", root.display()),
1211 });
1212 }
1213
1214 let lang_filter = args.get("language").and_then(|v| v.as_str());
1215 let mut entries: Vec<ApiEntry> = Vec::new();
1216
1217 let walker = ignore::WalkBuilder::new(&root)
1218 .hidden(false)
1219 .git_ignore(true)
1220 .build();
1221
1222 for entry in walker {
1223 let entry = match entry {
1224 Ok(e) => e,
1225 Err(_) => continue,
1226 };
1227
1228 if !entry.file_type().is_some_and(|ft| ft.is_file()) {
1229 continue;
1230 }
1231
1232 let path = entry.path();
1233 let ext = path
1234 .extension()
1235 .map(|e| e.to_string_lossy().to_string())
1236 .unwrap_or_default();
1237
1238 if Self::is_binary_ext(&ext) {
1239 continue;
1240 }
1241
1242 let language = match Self::ext_to_language(&ext) {
1243 Some(l) => l,
1244 None => continue,
1245 };
1246
1247 if let Some(filter) = lang_filter {
1249 let filter_lower = filter.to_lowercase();
1250 if !language.to_lowercase().contains(&filter_lower) {
1251 continue;
1252 }
1253 }
1254
1255 let content = match std::fs::read_to_string(path) {
1256 Ok(c) => c,
1257 Err(_) => continue,
1258 };
1259
1260 let rel = path
1261 .strip_prefix(&root)
1262 .unwrap_or(path)
1263 .to_string_lossy()
1264 .to_string();
1265
1266 for (line_num, line) in content.lines().enumerate() {
1267 let trimmed = line.trim();
1268
1269 match language {
1270 "Rust" => {
1271 if (trimmed.starts_with("pub fn ") || trimmed.starts_with("pub async fn "))
1273 && trimmed.contains('(')
1274 {
1275 let name = Self::extract_fn_name(trimmed);
1276 entries.push(ApiEntry {
1277 name,
1278 kind: "function".to_string(),
1279 file_path: rel.clone(),
1280 line_number: line_num + 1,
1281 signature: trimmed.to_string(),
1282 visibility: "public".to_string(),
1283 });
1284 }
1285 if trimmed.starts_with("pub struct ") {
1287 let name = trimmed
1288 .strip_prefix("pub struct ")
1289 .and_then(|r| {
1290 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1291 })
1292 .unwrap_or("")
1293 .to_string();
1294 entries.push(ApiEntry {
1295 name,
1296 kind: "struct".to_string(),
1297 file_path: rel.clone(),
1298 line_number: line_num + 1,
1299 signature: trimmed.to_string(),
1300 visibility: "public".to_string(),
1301 });
1302 }
1303 if trimmed.starts_with("pub trait ") {
1305 let name = trimmed
1306 .strip_prefix("pub trait ")
1307 .and_then(|r| {
1308 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1309 })
1310 .unwrap_or("")
1311 .to_string();
1312 entries.push(ApiEntry {
1313 name,
1314 kind: "trait".to_string(),
1315 file_path: rel.clone(),
1316 line_number: line_num + 1,
1317 signature: trimmed.to_string(),
1318 visibility: "public".to_string(),
1319 });
1320 }
1321 if trimmed.starts_with("pub enum ") {
1323 let name = trimmed
1324 .strip_prefix("pub enum ")
1325 .and_then(|r| {
1326 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1327 })
1328 .unwrap_or("")
1329 .to_string();
1330 entries.push(ApiEntry {
1331 name,
1332 kind: "enum".to_string(),
1333 file_path: rel.clone(),
1334 line_number: line_num + 1,
1335 signature: trimmed.to_string(),
1336 visibility: "public".to_string(),
1337 });
1338 }
1339 }
1340 "Python" => {
1341 if line.starts_with("def ") && trimmed.contains('(') {
1343 let name = Self::extract_fn_name(trimmed);
1344 entries.push(ApiEntry {
1345 name,
1346 kind: "function".to_string(),
1347 file_path: rel.clone(),
1348 line_number: line_num + 1,
1349 signature: trimmed.to_string(),
1350 visibility: "public".to_string(),
1351 });
1352 }
1353 if line.starts_with("class ") {
1355 let name = trimmed
1356 .strip_prefix("class ")
1357 .and_then(|r| {
1358 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1359 })
1360 .unwrap_or("")
1361 .to_string();
1362 entries.push(ApiEntry {
1363 name,
1364 kind: "class".to_string(),
1365 file_path: rel.clone(),
1366 line_number: line_num + 1,
1367 signature: trimmed.to_string(),
1368 visibility: "public".to_string(),
1369 });
1370 }
1371 }
1372 "JavaScript" | "JavaScript (JSX)" | "TypeScript" | "TypeScript (TSX)" => {
1373 if (trimmed.starts_with("export function ")
1375 || trimmed.starts_with("export async function "))
1376 && trimmed.contains('(')
1377 {
1378 let name = Self::extract_fn_name(trimmed);
1379 entries.push(ApiEntry {
1380 name,
1381 kind: "function".to_string(),
1382 file_path: rel.clone(),
1383 line_number: line_num + 1,
1384 signature: trimmed.to_string(),
1385 visibility: "public".to_string(),
1386 });
1387 }
1388 if trimmed.starts_with("export class ") {
1390 let name = trimmed
1391 .strip_prefix("export class ")
1392 .and_then(|r| {
1393 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1394 })
1395 .unwrap_or("")
1396 .to_string();
1397 entries.push(ApiEntry {
1398 name,
1399 kind: "class".to_string(),
1400 file_path: rel.clone(),
1401 line_number: line_num + 1,
1402 signature: trimmed.to_string(),
1403 visibility: "public".to_string(),
1404 });
1405 }
1406 if trimmed.starts_with("export const ") {
1408 let name = trimmed
1409 .strip_prefix("export const ")
1410 .and_then(|r| {
1411 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1412 })
1413 .unwrap_or("")
1414 .to_string();
1415 entries.push(ApiEntry {
1416 name,
1417 kind: "module".to_string(),
1418 file_path: rel.clone(),
1419 line_number: line_num + 1,
1420 signature: trimmed.to_string(),
1421 visibility: "public".to_string(),
1422 });
1423 }
1424 }
1425 _ => {}
1426 }
1427 }
1428 }
1429
1430 let out = serde_json::to_string_pretty(&entries).unwrap_or_default();
1431 Ok(ToolOutput::text(format!(
1432 "API surface: {} public entries\n{}",
1433 entries.len(),
1434 out
1435 )))
1436 }
1437
1438 fn dependency_map(&self, args: &Value) -> Result<ToolOutput, ToolError> {
1439 let root = self.resolve_path(args);
1440 if !root.exists() {
1441 return Err(ToolError::ExecutionFailed {
1442 name: "code_intelligence".to_string(),
1443 message: format!("Path does not exist: {}", root.display()),
1444 });
1445 }
1446
1447 let mut deps: Vec<DependencyEntry> = Vec::new();
1448
1449 Self::find_and_parse_files(&root, "Cargo.toml", |path, content| {
1451 let rel = path
1452 .strip_prefix(&root)
1453 .unwrap_or(path)
1454 .to_string_lossy()
1455 .to_string();
1456 deps.extend(Self::parse_cargo_toml(&content, &rel));
1457 });
1458
1459 Self::find_and_parse_files(&root, "package.json", |path, content| {
1461 let rel = path
1462 .strip_prefix(&root)
1463 .unwrap_or(path)
1464 .to_string_lossy()
1465 .to_string();
1466 deps.extend(Self::parse_package_json(&content, &rel));
1467 });
1468
1469 Self::find_and_parse_files(&root, "requirements.txt", |path, content| {
1471 let rel = path
1472 .strip_prefix(&root)
1473 .unwrap_or(path)
1474 .to_string_lossy()
1475 .to_string();
1476 deps.extend(Self::parse_requirements_txt(&content, &rel));
1477 });
1478
1479 Self::find_and_parse_files(&root, "go.mod", |path, content| {
1481 let rel = path
1482 .strip_prefix(&root)
1483 .unwrap_or(path)
1484 .to_string_lossy()
1485 .to_string();
1486 deps.extend(Self::parse_go_mod(&content, &rel));
1487 });
1488
1489 Self::find_and_parse_files(&root, "Gemfile", |path, content| {
1491 let rel = path
1492 .strip_prefix(&root)
1493 .unwrap_or(path)
1494 .to_string_lossy()
1495 .to_string();
1496 deps.extend(Self::parse_gemfile(&content, &rel));
1497 });
1498
1499 let out = serde_json::to_string_pretty(&deps).unwrap_or_default();
1500 Ok(ToolOutput::text(format!(
1501 "Dependency map: {} dependencies\n{}",
1502 deps.len(),
1503 out
1504 )))
1505 }
1506
1507 fn find_and_parse_files<F>(root: &PathBuf, filename: &str, mut handler: F)
1513 where
1514 F: FnMut(&std::path::Path, String),
1515 {
1516 let walker = ignore::WalkBuilder::new(root)
1517 .hidden(false)
1518 .git_ignore(true)
1519 .build();
1520
1521 for entry in walker {
1522 let entry = match entry {
1523 Ok(e) => e,
1524 Err(_) => continue,
1525 };
1526
1527 if !entry.file_type().is_some_and(|ft| ft.is_file()) {
1528 continue;
1529 }
1530
1531 if entry.file_name().to_string_lossy() == filename
1532 && let Ok(content) = std::fs::read_to_string(entry.path())
1533 {
1534 handler(entry.path(), content);
1535 }
1536 }
1537 }
1538
1539 fn parse_cargo_toml(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1541 let mut deps = Vec::new();
1542 let mut current_section = String::new();
1543
1544 for line in content.lines() {
1545 let trimmed = line.trim();
1546
1547 if trimmed.starts_with('[') && trimmed.ends_with(']') {
1549 current_section = trimmed[1..trimmed.len() - 1].to_string();
1550 continue;
1551 }
1552
1553 if trimmed.starts_with('[') {
1555 if let Some(end) = trimmed.find(']') {
1556 current_section = trimmed[1..end].to_string();
1557 }
1558 continue;
1559 }
1560
1561 let dep_type = match current_section.as_str() {
1562 "dependencies" | "workspace.dependencies" => "runtime",
1563 "dev-dependencies" => "dev",
1564 "build-dependencies" => "build",
1565 s if s.ends_with(".dependencies") && !s.contains("dev") && !s.contains("build") => {
1566 "runtime"
1567 }
1568 _ => continue,
1569 };
1570
1571 if let Some(eq_pos) = trimmed.find('=') {
1573 let name = trimmed[..eq_pos].trim().to_string();
1574 if name.is_empty() || name.starts_with('#') {
1575 continue;
1576 }
1577 let value_part = trimmed[eq_pos + 1..].trim();
1578
1579 let version = if value_part.starts_with('"') {
1580 value_part.trim_matches('"').trim_matches('\'').to_string()
1582 } else if value_part.starts_with('{') {
1583 Self::extract_toml_inline_version(value_part)
1585 } else {
1586 value_part.to_string()
1587 };
1588
1589 deps.push(DependencyEntry {
1590 name,
1591 version,
1592 dep_type: dep_type.to_string(),
1593 source_file: source_file.to_string(),
1594 });
1595 }
1596 }
1597
1598 deps
1599 }
1600
1601 fn extract_toml_inline_version(inline: &str) -> String {
1603 if let Some(ver_pos) = inline.find("version") {
1605 let after_key = &inline[ver_pos + 7..];
1606 if let Some(eq_pos) = after_key.find('=') {
1607 let after_eq = after_key[eq_pos + 1..].trim();
1608 if let Some(stripped) = after_eq.strip_prefix('"')
1609 && let Some(end_quote) = stripped.find('"')
1610 {
1611 return stripped[..end_quote].to_string();
1612 }
1613 }
1614 }
1615 if inline.contains("workspace") {
1617 return "workspace".to_string();
1618 }
1619 "*".to_string()
1620 }
1621
1622 fn parse_package_json(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1624 let mut deps = Vec::new();
1625
1626 let parsed: Value = match serde_json::from_str(content) {
1627 Ok(v) => v,
1628 Err(_) => return deps,
1629 };
1630
1631 let sections = [
1632 ("dependencies", "runtime"),
1633 ("devDependencies", "dev"),
1634 ("peerDependencies", "runtime"),
1635 ("optionalDependencies", "optional"),
1636 ];
1637
1638 for (key, dep_type) in §ions {
1639 if let Some(obj) = parsed.get(key).and_then(|v| v.as_object()) {
1640 for (name, version) in obj {
1641 deps.push(DependencyEntry {
1642 name: name.clone(),
1643 version: version.as_str().unwrap_or("*").to_string(),
1644 dep_type: dep_type.to_string(),
1645 source_file: source_file.to_string(),
1646 });
1647 }
1648 }
1649 }
1650
1651 deps
1652 }
1653
1654 fn parse_requirements_txt(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1656 let mut deps = Vec::new();
1657
1658 for line in content.lines() {
1659 let trimmed = line.trim();
1660 if trimmed.is_empty() || trimmed.starts_with('#') || trimmed.starts_with('-') {
1661 continue;
1662 }
1663
1664 let (name, version) = if let Some(pos) = trimmed.find("==") {
1666 (trimmed[..pos].trim(), trimmed[pos + 2..].trim())
1667 } else if let Some(pos) = trimmed.find(">=") {
1668 (trimmed[..pos].trim(), trimmed[pos..].trim())
1669 } else if let Some(pos) = trimmed.find("~=") {
1670 (trimmed[..pos].trim(), trimmed[pos..].trim())
1671 } else if let Some(pos) = trimmed.find("<=") {
1672 (trimmed[..pos].trim(), trimmed[pos..].trim())
1673 } else if let Some(pos) = trimmed.find("!=") {
1674 (trimmed[..pos].trim(), trimmed[pos..].trim())
1675 } else {
1676 (trimmed, "*")
1677 };
1678
1679 if !name.is_empty() {
1680 deps.push(DependencyEntry {
1681 name: name.to_string(),
1682 version: version.to_string(),
1683 dep_type: "runtime".to_string(),
1684 source_file: source_file.to_string(),
1685 });
1686 }
1687 }
1688
1689 deps
1690 }
1691
1692 fn parse_go_mod(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1694 let mut deps = Vec::new();
1695 let mut in_require = false;
1696
1697 for line in content.lines() {
1698 let trimmed = line.trim();
1699
1700 if trimmed == "require (" {
1701 in_require = true;
1702 continue;
1703 }
1704 if trimmed == ")" {
1705 in_require = false;
1706 continue;
1707 }
1708
1709 if trimmed.starts_with("require ") && !trimmed.contains('(') {
1711 let rest = trimmed.strip_prefix("require ").unwrap_or("").trim();
1712 let parts: Vec<&str> = rest.split_whitespace().collect();
1713 if parts.len() >= 2 {
1714 deps.push(DependencyEntry {
1715 name: parts[0].to_string(),
1716 version: parts[1].to_string(),
1717 dep_type: "runtime".to_string(),
1718 source_file: source_file.to_string(),
1719 });
1720 }
1721 continue;
1722 }
1723
1724 if in_require && !trimmed.is_empty() && !trimmed.starts_with("//") {
1726 let clean = if let Some(pos) = trimmed.find("//") {
1727 trimmed[..pos].trim()
1728 } else {
1729 trimmed
1730 };
1731 let parts: Vec<&str> = clean.split_whitespace().collect();
1732 if parts.len() >= 2 {
1733 let dep_type = if parts.len() > 2 && parts[2] == "// indirect" {
1734 "optional"
1735 } else {
1736 "runtime"
1737 };
1738 deps.push(DependencyEntry {
1739 name: parts[0].to_string(),
1740 version: parts[1].to_string(),
1741 dep_type: dep_type.to_string(),
1742 source_file: source_file.to_string(),
1743 });
1744 }
1745 }
1746 }
1747
1748 deps
1749 }
1750
1751 fn parse_gemfile(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1753 let mut deps = Vec::new();
1754 let mut in_group: Option<String> = None;
1755
1756 for line in content.lines() {
1757 let trimmed = line.trim();
1758
1759 if trimmed.starts_with("group ") {
1760 if trimmed.contains(":development") || trimmed.contains(":test") {
1761 in_group = Some("dev".to_string());
1762 } else {
1763 in_group = Some("runtime".to_string());
1764 }
1765 continue;
1766 }
1767 if trimmed == "end" {
1768 in_group = None;
1769 continue;
1770 }
1771
1772 if trimmed.starts_with("gem ") {
1773 let rest = trimmed.strip_prefix("gem ").unwrap_or("").trim();
1774 let parts: Vec<&str> = rest.split(',').collect();
1776 if let Some(name_part) = parts.first() {
1777 let name = name_part
1778 .trim()
1779 .trim_matches('\'')
1780 .trim_matches('"')
1781 .to_string();
1782 let version = if parts.len() > 1 {
1783 parts[1]
1784 .trim()
1785 .trim_matches('\'')
1786 .trim_matches('"')
1787 .to_string()
1788 } else {
1789 "*".to_string()
1790 };
1791 let dep_type = in_group.as_deref().unwrap_or("runtime").to_string();
1792 deps.push(DependencyEntry {
1793 name,
1794 version,
1795 dep_type,
1796 source_file: source_file.to_string(),
1797 });
1798 }
1799 }
1800 }
1801
1802 deps
1803 }
1804}
1805
#[async_trait]
impl Tool for CodeIntelligenceTool {
    /// Stable identifier used for registry lookup and in error payloads.
    fn name(&self) -> &str {
        "code_intelligence"
    }

    /// Capability summary shown to the caller, including the full action
    /// vocabulary (must stay in sync with the schema enum and `execute`).
    fn description(&self) -> &str {
        "Cross-language codebase analysis: architecture detection, pattern recognition, \
         tech debt scanning, API surface extraction. Actions: analyze_architecture, \
         detect_patterns, translate_snippet, compare_implementations, tech_debt_report, \
         api_surface, dependency_map."
    }

    /// JSON schema for `execute` arguments.
    ///
    /// Only `action` is required; every other property applies to a subset
    /// of actions, as noted in each description.
    fn parameters_schema(&self) -> Value {
        json!({
            "type": "object",
            "properties": {
                "action": {
                    "type": "string",
                    "enum": [
                        "analyze_architecture",
                        "detect_patterns",
                        "translate_snippet",
                        "compare_implementations",
                        "tech_debt_report",
                        "api_surface",
                        "dependency_map"
                    ],
                    "description": "Action to perform"
                },
                "path": {
                    "type": "string",
                    "description": "Target path (defaults to workspace root)"
                },
                "force": {
                    "type": "boolean",
                    "description": "Force re-analysis ignoring cache (for analyze_architecture)"
                },
                "pattern": {
                    "type": "string",
                    "enum": ["singleton", "factory", "observer", "builder", "repository"],
                    "description": "Design pattern to detect (for detect_patterns)"
                },
                "code": {
                    "type": "string",
                    "description": "Source code snippet (for translate_snippet)"
                },
                "from_language": {
                    "type": "string",
                    "description": "Source language (for translate_snippet)"
                },
                "to_language": {
                    "type": "string",
                    "description": "Target language (for translate_snippet)"
                },
                "file_a": {
                    "type": "string",
                    "description": "First file path (for compare_implementations)"
                },
                "file_b": {
                    "type": "string",
                    "description": "Second file path (for compare_implementations)"
                },
                "severity": {
                    "type": "string",
                    "enum": ["low", "medium", "high"],
                    "description": "Filter by severity (for tech_debt_report)"
                },
                "language": {
                    "type": "string",
                    "description": "Filter by language (for api_surface)"
                }
            },
            "required": ["action"]
        })
    }

    /// All exposed actions are analyses over the workspace; the tool is
    /// declared read-only. NOTE(review): internal caching may still write a
    /// cache file — confirm that is acceptable under ReadOnly.
    fn risk_level(&self) -> RiskLevel {
        RiskLevel::ReadOnly
    }

    /// Generous budget: several actions walk the whole workspace tree.
    fn timeout(&self) -> Duration {
        Duration::from_secs(120)
    }

    /// Dispatch on the required `action` argument to the matching handler.
    ///
    /// Returns `InvalidArguments` when `action` is missing or unrecognized;
    /// the handlers themselves run synchronously inside this async fn.
    async fn execute(&self, args: Value) -> Result<ToolOutput, ToolError> {
        let action = args.get("action").and_then(|v| v.as_str()).ok_or_else(|| {
            ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'action'".to_string(),
            }
        })?;

        match action {
            "analyze_architecture" => self.analyze_architecture(&args),
            "detect_patterns" => self.detect_patterns(&args),
            "translate_snippet" => self.translate_snippet(&args),
            "compare_implementations" => self.compare_implementations(&args),
            "tech_debt_report" => self.tech_debt_report(&args),
            "api_surface" => self.api_surface(&args),
            "dependency_map" => self.dependency_map(&args),
            other => Err(ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: format!(
                    "Unknown action '{}'. Valid actions: analyze_architecture, detect_patterns, translate_snippet, compare_implementations, tech_debt_report, api_surface, dependency_map",
                    other
                ),
            }),
        }
    }
}
1921
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// Build a tool rooted at `dir`, canonicalized so symlinked temp dirs
    /// (e.g. /tmp on macOS) resolve consistently.
    fn make_tool(dir: &std::path::Path) -> CodeIntelligenceTool {
        let workspace = dir.canonicalize().unwrap();
        CodeIntelligenceTool::new(workspace)
    }

    /// Static metadata: name, description, risk, timeout.
    #[test]
    fn test_tool_properties() {
        let dir = TempDir::new().unwrap();
        let tool = make_tool(dir.path());
        assert_eq!(tool.name(), "code_intelligence");
        assert!(!tool.description().is_empty());
        assert_eq!(tool.risk_level(), RiskLevel::ReadOnly);
        assert_eq!(tool.timeout(), Duration::from_secs(120));
    }

    /// Schema exposes the key properties and requires `action`.
    #[test]
    fn test_schema_validation() {
        let dir = TempDir::new().unwrap();
        let tool = make_tool(dir.path());
        let schema = tool.parameters_schema();
        assert!(schema.is_object());
        let props = schema.get("properties").unwrap();
        assert!(props.get("action").is_some());
        assert!(props.get("path").is_some());
        assert!(props.get("code").is_some());
        let required = schema.get("required").unwrap().as_array().unwrap();
        assert!(required.contains(&json!("action")));
    }

    /// A minimal Rust project is detected: language, entry point, config.
    #[tokio::test]
    async fn test_analyze_architecture_basic() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        let src_dir = workspace.join("src");
        std::fs::create_dir_all(&src_dir).unwrap();
        std::fs::write(
            src_dir.join("main.rs"),
            "fn main() {\n    println!(\"hello\");\n}\n",
        )
        .unwrap();
        std::fs::write(
            src_dir.join("lib.rs"),
            "pub fn greet() -> String {\n    \"hello\".to_string()\n}\n",
        )
        .unwrap();
        std::fs::write(workspace.join("Cargo.toml"), "[package]\nname = \"demo\"\n").unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "analyze_architecture"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("Architecture snapshot"));
        assert!(text.contains("Rust"));
        assert!(text.contains("main.rs"));
        assert!(text.contains("Cargo.toml"));
    }

    /// Second run hits the cache; `force: true` bypasses it again.
    #[tokio::test]
    async fn test_analyze_caching() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        let src_dir = workspace.join("src");
        std::fs::create_dir_all(&src_dir).unwrap();
        std::fs::write(src_dir.join("main.rs"), "fn main() {}\n").unwrap();

        let tool = CodeIntelligenceTool::new(workspace);

        // First analysis: fresh, not served from cache.
        let result1 = tool
            .execute(json!({"action": "analyze_architecture"}))
            .await
            .unwrap();
        assert!(result1.content.contains("Architecture snapshot:"));
        assert!(!result1.content.contains("(cached)"));

        // Unchanged workspace: second call is served from cache.
        let result2 = tool
            .execute(json!({"action": "analyze_architecture"}))
            .await
            .unwrap();
        assert!(result2.content.contains("(cached)"));

        // `force` re-analyzes even with a warm cache.
        let result3 = tool
            .execute(json!({"action": "analyze_architecture", "force": true}))
            .await
            .unwrap();
        assert!(!result3.content.contains("(cached)"));
    }

    /// TODO/FIXME/HACK markers are surfaced with their trailing text.
    #[tokio::test]
    async fn test_detect_patterns_todo() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("example.rs"),
            "fn main() {\n    // TODO: fix this later\n    // FIXME: broken\n    // HACK: workaround\n    println!(\"ok\");\n}\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "detect_patterns"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("TODO"));
        assert!(text.contains("FIXME"));
        assert!(text.contains("HACK"));
        assert!(text.contains("fix this later"));
    }

    /// translate_snippet emits a structured prompt (not a translation):
    /// both language semantics sections plus the original code.
    #[tokio::test]
    async fn test_translate_returns_prompt() {
        let dir = TempDir::new().unwrap();
        let tool = make_tool(dir.path());

        let result = tool
            .execute(json!({
                "action": "translate_snippet",
                "code": "fn add(a: i32, b: i32) -> i32 { a + b }",
                "from_language": "Rust",
                "to_language": "Python"
            }))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("Translate the following Rust code to Python"));
        assert!(text.contains("fn add"));
        assert!(text.contains("Rust Semantics"));
        assert!(text.contains("Python Semantics"));
        assert!(text.contains("Ownership"));
        assert!(text.contains("Dynamic typing"));
    }

    /// Cross-language comparison reports both files with per-file metrics.
    #[tokio::test]
    async fn test_compare_implementations() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("sort_a.rs"),
            "pub fn bubble_sort(arr: &mut Vec<i32>) {\n    let n = arr.len();\n    for i in 0..n {\n        for j in 0..n-1-i {\n            if arr[j] > arr[j+1] {\n                arr.swap(j, j+1);\n            }\n        }\n    }\n}\n",
        )
        .unwrap();
        std::fs::write(
            workspace.join("sort_b.py"),
            "def quick_sort(arr):\n    if len(arr) <= 1:\n        return arr\n    pivot = arr[0]\n    left = [x for x in arr[1:] if x <= pivot]\n    right = [x for x in arr[1:] if x > pivot]\n    return quick_sort(left) + [pivot] + quick_sort(right)\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({
                "action": "compare_implementations",
                "file_a": "sort_a.rs",
                "file_b": "sort_b.py"
            }))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("Implementation Comparison"));
        assert!(text.contains("sort_a.rs"));
        assert!(text.contains("sort_b.py"));
        assert!(text.contains("Rust"));
        assert!(text.contains("Python"));
        assert!(text.contains("Lines"));
        assert!(text.contains("Functions"));
    }

    /// Tech debt report categorizes markers (lowercase category names).
    #[tokio::test]
    async fn test_tech_debt_todo_fixme() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("messy.rs"),
            "fn main() {\n    // TODO: refactor this\n    // FIXME: memory leak\n    println!(\"ok\");\n}\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "tech_debt_report"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("Tech debt report"));
        assert!(text.contains("todo"));
        assert!(text.contains("fixme"));
        assert!(text.contains("refactor this"));
        assert!(text.contains("memory leak"));
    }

    /// Public Rust items are listed; private items are excluded.
    #[tokio::test]
    async fn test_api_surface_rust() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("api.rs"),
            "pub fn create_user(name: &str) -> User {\n    User { name: name.to_string() }\n}\n\n\
             pub struct User {\n    pub name: String,\n}\n\n\
             pub trait Greet {\n    fn greet(&self) -> String;\n}\n\n\
             pub enum Color {\n    Red,\n    Blue,\n}\n\n\
             fn private_helper() {}\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "api_surface", "language": "rust"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("create_user"));
        assert!(text.contains("User"));
        assert!(text.contains("Greet"));
        assert!(text.contains("Color"));
        assert!(!text.contains("private_helper"));
    }

    /// Cargo.toml: runtime, dev, and build sections all land in the map,
    /// including the inline-table form of `tokio`.
    #[tokio::test]
    async fn test_dependency_map_cargo() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("Cargo.toml"),
            "[package]\nname = \"demo\"\nversion = \"0.1.0\"\n\n\
             [dependencies]\nserde = \"1.0\"\ntokio = { version = \"1.47\", features = [\"full\"] }\n\n\
             [dev-dependencies]\ntempfile = \"3.14\"\n\n\
             [build-dependencies]\ncc = \"1.0\"\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "dependency_map"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("serde"));
        assert!(text.contains("tokio"));
        assert!(text.contains("tempfile"));
        assert!(text.contains("cc"));
        assert!(text.contains("runtime"));
        assert!(text.contains("dev"));
        assert!(text.contains("build"));
    }

    /// package.json: dependencies and devDependencies are both parsed.
    #[tokio::test]
    async fn test_dependency_map_npm() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("package.json"),
            r#"{
  "name": "demo",
  "dependencies": {
    "express": "^4.18.0",
    "lodash": "^4.17.21"
  },
  "devDependencies": {
    "jest": "^29.0.0"
  }
}"#,
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "dependency_map"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("express"));
        assert!(text.contains("lodash"));
        assert!(text.contains("jest"));
        assert!(text.contains("runtime"));
        assert!(text.contains("dev"));
    }

    /// Cache save + load round-trips an ArchitectureSnapshot intact.
    #[tokio::test]
    async fn test_state_roundtrip() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();
        let tool = CodeIntelligenceTool::new(workspace);

        let snapshot = ArchitectureSnapshot {
            project_root: "/test".to_string(),
            languages: vec![LanguageStats {
                language: "Rust".to_string(),
                files: 10,
                lines: 500,
                extensions: vec!["rs".to_string()],
            }],
            directories: vec![],
            entry_points: vec!["src/main.rs".to_string()],
            config_files: vec!["Cargo.toml".to_string()],
            total_files: 10,
            total_lines: 500,
            analyzed_at: Utc::now(),
        };
        let cache = CodeIntelCache {
            last_snapshot: Some(snapshot),
        };
        tool.save_cache(&cache).unwrap();

        let loaded = tool.load_cache();
        assert!(loaded.last_snapshot.is_some());
        let loaded_snap = loaded.last_snapshot.unwrap();
        assert_eq!(loaded_snap.project_root, "/test");
        assert_eq!(loaded_snap.languages.len(), 1);
        assert_eq!(loaded_snap.languages[0].language, "Rust");
        assert_eq!(loaded_snap.total_files, 10);
        assert_eq!(loaded_snap.total_lines, 500);
    }

    /// Unrecognized actions produce InvalidArguments naming the action.
    #[tokio::test]
    async fn test_unknown_action() {
        let dir = TempDir::new().unwrap();
        let tool = make_tool(dir.path());

        let result = tool.execute(json!({"action": "nonexistent_action"})).await;
        assert!(result.is_err());
        let err = result.unwrap_err();
        match err {
            ToolError::InvalidArguments { name, reason } => {
                assert_eq!(name, "code_intelligence");
                assert!(reason.contains("Unknown action"));
                assert!(reason.contains("nonexistent_action"));
            }
            other => panic!("Expected InvalidArguments, got {:?}", other),
        }
    }
}