1use async_trait::async_trait;
5use chrono::{DateTime, Utc};
6use rustant_core::error::ToolError;
7use rustant_core::types::{RiskLevel, ToolOutput};
8use serde::{Deserialize, Serialize};
9use serde_json::{json, Value};
10use std::collections::HashMap;
11use std::path::PathBuf;
12use std::time::Duration;
13
14use crate::registry::Tool;
15
/// Per-language aggregate produced by the architecture analysis.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct LanguageStats {
    /// Display name, e.g. "Rust" (as mapped by `ext_to_language`).
    language: String,
    /// Number of files attributed to this language.
    files: usize,
    /// Total line count across those files.
    lines: usize,
    /// Sorted list of file extensions observed for this language.
    extensions: Vec<String>,
}
27
/// A directory seen during the walk, with its heuristic role classification.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct DirectoryInfo {
    /// Directory path relative to the analyzed root.
    path: String,
    /// One of "source", "test", "docs", "build", "vendor", "config"
    /// (see `classify_dir`).
    classification: String,
    /// Number of (non-binary) files counted directly in this directory.
    file_count: usize,
}
34
/// Full result of `analyze_architecture`; persisted in the on-disk cache.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ArchitectureSnapshot {
    /// Absolute (or as-resolved) path of the analyzed root.
    project_root: String,
    /// Per-language stats, sorted by file count descending.
    languages: Vec<LanguageStats>,
    /// Per-directory info, sorted by file count descending.
    directories: Vec<DirectoryInfo>,
    /// Relative paths of recognized entry-point files (see `is_entry_point`).
    entry_points: Vec<String>,
    /// Relative paths of recognized configuration files (see `is_config_file`).
    config_files: Vec<String>,
    /// Total non-binary files counted (capped by the walker's file limit).
    total_files: usize,
    /// Total lines across all counted files.
    total_lines: usize,
    /// Timestamp of when this snapshot was generated.
    analyzed_at: DateTime<Utc>,
}
46
/// One heuristic hit reported by `detect_patterns`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PatternMatch {
    /// Pattern label, e.g. "TODO", "Singleton", "Factory".
    pattern_name: String,
    /// File path relative to the scanned root.
    file_path: String,
    /// 1-based line number of the match.
    line_number: usize,
    /// The trimmed source line that matched.
    snippet: String,
    /// Fixed per-pattern confidence (not computed from context).
    confidence: f64,
}
55
/// One finding reported by `tech_debt_report`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TechDebtItem {
    /// File path relative to the scanned root.
    file_path: String,
    /// 1-based line number of the finding.
    line_number: usize,
    /// Category: "todo", "fixme", "hack", "deep_nesting", or "long_function".
    category: String,
    /// Human-readable description (usually the offending line or a summary).
    description: String,
    /// Severity string, e.g. "medium" or "high".
    severity: String,
}
64
/// One public API item discovered by the API-surface scan.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ApiEntry {
    /// Identifier of the item.
    name: String,
    /// Item kind, e.g. "function", "struct", "trait", "enum".
    kind: String,
    /// File path relative to the scanned root.
    file_path: String,
    /// 1-based line number of the declaration.
    line_number: usize,
    /// The trimmed declaration line, used as the signature.
    signature: String,
    /// Visibility label, e.g. "public".
    visibility: String,
}
74
/// One declared dependency found in a manifest file.
/// NOTE(review): not referenced in this chunk — presumably used by a
/// dependency-scanning command later in the file; confirm.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct DependencyEntry {
    /// Dependency name as declared in the manifest.
    name: String,
    /// Declared version or version requirement.
    version: String,
    /// Dependency class, e.g. runtime vs dev (exact values set by the scanner).
    dep_type: String,
    /// Manifest file the dependency was read from.
    source_file: String,
}
82
/// On-disk cache persisted under `.rustant/code_intel/cache.json`.
#[derive(Debug, Default, Serialize, Deserialize)]
struct CodeIntelCache {
    /// Most recent architecture snapshot, if any analysis has run.
    last_snapshot: Option<ArchitectureSnapshot>,
}
87
/// Tool providing code-intelligence commands (architecture analysis, pattern
/// detection, tech-debt reporting, snippet translation, file comparison)
/// rooted at a single workspace directory.
pub struct CodeIntelligenceTool {
    /// Workspace root against which relative paths are resolved.
    workspace: PathBuf,
}
95
96impl CodeIntelligenceTool {
    /// Create a tool instance rooted at the given workspace directory.
    pub fn new(workspace: PathBuf) -> Self {
        Self { workspace }
    }
100
101 fn state_path(&self) -> PathBuf {
102 self.workspace
103 .join(".rustant")
104 .join("code_intel")
105 .join("cache.json")
106 }
107
108 fn load_cache(&self) -> CodeIntelCache {
109 let path = self.state_path();
110 if path.exists() {
111 std::fs::read_to_string(&path)
112 .ok()
113 .and_then(|s| serde_json::from_str(&s).ok())
114 .unwrap_or_default()
115 } else {
116 CodeIntelCache::default()
117 }
118 }
119
120 fn save_cache(&self, cache: &CodeIntelCache) -> Result<(), ToolError> {
121 let path = self.state_path();
122 if let Some(parent) = path.parent() {
123 std::fs::create_dir_all(parent).map_err(|e| ToolError::ExecutionFailed {
124 name: "code_intelligence".to_string(),
125 message: format!("Failed to create cache dir: {}", e),
126 })?;
127 }
128 let json = serde_json::to_string_pretty(cache).map_err(|e| ToolError::ExecutionFailed {
129 name: "code_intelligence".to_string(),
130 message: format!("Failed to serialize cache: {}", e),
131 })?;
132 let tmp = path.with_extension("json.tmp");
133 std::fs::write(&tmp, &json).map_err(|e| ToolError::ExecutionFailed {
134 name: "code_intelligence".to_string(),
135 message: format!("Failed to write cache: {}", e),
136 })?;
137 std::fs::rename(&tmp, &path).map_err(|e| ToolError::ExecutionFailed {
138 name: "code_intelligence".to_string(),
139 message: format!("Failed to rename cache file: {}", e),
140 })?;
141 Ok(())
142 }
143
144 fn resolve_path(&self, args: &Value) -> PathBuf {
150 args.get("path")
151 .and_then(|v| v.as_str())
152 .map(|p| {
153 let pb = PathBuf::from(p);
154 if pb.is_absolute() {
155 pb
156 } else {
157 self.workspace.join(pb)
158 }
159 })
160 .unwrap_or_else(|| self.workspace.clone())
161 }
162
163 fn ext_to_language(ext: &str) -> Option<&'static str> {
165 match ext {
166 "rs" => Some("Rust"),
167 "py" | "pyi" => Some("Python"),
168 "js" | "mjs" | "cjs" => Some("JavaScript"),
169 "ts" | "mts" | "cts" => Some("TypeScript"),
170 "jsx" => Some("JavaScript (JSX)"),
171 "tsx" => Some("TypeScript (TSX)"),
172 "go" => Some("Go"),
173 "java" => Some("Java"),
174 "rb" => Some("Ruby"),
175 "c" | "h" => Some("C"),
176 "cpp" | "cc" | "cxx" | "hpp" | "hxx" => Some("C++"),
177 "cs" => Some("C#"),
178 "swift" => Some("Swift"),
179 "kt" | "kts" => Some("Kotlin"),
180 "sh" | "bash" | "zsh" => Some("Shell"),
181 "html" | "htm" => Some("HTML"),
182 "css" | "scss" | "sass" => Some("CSS"),
183 "json" => Some("JSON"),
184 "toml" => Some("TOML"),
185 "yaml" | "yml" => Some("YAML"),
186 "xml" => Some("XML"),
187 "md" | "markdown" => Some("Markdown"),
188 "sql" => Some("SQL"),
189 _ => None,
190 }
191 }
192
193 fn is_binary_ext(ext: &str) -> bool {
195 matches!(
196 ext,
197 "png"
198 | "jpg"
199 | "jpeg"
200 | "gif"
201 | "bmp"
202 | "ico"
203 | "svg"
204 | "woff"
205 | "woff2"
206 | "ttf"
207 | "otf"
208 | "eot"
209 | "pdf"
210 | "zip"
211 | "tar"
212 | "gz"
213 | "bz2"
214 | "xz"
215 | "7z"
216 | "rar"
217 | "exe"
218 | "dll"
219 | "so"
220 | "dylib"
221 | "o"
222 | "a"
223 | "class"
224 | "jar"
225 | "war"
226 | "pyc"
227 | "pyo"
228 | "wasm"
229 | "db"
230 | "sqlite"
231 | "lock"
232 )
233 }
234
235 fn classify_dir(name: &str) -> &'static str {
237 match name {
238 "src" | "lib" | "app" | "pkg" | "internal" | "cmd" => "source",
239 "test" | "tests" | "spec" | "specs" | "__tests__" | "test_data" | "testdata" => "test",
240 "doc" | "docs" | "documentation" => "docs",
241 "build" | "target" | "dist" | "out" | "output" | "bin" | "obj" => "build",
242 "vendor" | "node_modules" | "third_party" | "external" | "deps" => "vendor",
243 "config" | "configs" | "conf" | "etc" | "settings" | ".github" | ".vscode" => "config",
244 _ => "source",
245 }
246 }
247
248 fn is_entry_point(name: &str) -> bool {
250 matches!(
251 name,
252 "main.rs"
253 | "main.py"
254 | "__main__.py"
255 | "index.js"
256 | "index.ts"
257 | "index.tsx"
258 | "index.jsx"
259 | "main.go"
260 | "Main.java"
261 | "Program.cs"
262 | "main.c"
263 | "main.cpp"
264 | "main.rb"
265 | "app.py"
266 | "app.js"
267 | "app.ts"
268 | "server.js"
269 | "server.ts"
270 | "manage.py"
271 )
272 }
273
274 fn is_config_file(name: &str) -> bool {
276 matches!(
277 name,
278 "Cargo.toml"
279 | "package.json"
280 | "tsconfig.json"
281 | "pyproject.toml"
282 | "setup.py"
283 | "setup.cfg"
284 | "requirements.txt"
285 | "go.mod"
286 | "go.sum"
287 | "Gemfile"
288 | "Makefile"
289 | "CMakeLists.txt"
290 | "Dockerfile"
291 | "docker-compose.yml"
292 | "docker-compose.yaml"
293 | ".gitignore"
294 | ".editorconfig"
295 | "jest.config.js"
296 | "jest.config.ts"
297 | "webpack.config.js"
298 | "vite.config.ts"
299 | "vite.config.js"
300 | "babel.config.js"
301 | ".eslintrc.json"
302 | ".eslintrc.js"
303 | ".prettierrc"
304 | "tox.ini"
305 | "Pipfile"
306 | "poetry.lock"
307 | ".env.example"
308 )
309 }
310
311 fn count_lines(content: &str) -> usize {
313 if content.is_empty() {
314 0
315 } else {
316 content.lines().count()
317 }
318 }
319
    /// Walk the project tree (honoring .gitignore) and build an
    /// `ArchitectureSnapshot`: per-language stats, directory classification,
    /// entry points, config files, and total file/line counts.
    ///
    /// A previously saved snapshot is returned verbatim unless `force` is set.
    /// NOTE(review): the cache is not keyed by the `path` argument, so a
    /// cached snapshot for a *different* root is also served — confirm this
    /// is intended.
    fn analyze_architecture(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let force = args.get("force").and_then(|v| v.as_bool()).unwrap_or(false);

        // Serve the cached snapshot when one exists and no rescan is forced.
        if !force {
            let cache = self.load_cache();
            if let Some(snapshot) = &cache.last_snapshot {
                let out = serde_json::to_string_pretty(snapshot).unwrap_or_default();
                return Ok(ToolOutput::text(format!(
                    "Architecture snapshot (cached):\n{}",
                    out
                )));
            }
        }

        let root = self.resolve_path(args);
        if !root.exists() {
            return Err(ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Path does not exist: {}", root.display()),
            });
        }

        // language name -> (file count, line count, extensions seen)
        let mut lang_map: HashMap<String, (usize, usize, std::collections::HashSet<String>)> =
            HashMap::new();
        // relative dir path -> (classification, file count)
        let mut dir_map: HashMap<String, (String, usize)> = HashMap::new();
        let mut entry_points: Vec<String> = Vec::new();
        let mut config_files: Vec<String> = Vec::new();
        let mut total_files: usize = 0;
        let mut total_lines: usize = 0;
        // Hard cap so very large repositories cannot stall the tool.
        let max_files: usize = 5000;

        // `hidden(false)` visits dotfiles; .gitignore rules still apply.
        let walker = ignore::WalkBuilder::new(&root)
            .hidden(false)
            .git_ignore(true)
            .build();

        for entry in walker {
            let entry = match entry {
                Ok(e) => e,
                Err(_) => continue,
            };

            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                continue;
            }

            if total_files >= max_files {
                break;
            }

            let path = entry.path();
            // Path relative to the analyzed root, for display/storage.
            let rel = path
                .strip_prefix(&root)
                .unwrap_or(path)
                .to_string_lossy()
                .to_string();

            let file_name = path
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_default();

            let ext = path
                .extension()
                .map(|e| e.to_string_lossy().to_string())
                .unwrap_or_default();

            // Binary-like files are excluded from all counts.
            if Self::is_binary_ext(&ext) {
                continue;
            }

            total_files += 1;

            if Self::is_entry_point(&file_name) {
                entry_points.push(rel.clone());
            }

            if Self::is_config_file(&file_name) {
                config_files.push(rel.clone());
            }

            // Unreadable files (e.g. invalid UTF-8) count as zero lines.
            let lines = std::fs::read_to_string(path)
                .map(|c| Self::count_lines(&c))
                .unwrap_or(0);
            total_lines += lines;

            if let Some(lang) = Self::ext_to_language(&ext) {
                let entry = lang_map
                    .entry(lang.to_string())
                    .or_insert_with(|| (0, 0, std::collections::HashSet::new()));
                entry.0 += 1;
                entry.1 += lines;
                entry.2.insert(ext.clone());
            }

            // Attribute the file to its immediate parent directory.
            if let Some(parent) = path.parent() {
                let dir_rel = parent
                    .strip_prefix(&root)
                    .unwrap_or(parent)
                    .to_string_lossy()
                    .to_string();
                let dir_name = parent
                    .file_name()
                    .map(|n| n.to_string_lossy().to_string())
                    .unwrap_or_else(|| dir_rel.clone());
                // Classification is decided by the first file seen in the dir.
                let classification = Self::classify_dir(&dir_name).to_string();
                let dir_entry = dir_map
                    .entry(dir_rel)
                    .or_insert_with(|| (classification, 0));
                dir_entry.1 += 1;
            }
        }

        // Flatten the language map, sorting extensions for stable output.
        let mut languages: Vec<LanguageStats> = lang_map
            .into_iter()
            .map(|(lang, (files, lines, exts))| {
                let mut ext_vec: Vec<String> = exts.into_iter().collect();
                ext_vec.sort();
                LanguageStats {
                    language: lang,
                    files,
                    lines,
                    extensions: ext_vec,
                }
            })
            .collect();
        languages.sort_by(|a, b| b.files.cmp(&a.files));

        let mut directories: Vec<DirectoryInfo> = dir_map
            .into_iter()
            .map(|(path, (classification, file_count))| DirectoryInfo {
                path,
                classification,
                file_count,
            })
            .collect();
        directories.sort_by(|a, b| b.file_count.cmp(&a.file_count));

        entry_points.sort();
        config_files.sort();

        let snapshot = ArchitectureSnapshot {
            project_root: root.to_string_lossy().to_string(),
            languages,
            directories,
            entry_points,
            config_files,
            total_files,
            total_lines,
            analyzed_at: Utc::now(),
        };

        // Persist so subsequent calls (without `force`) reuse this snapshot.
        let cache = CodeIntelCache {
            last_snapshot: Some(snapshot.clone()),
        };
        self.save_cache(&cache)?;

        let out = serde_json::to_string_pretty(&snapshot).unwrap_or_default();
        Ok(ToolOutput::text(format!("Architecture snapshot:\n{}", out)))
    }
496
    /// Scan source files for marker comments and design-pattern heuristics.
    ///
    /// With no `pattern` argument (or "todo"/"fixme"/"hack"), TODO/FIXME/HACK
    /// markers are reported. With one of "singleton", "factory", "builder",
    /// "observer", or "repository", substring heuristics for that pattern are
    /// applied with a fixed per-pattern confidence score.
    fn detect_patterns(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let root = self.resolve_path(args);
        if !root.exists() {
            return Err(ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Path does not exist: {}", root.display()),
            });
        }

        let pattern_filter = args.get("pattern").and_then(|v| v.as_str());
        let mut matches: Vec<PatternMatch> = Vec::new();
        // Cap the scan so large repositories cannot stall the tool.
        let max_files: usize = 1000;
        let mut file_count: usize = 0;

        let walker = ignore::WalkBuilder::new(&root)
            .hidden(false)
            .git_ignore(true)
            .build();

        for entry in walker {
            let entry = match entry {
                Ok(e) => e,
                Err(_) => continue,
            };

            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                continue;
            }

            if file_count >= max_files {
                break;
            }

            let path = entry.path();
            let ext = path
                .extension()
                .map(|e| e.to_string_lossy().to_string())
                .unwrap_or_default();

            if Self::is_binary_ext(&ext) {
                continue;
            }

            // Only scan files with a recognized source-language extension.
            if Self::ext_to_language(&ext).is_none() {
                continue;
            }

            let content = match std::fs::read_to_string(path) {
                Ok(c) => c,
                Err(_) => continue,
            };

            file_count += 1;

            let rel = path
                .strip_prefix(&root)
                .unwrap_or(path)
                .to_string_lossy()
                .to_string();

            for (line_num, line) in content.lines().enumerate() {
                let trimmed = line.trim();

                // Marker comments are reported whenever no filter is set or a
                // marker-type filter is requested.
                if pattern_filter.is_none()
                    || pattern_filter == Some("todo")
                    || pattern_filter == Some("fixme")
                    || pattern_filter == Some("hack")
                {
                    if trimmed.contains("TODO") || trimmed.contains("FIXME") {
                        // A line containing both markers is reported once, as
                        // TODO (TODO takes precedence).
                        let pname = if trimmed.contains("TODO") {
                            "TODO"
                        } else {
                            "FIXME"
                        };
                        matches.push(PatternMatch {
                            pattern_name: pname.to_string(),
                            file_path: rel.clone(),
                            line_number: line_num + 1,
                            snippet: trimmed.to_string(),
                            confidence: 1.0,
                        });
                    }
                    if trimmed.contains("HACK") {
                        matches.push(PatternMatch {
                            pattern_name: "HACK".to_string(),
                            file_path: rel.clone(),
                            line_number: line_num + 1,
                            snippet: trimmed.to_string(),
                            confidence: 0.6,
                        });
                    }
                }

                // Design-pattern heuristics: plain substring checks with
                // hard-coded confidence values (not computed from context).
                if let Some(filter) = pattern_filter {
                    match filter {
                        "singleton" => {
                            if (trimmed.contains("static")
                                && (trimmed.contains("instance") || trimmed.contains("INSTANCE")))
                                || trimmed.contains("get_instance")
                                || trimmed.contains("getInstance")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Singleton".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.8,
                                });
                            }
                        }
                        "factory" => {
                            if trimmed.contains("fn create_")
                                || trimmed.contains("fn new_")
                                || trimmed.contains("def create_")
                                || trimmed.contains("function create")
                                || trimmed.contains("Factory")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Factory".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.7,
                                });
                            }
                        }
                        "builder" => {
                            if trimmed.contains("fn builder(")
                                || trimmed.contains(".builder()")
                                || trimmed.contains(".build()")
                                || trimmed.contains("Builder")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Builder".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.7,
                                });
                            }
                        }
                        "observer" => {
                            if trimmed.contains("on_event")
                                || trimmed.contains("addEventListener")
                                || trimmed.contains("subscribe")
                                || trimmed.contains("notify")
                                || trimmed.contains("Observer")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Observer".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.6,
                                });
                            }
                        }
                        "repository" => {
                            if trimmed.contains("find_by")
                                || trimmed.contains("findBy")
                                || trimmed.contains("get_all")
                                || trimmed.contains("getAll")
                                || trimmed.contains("Repository")
                            {
                                matches.push(PatternMatch {
                                    pattern_name: "Repository".to_string(),
                                    file_path: rel.clone(),
                                    line_number: line_num + 1,
                                    snippet: trimmed.to_string(),
                                    confidence: 0.7,
                                });
                            }
                        }
                        _ => {}
                    }
                }
            }
        }

        let out = serde_json::to_string_pretty(&matches).unwrap_or_default();
        Ok(ToolOutput::text(format!(
            "Detected {} pattern matches:\n{}",
            matches.len(),
            out
        )))
    }
686
687 fn translate_snippet(&self, args: &Value) -> Result<ToolOutput, ToolError> {
688 let code = args.get("code").and_then(|v| v.as_str()).ok_or_else(|| {
689 ToolError::InvalidArguments {
690 name: "code_intelligence".to_string(),
691 reason: "Missing required parameter 'code'".to_string(),
692 }
693 })?;
694 let from_lang = args
695 .get("from_language")
696 .and_then(|v| v.as_str())
697 .ok_or_else(|| ToolError::InvalidArguments {
698 name: "code_intelligence".to_string(),
699 reason: "Missing required parameter 'from_language'".to_string(),
700 })?;
701 let to_lang = args
702 .get("to_language")
703 .and_then(|v| v.as_str())
704 .ok_or_else(|| ToolError::InvalidArguments {
705 name: "code_intelligence".to_string(),
706 reason: "Missing required parameter 'to_language'".to_string(),
707 })?;
708
709 let semantics_from = Self::language_semantics_notes(from_lang);
710 let semantics_to = Self::language_semantics_notes(to_lang);
711
712 let prompt = format!(
713 "Translate the following {from_lang} code to {to_lang}.\n\n\
714 ## Source Code ({from_lang})\n```{from_ext}\n{code}\n```\n\n\
715 ## {from_lang} Semantics\n{semantics_from}\n\n\
716 ## {to_lang} Semantics\n{semantics_to}\n\n\
717 ## Instructions\n\
718 - Produce idiomatic {to_lang} code.\n\
719 - Preserve the original logic and behavior.\n\
720 - Use {to_lang} conventions for naming, error handling, and structure.\n\
721 - Add brief comments where the translation involves non-obvious choices.",
722 from_lang = from_lang,
723 to_lang = to_lang,
724 from_ext = from_lang.to_lowercase(),
725 code = code,
726 semantics_from = semantics_from,
727 semantics_to = semantics_to,
728 );
729
730 Ok(ToolOutput::text(prompt))
731 }
732
    /// One-line summary of a language's core semantics, used to prime the
    /// translation prompt. Matching is case-insensitive; unknown languages
    /// get a generic fallback.
    fn language_semantics_notes(lang: &str) -> &'static str {
        match lang.to_lowercase().as_str() {
            "rust" => "Ownership & borrowing, no GC, Result/Option for errors, pattern matching, traits for polymorphism, lifetimes.",
            "python" => "Dynamic typing, GC, exceptions for errors, duck typing, indentation-based blocks, list comprehensions.",
            "javascript" | "js" => "Dynamic typing, prototype-based OOP, async/await with Promises, closures, event loop concurrency.",
            "typescript" | "ts" => "Structural typing over JavaScript, interfaces, generics, union/intersection types, async/await.",
            "go" => "Static typing, GC, error values (not exceptions), goroutines/channels for concurrency, interfaces (implicit), no generics (pre-1.18).",
            "java" => "Static typing, GC, checked exceptions, class-based OOP, interfaces, generics with type erasure.",
            "c" => "Manual memory management, pointers, no OOP, preprocessor macros, header files.",
            "c++" | "cpp" => "Manual memory + RAII/smart pointers, templates, OOP with multiple inheritance, operator overloading.",
            "ruby" => "Dynamic typing, GC, everything is an object, blocks/procs/lambdas, mixins via modules.",
            _ => "General-purpose programming language.",
        }
    }
747
    /// Compare two files side by side: language (from extension), line count,
    /// a heuristic function count, and a 50-line preview of each, rendered as
    /// a Markdown table plus fenced code blocks.
    fn compare_implementations(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let file_a = args.get("file_a").and_then(|v| v.as_str()).ok_or_else(|| {
            ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'file_a'".to_string(),
            }
        })?;
        let file_b = args.get("file_b").and_then(|v| v.as_str()).ok_or_else(|| {
            ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'file_b'".to_string(),
            }
        })?;

        // Relative paths are resolved against the workspace root.
        let path_a = {
            let p = PathBuf::from(file_a);
            if p.is_absolute() {
                p
            } else {
                self.workspace.join(p)
            }
        };
        let path_b = {
            let p = PathBuf::from(file_b);
            if p.is_absolute() {
                p
            } else {
                self.workspace.join(p)
            }
        };

        let content_a =
            std::fs::read_to_string(&path_a).map_err(|e| ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Failed to read file_a '{}': {}", path_a.display(), e),
            })?;
        let content_b =
            std::fs::read_to_string(&path_b).map_err(|e| ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Failed to read file_b '{}': {}", path_b.display(), e),
            })?;

        // Language detection is purely extension-based.
        let lang_a = path_a
            .extension()
            .and_then(|e| Self::ext_to_language(&e.to_string_lossy()))
            .unwrap_or("Unknown");
        let lang_b = path_b
            .extension()
            .and_then(|e| Self::ext_to_language(&e.to_string_lossy()))
            .unwrap_or("Unknown");

        let lines_a = Self::count_lines(&content_a);
        let lines_b = Self::count_lines(&content_b);

        // Heuristic, line-based function counting (see `count_functions`).
        let fn_count_a = Self::count_functions(&content_a, lang_a);
        let fn_count_b = Self::count_functions(&content_b, lang_b);

        let output = format!(
            "## Implementation Comparison\n\n\
             | Metric | File A | File B |\n\
             |--------|--------|--------|\n\
             | Path | {file_a} | {file_b} |\n\
             | Language | {lang_a} | {lang_b} |\n\
             | Lines | {lines_a} | {lines_b} |\n\
             | Functions | {fn_count_a} | {fn_count_b} |\n\n\
             ### File A: {file_a}\n```{ext_a}\n{preview_a}\n```\n\n\
             ### File B: {file_b}\n```{ext_b}\n{preview_b}\n```",
            file_a = file_a,
            file_b = file_b,
            lang_a = lang_a,
            lang_b = lang_b,
            lines_a = lines_a,
            lines_b = lines_b,
            fn_count_a = fn_count_a,
            fn_count_b = fn_count_b,
            ext_a = lang_a.to_lowercase(),
            ext_b = lang_b.to_lowercase(),
            preview_a = Self::preview_content(&content_a, 50),
            preview_b = Self::preview_content(&content_b, 50),
        );

        Ok(ToolOutput::text(output))
    }
832
833 fn count_functions(content: &str, language: &str) -> usize {
835 let mut count = 0;
836 for line in content.lines() {
837 let trimmed = line.trim();
838 match language {
839 "Rust" => {
840 if (trimmed.starts_with("fn ")
841 || trimmed.starts_with("pub fn ")
842 || trimmed.starts_with("pub(crate) fn ")
843 || trimmed.starts_with("async fn ")
844 || trimmed.starts_with("pub async fn "))
845 && trimmed.contains('(')
846 {
847 count += 1;
848 }
849 }
850 "Python" => {
851 if trimmed.starts_with("def ") && trimmed.contains('(') {
852 count += 1;
853 }
854 }
855 "JavaScript" | "JavaScript (JSX)" | "TypeScript" | "TypeScript (TSX)" => {
856 if (trimmed.starts_with("function ")
857 || trimmed.starts_with("async function ")
858 || trimmed.starts_with("export function ")
859 || trimmed.starts_with("export async function "))
860 && trimmed.contains('(')
861 {
862 count += 1;
863 }
864 }
865 "Go" => {
866 if trimmed.starts_with("func ") && trimmed.contains('(') {
867 count += 1;
868 }
869 }
870 "Java" | "C#" => {
871 if (trimmed.contains("public ")
872 || trimmed.contains("private ")
873 || trimmed.contains("protected "))
874 && trimmed.contains('(')
875 && !trimmed.contains("class ")
876 && !trimmed.contains("interface ")
877 {
878 count += 1;
879 }
880 }
881 "Ruby" => {
882 if trimmed.starts_with("def ") {
883 count += 1;
884 }
885 }
886 "C" | "C++" => {
887 if trimmed.contains('(')
888 && trimmed.contains(')')
889 && (trimmed.ends_with('{') || trimmed.ends_with(") {"))
890 && !trimmed.starts_with("if ")
891 && !trimmed.starts_with("for ")
892 && !trimmed.starts_with("while ")
893 && !trimmed.starts_with("switch ")
894 && !trimmed.starts_with("//")
895 && !trimmed.starts_with('#')
896 {
897 count += 1;
898 }
899 }
900 _ => {}
901 }
902 }
903 count
904 }
905
906 fn preview_content(content: &str, max_lines: usize) -> String {
908 let lines: Vec<&str> = content.lines().take(max_lines).collect();
909 let preview = lines.join("\n");
910 let total = content.lines().count();
911 if total > max_lines {
912 format!("{}\n\n... ({} more lines)", preview, total - max_lines)
913 } else {
914 preview
915 }
916 }
917
    /// Scan source files for tech-debt signals: TODO/FIXME/HACK markers,
    /// deeply nested lines (>4 indent levels), and long functions
    /// (>100 lines, tracked with a crude brace-depth state machine).
    /// The optional `severity` argument filters items by exact severity name.
    fn tech_debt_report(&self, args: &Value) -> Result<ToolOutput, ToolError> {
        let root = self.resolve_path(args);
        if !root.exists() {
            return Err(ToolError::ExecutionFailed {
                name: "code_intelligence".to_string(),
                message: format!("Path does not exist: {}", root.display()),
            });
        }

        let severity_filter = args.get("severity").and_then(|v| v.as_str());

        let mut items: Vec<TechDebtItem> = Vec::new();
        // Cap the scan so large repositories cannot stall the tool.
        let max_files: usize = 1000;
        let mut file_count: usize = 0;

        let walker = ignore::WalkBuilder::new(&root)
            .hidden(false)
            .git_ignore(true)
            .build();

        for entry in walker {
            let entry = match entry {
                Ok(e) => e,
                Err(_) => continue,
            };

            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                continue;
            }

            if file_count >= max_files {
                break;
            }

            let path = entry.path();
            let ext = path
                .extension()
                .map(|e| e.to_string_lossy().to_string())
                .unwrap_or_default();

            // Only recognized, non-binary source files are scanned.
            if Self::is_binary_ext(&ext) || Self::ext_to_language(&ext).is_none() {
                continue;
            }

            let content = match std::fs::read_to_string(path) {
                Ok(c) => c,
                Err(_) => continue,
            };

            file_count += 1;

            let rel = path
                .strip_prefix(&root)
                .unwrap_or(path)
                .to_string_lossy()
                .to_string();

            // Long-function tracker state, reset per file.
            // NOTE(review): `0_usize.into()` is `Some(0)`; the value is
            // always overwritten before `in_function` becomes true, so the
            // odd initializer is harmless but `None` would state the intent.
            let mut fn_start_line: Option<usize> = 0_usize.into();
            let mut fn_name = String::new();
            let mut brace_depth: i32 = 0;
            let mut in_function = false;

            for (line_num, line) in content.lines().enumerate() {
                let trimmed = line.trim();

                // Marker comments; all three categories report "medium".
                if trimmed.contains("TODO") {
                    let item = TechDebtItem {
                        file_path: rel.clone(),
                        line_number: line_num + 1,
                        category: "todo".to_string(),
                        description: trimmed.to_string(),
                        severity: "medium".to_string(),
                    };
                    if severity_filter.is_none() || severity_filter == Some("medium") {
                        items.push(item);
                    }
                }
                if trimmed.contains("FIXME") {
                    let item = TechDebtItem {
                        file_path: rel.clone(),
                        line_number: line_num + 1,
                        category: "fixme".to_string(),
                        description: trimmed.to_string(),
                        severity: "medium".to_string(),
                    };
                    if severity_filter.is_none() || severity_filter == Some("medium") {
                        items.push(item);
                    }
                }
                if trimmed.contains("HACK") {
                    let item = TechDebtItem {
                        file_path: rel.clone(),
                        line_number: line_num + 1,
                        category: "hack".to_string(),
                        description: trimmed.to_string(),
                        severity: "medium".to_string(),
                    };
                    if severity_filter.is_none() || severity_filter == Some("medium") {
                        items.push(item);
                    }
                }

                // Deep nesting: more than 4 indent levels (see measure_indent).
                let indent_level = Self::measure_indent(line);
                if indent_level > 4 {
                    let item = TechDebtItem {
                        file_path: rel.clone(),
                        line_number: line_num + 1,
                        category: "deep_nesting".to_string(),
                        description: format!(
                            "Deeply nested code ({} levels): {}",
                            indent_level,
                            Self::truncate_str(trimmed, 80)
                        ),
                        severity: "medium".to_string(),
                    };
                    if severity_filter.is_none() || severity_filter == Some("medium") {
                        items.push(item);
                    }
                }

                // Function-start detection across the supported languages.
                let is_fn_start = trimmed.starts_with("fn ")
                    || trimmed.starts_with("pub fn ")
                    || trimmed.starts_with("pub(crate) fn ")
                    || trimmed.starts_with("async fn ")
                    || trimmed.starts_with("pub async fn ")
                    || trimmed.starts_with("def ")
                    || trimmed.starts_with("function ")
                    || trimmed.starts_with("async function ")
                    || trimmed.starts_with("export function ")
                    || trimmed.starts_with("export async function ")
                    || trimmed.starts_with("func ");

                if is_fn_start && trimmed.contains('(') {
                    // A new function starting while a previous one is still
                    // open (no closing brace seen) finalizes the previous one.
                    // NOTE(review): this path computes `line_num - start`
                    // while the brace-close path below uses `+ 1`; also, a
                    // long function at end-of-file is never reported. Confirm
                    // whether these are acceptable for a heuristic.
                    if in_function {
                        if let Some(start) = fn_start_line {
                            let length = line_num - start;
                            if length > 100 {
                                let item = TechDebtItem {
                                    file_path: rel.clone(),
                                    line_number: start + 1,
                                    category: "long_function".to_string(),
                                    description: format!(
                                        "Function '{}' is {} lines long (>100)",
                                        fn_name, length
                                    ),
                                    severity: "high".to_string(),
                                };
                                if severity_filter.is_none() || severity_filter == Some("high") {
                                    items.push(item);
                                }
                            }
                        }
                    }

                    fn_start_line = Some(line_num);
                    fn_name = Self::extract_fn_name(trimmed);
                    brace_depth = 0;
                    in_function = true;
                }

                if in_function {
                    // Track net brace depth; the function ends when depth
                    // returns to zero on a line after its start.
                    for ch in trimmed.chars() {
                        if ch == '{' {
                            brace_depth += 1;
                        } else if ch == '}' {
                            brace_depth -= 1;
                        }
                    }

                    if brace_depth <= 0
                        && fn_start_line.is_some()
                        && line_num > fn_start_line.unwrap_or(0)
                    {
                        if let Some(start) = fn_start_line {
                            let length = line_num - start + 1;
                            if length > 100 {
                                let item = TechDebtItem {
                                    file_path: rel.clone(),
                                    line_number: start + 1,
                                    category: "long_function".to_string(),
                                    description: format!(
                                        "Function '{}' is {} lines long (>100)",
                                        fn_name, length
                                    ),
                                    severity: "high".to_string(),
                                };
                                if severity_filter.is_none() || severity_filter == Some("high") {
                                    items.push(item);
                                }
                            }
                        }
                        in_function = false;
                        fn_start_line = None;
                    }
                }
            }
        }

        // Per-category tallies for the human-readable summary.
        let mut by_category: HashMap<String, usize> = HashMap::new();
        for item in &items {
            *by_category.entry(item.category.clone()).or_insert(0) += 1;
        }

        let summary = by_category
            .iter()
            .map(|(k, v)| format!("  {}: {}", k, v))
            .collect::<Vec<_>>()
            .join("\n");

        let detail = serde_json::to_string_pretty(&items).unwrap_or_default();
        Ok(ToolOutput::text(format!(
            "Tech debt report: {} items found\n\nSummary:\n{}\n\nDetails:\n{}",
            items.len(),
            summary,
            detail
        )))
    }
1141
1142 fn measure_indent(line: &str) -> usize {
1144 let leading_spaces = line.len() - line.trim_start().len();
1145 let tab_count = line.chars().take_while(|c| *c == '\t').count();
1147 if tab_count > 0 {
1148 tab_count
1149 } else {
1150 leading_spaces / 4
1151 }
1152 }
1153
1154 fn extract_fn_name(line: &str) -> String {
1156 let prefixes = [
1158 "pub async fn ",
1159 "pub(crate) fn ",
1160 "pub fn ",
1161 "async fn ",
1162 "fn ",
1163 "export async function ",
1164 "export function ",
1165 "async function ",
1166 "function ",
1167 "func ",
1168 "def ",
1169 ];
1170 for prefix in &prefixes {
1171 if let Some(rest) = line.trim().strip_prefix(prefix) {
1172 if let Some(paren_pos) = rest.find('(') {
1173 let name = rest[..paren_pos].trim();
1174 if !name.is_empty() {
1175 return name.to_string();
1176 }
1177 }
1178 }
1179 }
1180 "<anonymous>".to_string()
1181 }
1182
1183 fn truncate_str(s: &str, max: usize) -> String {
1184 if s.len() <= max {
1185 s.to_string()
1186 } else {
1187 format!("{}...", &s[..max])
1188 }
1189 }
1190
1191 fn api_surface(&self, args: &Value) -> Result<ToolOutput, ToolError> {
1192 let root = self.resolve_path(args);
1193 if !root.exists() {
1194 return Err(ToolError::ExecutionFailed {
1195 name: "code_intelligence".to_string(),
1196 message: format!("Path does not exist: {}", root.display()),
1197 });
1198 }
1199
1200 let lang_filter = args.get("language").and_then(|v| v.as_str());
1201 let mut entries: Vec<ApiEntry> = Vec::new();
1202
1203 let walker = ignore::WalkBuilder::new(&root)
1204 .hidden(false)
1205 .git_ignore(true)
1206 .build();
1207
1208 for entry in walker {
1209 let entry = match entry {
1210 Ok(e) => e,
1211 Err(_) => continue,
1212 };
1213
1214 if !entry.file_type().is_some_and(|ft| ft.is_file()) {
1215 continue;
1216 }
1217
1218 let path = entry.path();
1219 let ext = path
1220 .extension()
1221 .map(|e| e.to_string_lossy().to_string())
1222 .unwrap_or_default();
1223
1224 if Self::is_binary_ext(&ext) {
1225 continue;
1226 }
1227
1228 let language = match Self::ext_to_language(&ext) {
1229 Some(l) => l,
1230 None => continue,
1231 };
1232
1233 if let Some(filter) = lang_filter {
1235 let filter_lower = filter.to_lowercase();
1236 if !language.to_lowercase().contains(&filter_lower) {
1237 continue;
1238 }
1239 }
1240
1241 let content = match std::fs::read_to_string(path) {
1242 Ok(c) => c,
1243 Err(_) => continue,
1244 };
1245
1246 let rel = path
1247 .strip_prefix(&root)
1248 .unwrap_or(path)
1249 .to_string_lossy()
1250 .to_string();
1251
1252 for (line_num, line) in content.lines().enumerate() {
1253 let trimmed = line.trim();
1254
1255 match language {
1256 "Rust" => {
1257 if (trimmed.starts_with("pub fn ") || trimmed.starts_with("pub async fn "))
1259 && trimmed.contains('(')
1260 {
1261 let name = Self::extract_fn_name(trimmed);
1262 entries.push(ApiEntry {
1263 name,
1264 kind: "function".to_string(),
1265 file_path: rel.clone(),
1266 line_number: line_num + 1,
1267 signature: trimmed.to_string(),
1268 visibility: "public".to_string(),
1269 });
1270 }
1271 if trimmed.starts_with("pub struct ") {
1273 let name = trimmed
1274 .strip_prefix("pub struct ")
1275 .and_then(|r| {
1276 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1277 })
1278 .unwrap_or("")
1279 .to_string();
1280 entries.push(ApiEntry {
1281 name,
1282 kind: "struct".to_string(),
1283 file_path: rel.clone(),
1284 line_number: line_num + 1,
1285 signature: trimmed.to_string(),
1286 visibility: "public".to_string(),
1287 });
1288 }
1289 if trimmed.starts_with("pub trait ") {
1291 let name = trimmed
1292 .strip_prefix("pub trait ")
1293 .and_then(|r| {
1294 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1295 })
1296 .unwrap_or("")
1297 .to_string();
1298 entries.push(ApiEntry {
1299 name,
1300 kind: "trait".to_string(),
1301 file_path: rel.clone(),
1302 line_number: line_num + 1,
1303 signature: trimmed.to_string(),
1304 visibility: "public".to_string(),
1305 });
1306 }
1307 if trimmed.starts_with("pub enum ") {
1309 let name = trimmed
1310 .strip_prefix("pub enum ")
1311 .and_then(|r| {
1312 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1313 })
1314 .unwrap_or("")
1315 .to_string();
1316 entries.push(ApiEntry {
1317 name,
1318 kind: "enum".to_string(),
1319 file_path: rel.clone(),
1320 line_number: line_num + 1,
1321 signature: trimmed.to_string(),
1322 visibility: "public".to_string(),
1323 });
1324 }
1325 }
1326 "Python" => {
1327 if line.starts_with("def ") && trimmed.contains('(') {
1329 let name = Self::extract_fn_name(trimmed);
1330 entries.push(ApiEntry {
1331 name,
1332 kind: "function".to_string(),
1333 file_path: rel.clone(),
1334 line_number: line_num + 1,
1335 signature: trimmed.to_string(),
1336 visibility: "public".to_string(),
1337 });
1338 }
1339 if line.starts_with("class ") {
1341 let name = trimmed
1342 .strip_prefix("class ")
1343 .and_then(|r| {
1344 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1345 })
1346 .unwrap_or("")
1347 .to_string();
1348 entries.push(ApiEntry {
1349 name,
1350 kind: "class".to_string(),
1351 file_path: rel.clone(),
1352 line_number: line_num + 1,
1353 signature: trimmed.to_string(),
1354 visibility: "public".to_string(),
1355 });
1356 }
1357 }
1358 "JavaScript" | "JavaScript (JSX)" | "TypeScript" | "TypeScript (TSX)" => {
1359 if (trimmed.starts_with("export function ")
1361 || trimmed.starts_with("export async function "))
1362 && trimmed.contains('(')
1363 {
1364 let name = Self::extract_fn_name(trimmed);
1365 entries.push(ApiEntry {
1366 name,
1367 kind: "function".to_string(),
1368 file_path: rel.clone(),
1369 line_number: line_num + 1,
1370 signature: trimmed.to_string(),
1371 visibility: "public".to_string(),
1372 });
1373 }
1374 if trimmed.starts_with("export class ") {
1376 let name = trimmed
1377 .strip_prefix("export class ")
1378 .and_then(|r| {
1379 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1380 })
1381 .unwrap_or("")
1382 .to_string();
1383 entries.push(ApiEntry {
1384 name,
1385 kind: "class".to_string(),
1386 file_path: rel.clone(),
1387 line_number: line_num + 1,
1388 signature: trimmed.to_string(),
1389 visibility: "public".to_string(),
1390 });
1391 }
1392 if trimmed.starts_with("export const ") {
1394 let name = trimmed
1395 .strip_prefix("export const ")
1396 .and_then(|r| {
1397 r.split(|c: char| !c.is_alphanumeric() && c != '_').next()
1398 })
1399 .unwrap_or("")
1400 .to_string();
1401 entries.push(ApiEntry {
1402 name,
1403 kind: "module".to_string(),
1404 file_path: rel.clone(),
1405 line_number: line_num + 1,
1406 signature: trimmed.to_string(),
1407 visibility: "public".to_string(),
1408 });
1409 }
1410 }
1411 _ => {}
1412 }
1413 }
1414 }
1415
1416 let out = serde_json::to_string_pretty(&entries).unwrap_or_default();
1417 Ok(ToolOutput::text(format!(
1418 "API surface: {} public entries\n{}",
1419 entries.len(),
1420 out
1421 )))
1422 }
1423
1424 fn dependency_map(&self, args: &Value) -> Result<ToolOutput, ToolError> {
1425 let root = self.resolve_path(args);
1426 if !root.exists() {
1427 return Err(ToolError::ExecutionFailed {
1428 name: "code_intelligence".to_string(),
1429 message: format!("Path does not exist: {}", root.display()),
1430 });
1431 }
1432
1433 let mut deps: Vec<DependencyEntry> = Vec::new();
1434
1435 Self::find_and_parse_files(&root, "Cargo.toml", |path, content| {
1437 let rel = path
1438 .strip_prefix(&root)
1439 .unwrap_or(path)
1440 .to_string_lossy()
1441 .to_string();
1442 deps.extend(Self::parse_cargo_toml(&content, &rel));
1443 });
1444
1445 Self::find_and_parse_files(&root, "package.json", |path, content| {
1447 let rel = path
1448 .strip_prefix(&root)
1449 .unwrap_or(path)
1450 .to_string_lossy()
1451 .to_string();
1452 deps.extend(Self::parse_package_json(&content, &rel));
1453 });
1454
1455 Self::find_and_parse_files(&root, "requirements.txt", |path, content| {
1457 let rel = path
1458 .strip_prefix(&root)
1459 .unwrap_or(path)
1460 .to_string_lossy()
1461 .to_string();
1462 deps.extend(Self::parse_requirements_txt(&content, &rel));
1463 });
1464
1465 Self::find_and_parse_files(&root, "go.mod", |path, content| {
1467 let rel = path
1468 .strip_prefix(&root)
1469 .unwrap_or(path)
1470 .to_string_lossy()
1471 .to_string();
1472 deps.extend(Self::parse_go_mod(&content, &rel));
1473 });
1474
1475 Self::find_and_parse_files(&root, "Gemfile", |path, content| {
1477 let rel = path
1478 .strip_prefix(&root)
1479 .unwrap_or(path)
1480 .to_string_lossy()
1481 .to_string();
1482 deps.extend(Self::parse_gemfile(&content, &rel));
1483 });
1484
1485 let out = serde_json::to_string_pretty(&deps).unwrap_or_default();
1486 Ok(ToolOutput::text(format!(
1487 "Dependency map: {} dependencies\n{}",
1488 deps.len(),
1489 out
1490 )))
1491 }
1492
1493 fn find_and_parse_files<F>(root: &PathBuf, filename: &str, mut handler: F)
1499 where
1500 F: FnMut(&std::path::Path, String),
1501 {
1502 let walker = ignore::WalkBuilder::new(root)
1503 .hidden(false)
1504 .git_ignore(true)
1505 .build();
1506
1507 for entry in walker {
1508 let entry = match entry {
1509 Ok(e) => e,
1510 Err(_) => continue,
1511 };
1512
1513 if !entry.file_type().is_some_and(|ft| ft.is_file()) {
1514 continue;
1515 }
1516
1517 if entry.file_name().to_string_lossy() == filename {
1518 if let Ok(content) = std::fs::read_to_string(entry.path()) {
1519 handler(entry.path(), content);
1520 }
1521 }
1522 }
1523 }
1524
1525 fn parse_cargo_toml(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1527 let mut deps = Vec::new();
1528 let mut current_section = String::new();
1529
1530 for line in content.lines() {
1531 let trimmed = line.trim();
1532
1533 if trimmed.starts_with('[') && trimmed.ends_with(']') {
1535 current_section = trimmed[1..trimmed.len() - 1].to_string();
1536 continue;
1537 }
1538
1539 if trimmed.starts_with('[') {
1541 if let Some(end) = trimmed.find(']') {
1542 current_section = trimmed[1..end].to_string();
1543 }
1544 continue;
1545 }
1546
1547 let dep_type = match current_section.as_str() {
1548 "dependencies" | "workspace.dependencies" => "runtime",
1549 "dev-dependencies" => "dev",
1550 "build-dependencies" => "build",
1551 s if s.ends_with(".dependencies") && !s.contains("dev") && !s.contains("build") => {
1552 "runtime"
1553 }
1554 _ => continue,
1555 };
1556
1557 if let Some(eq_pos) = trimmed.find('=') {
1559 let name = trimmed[..eq_pos].trim().to_string();
1560 if name.is_empty() || name.starts_with('#') {
1561 continue;
1562 }
1563 let value_part = trimmed[eq_pos + 1..].trim();
1564
1565 let version = if value_part.starts_with('"') {
1566 value_part.trim_matches('"').trim_matches('\'').to_string()
1568 } else if value_part.starts_with('{') {
1569 Self::extract_toml_inline_version(value_part)
1571 } else {
1572 value_part.to_string()
1573 };
1574
1575 deps.push(DependencyEntry {
1576 name,
1577 version,
1578 dep_type: dep_type.to_string(),
1579 source_file: source_file.to_string(),
1580 });
1581 }
1582 }
1583
1584 deps
1585 }
1586
1587 fn extract_toml_inline_version(inline: &str) -> String {
1589 if let Some(ver_pos) = inline.find("version") {
1591 let after_key = &inline[ver_pos + 7..];
1592 if let Some(eq_pos) = after_key.find('=') {
1593 let after_eq = after_key[eq_pos + 1..].trim();
1594 if let Some(stripped) = after_eq.strip_prefix('"') {
1595 if let Some(end_quote) = stripped.find('"') {
1596 return stripped[..end_quote].to_string();
1597 }
1598 }
1599 }
1600 }
1601 if inline.contains("workspace") {
1603 return "workspace".to_string();
1604 }
1605 "*".to_string()
1606 }
1607
1608 fn parse_package_json(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1610 let mut deps = Vec::new();
1611
1612 let parsed: Value = match serde_json::from_str(content) {
1613 Ok(v) => v,
1614 Err(_) => return deps,
1615 };
1616
1617 let sections = [
1618 ("dependencies", "runtime"),
1619 ("devDependencies", "dev"),
1620 ("peerDependencies", "runtime"),
1621 ("optionalDependencies", "optional"),
1622 ];
1623
1624 for (key, dep_type) in §ions {
1625 if let Some(obj) = parsed.get(key).and_then(|v| v.as_object()) {
1626 for (name, version) in obj {
1627 deps.push(DependencyEntry {
1628 name: name.clone(),
1629 version: version.as_str().unwrap_or("*").to_string(),
1630 dep_type: dep_type.to_string(),
1631 source_file: source_file.to_string(),
1632 });
1633 }
1634 }
1635 }
1636
1637 deps
1638 }
1639
1640 fn parse_requirements_txt(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1642 let mut deps = Vec::new();
1643
1644 for line in content.lines() {
1645 let trimmed = line.trim();
1646 if trimmed.is_empty() || trimmed.starts_with('#') || trimmed.starts_with('-') {
1647 continue;
1648 }
1649
1650 let (name, version) = if let Some(pos) = trimmed.find("==") {
1652 (trimmed[..pos].trim(), trimmed[pos + 2..].trim())
1653 } else if let Some(pos) = trimmed.find(">=") {
1654 (trimmed[..pos].trim(), trimmed[pos..].trim())
1655 } else if let Some(pos) = trimmed.find("~=") {
1656 (trimmed[..pos].trim(), trimmed[pos..].trim())
1657 } else if let Some(pos) = trimmed.find("<=") {
1658 (trimmed[..pos].trim(), trimmed[pos..].trim())
1659 } else if let Some(pos) = trimmed.find("!=") {
1660 (trimmed[..pos].trim(), trimmed[pos..].trim())
1661 } else {
1662 (trimmed, "*")
1663 };
1664
1665 if !name.is_empty() {
1666 deps.push(DependencyEntry {
1667 name: name.to_string(),
1668 version: version.to_string(),
1669 dep_type: "runtime".to_string(),
1670 source_file: source_file.to_string(),
1671 });
1672 }
1673 }
1674
1675 deps
1676 }
1677
1678 fn parse_go_mod(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1680 let mut deps = Vec::new();
1681 let mut in_require = false;
1682
1683 for line in content.lines() {
1684 let trimmed = line.trim();
1685
1686 if trimmed == "require (" {
1687 in_require = true;
1688 continue;
1689 }
1690 if trimmed == ")" {
1691 in_require = false;
1692 continue;
1693 }
1694
1695 if trimmed.starts_with("require ") && !trimmed.contains('(') {
1697 let rest = trimmed.strip_prefix("require ").unwrap_or("").trim();
1698 let parts: Vec<&str> = rest.split_whitespace().collect();
1699 if parts.len() >= 2 {
1700 deps.push(DependencyEntry {
1701 name: parts[0].to_string(),
1702 version: parts[1].to_string(),
1703 dep_type: "runtime".to_string(),
1704 source_file: source_file.to_string(),
1705 });
1706 }
1707 continue;
1708 }
1709
1710 if in_require && !trimmed.is_empty() && !trimmed.starts_with("//") {
1712 let clean = if let Some(pos) = trimmed.find("//") {
1713 trimmed[..pos].trim()
1714 } else {
1715 trimmed
1716 };
1717 let parts: Vec<&str> = clean.split_whitespace().collect();
1718 if parts.len() >= 2 {
1719 let dep_type = if parts.len() > 2 && parts[2] == "// indirect" {
1720 "optional"
1721 } else {
1722 "runtime"
1723 };
1724 deps.push(DependencyEntry {
1725 name: parts[0].to_string(),
1726 version: parts[1].to_string(),
1727 dep_type: dep_type.to_string(),
1728 source_file: source_file.to_string(),
1729 });
1730 }
1731 }
1732 }
1733
1734 deps
1735 }
1736
1737 fn parse_gemfile(content: &str, source_file: &str) -> Vec<DependencyEntry> {
1739 let mut deps = Vec::new();
1740 let mut in_group: Option<String> = None;
1741
1742 for line in content.lines() {
1743 let trimmed = line.trim();
1744
1745 if trimmed.starts_with("group ") {
1746 if trimmed.contains(":development") || trimmed.contains(":test") {
1747 in_group = Some("dev".to_string());
1748 } else {
1749 in_group = Some("runtime".to_string());
1750 }
1751 continue;
1752 }
1753 if trimmed == "end" {
1754 in_group = None;
1755 continue;
1756 }
1757
1758 if trimmed.starts_with("gem ") {
1759 let rest = trimmed.strip_prefix("gem ").unwrap_or("").trim();
1760 let parts: Vec<&str> = rest.split(',').collect();
1762 if let Some(name_part) = parts.first() {
1763 let name = name_part
1764 .trim()
1765 .trim_matches('\'')
1766 .trim_matches('"')
1767 .to_string();
1768 let version = if parts.len() > 1 {
1769 parts[1]
1770 .trim()
1771 .trim_matches('\'')
1772 .trim_matches('"')
1773 .to_string()
1774 } else {
1775 "*".to_string()
1776 };
1777 let dep_type = in_group.as_deref().unwrap_or("runtime").to_string();
1778 deps.push(DependencyEntry {
1779 name,
1780 version,
1781 dep_type,
1782 source_file: source_file.to_string(),
1783 });
1784 }
1785 }
1786 }
1787
1788 deps
1789 }
1790}
1791
#[async_trait]
impl Tool for CodeIntelligenceTool {
    /// Stable identifier under which the tool is registered.
    fn name(&self) -> &str {
        "code_intelligence"
    }

    /// Model-facing summary; enumerates every supported `action` value.
    fn description(&self) -> &str {
        "Cross-language codebase analysis: architecture detection, pattern recognition, \
         tech debt scanning, API surface extraction. Actions: analyze_architecture, \
         detect_patterns, translate_snippet, compare_implementations, tech_debt_report, \
         api_surface, dependency_map."
    }

    /// JSON Schema for the arguments. Only `action` is required; the
    /// remaining properties apply to specific actions as noted in their
    /// descriptions.
    fn parameters_schema(&self) -> Value {
        json!({
            "type": "object",
            "properties": {
                "action": {
                    "type": "string",
                    "enum": [
                        "analyze_architecture",
                        "detect_patterns",
                        "translate_snippet",
                        "compare_implementations",
                        "tech_debt_report",
                        "api_surface",
                        "dependency_map"
                    ],
                    "description": "Action to perform"
                },
                "path": {
                    "type": "string",
                    "description": "Target path (defaults to workspace root)"
                },
                "force": {
                    "type": "boolean",
                    "description": "Force re-analysis ignoring cache (for analyze_architecture)"
                },
                "pattern": {
                    "type": "string",
                    "enum": ["singleton", "factory", "observer", "builder", "repository"],
                    "description": "Design pattern to detect (for detect_patterns)"
                },
                "code": {
                    "type": "string",
                    "description": "Source code snippet (for translate_snippet)"
                },
                "from_language": {
                    "type": "string",
                    "description": "Source language (for translate_snippet)"
                },
                "to_language": {
                    "type": "string",
                    "description": "Target language (for translate_snippet)"
                },
                "file_a": {
                    "type": "string",
                    "description": "First file path (for compare_implementations)"
                },
                "file_b": {
                    "type": "string",
                    "description": "Second file path (for compare_implementations)"
                },
                "severity": {
                    "type": "string",
                    "enum": ["low", "medium", "high"],
                    "description": "Filter by severity (for tech_debt_report)"
                },
                "language": {
                    "type": "string",
                    "description": "Filter by language (for api_surface)"
                }
            },
            "required": ["action"]
        })
    }

    /// All actions only read the workspace; nothing is modified.
    fn risk_level(&self) -> RiskLevel {
        RiskLevel::ReadOnly
    }

    /// Generous budget: several actions walk the entire workspace tree.
    fn timeout(&self) -> Duration {
        Duration::from_secs(120)
    }

    /// Validates `action` and dispatches to the matching handler. All
    /// handlers are synchronous; this async wrapper exists only to satisfy
    /// the `Tool` trait.
    async fn execute(&self, args: Value) -> Result<ToolOutput, ToolError> {
        let action = args.get("action").and_then(|v| v.as_str()).ok_or_else(|| {
            ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: "Missing required parameter 'action'".to_string(),
            }
        })?;

        match action {
            "analyze_architecture" => self.analyze_architecture(&args),
            "detect_patterns" => self.detect_patterns(&args),
            "translate_snippet" => self.translate_snippet(&args),
            "compare_implementations" => self.compare_implementations(&args),
            "tech_debt_report" => self.tech_debt_report(&args),
            "api_surface" => self.api_surface(&args),
            "dependency_map" => self.dependency_map(&args),
            other => Err(ToolError::InvalidArguments {
                name: "code_intelligence".to_string(),
                reason: format!("Unknown action '{}'. Valid actions: analyze_architecture, detect_patterns, translate_snippet, compare_implementations, tech_debt_report, api_surface, dependency_map", other),
            }),
        }
    }
}
1904
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// Builds a tool whose workspace is the canonicalized temp dir, so
    /// path prefix-stripping inside the tool behaves consistently on
    /// platforms where temp paths are symlinked (e.g. macOS).
    fn make_tool(dir: &std::path::Path) -> CodeIntelligenceTool {
        let workspace = dir.canonicalize().unwrap();
        CodeIntelligenceTool::new(workspace)
    }

    // Static trait metadata: name, description, risk level, timeout.
    #[test]
    fn test_tool_properties() {
        let dir = TempDir::new().unwrap();
        let tool = make_tool(dir.path());
        assert_eq!(tool.name(), "code_intelligence");
        assert!(!tool.description().is_empty());
        assert_eq!(tool.risk_level(), RiskLevel::ReadOnly);
        assert_eq!(tool.timeout(), Duration::from_secs(120));
    }

    // The parameters schema exposes the expected properties and requires
    // only `action`.
    #[test]
    fn test_schema_validation() {
        let dir = TempDir::new().unwrap();
        let tool = make_tool(dir.path());
        let schema = tool.parameters_schema();
        assert!(schema.is_object());
        let props = schema.get("properties").unwrap();
        assert!(props.get("action").is_some());
        assert!(props.get("path").is_some());
        assert!(props.get("code").is_some());
        let required = schema.get("required").unwrap().as_array().unwrap();
        assert!(required.contains(&json!("action")));
    }

    // analyze_architecture on a minimal Rust crate reports the language,
    // entry point, and config file.
    #[tokio::test]
    async fn test_analyze_architecture_basic() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        let src_dir = workspace.join("src");
        std::fs::create_dir_all(&src_dir).unwrap();
        std::fs::write(
            src_dir.join("main.rs"),
            "fn main() {\n println!(\"hello\");\n}\n",
        )
        .unwrap();
        std::fs::write(
            src_dir.join("lib.rs"),
            "pub fn greet() -> String {\n \"hello\".to_string()\n}\n",
        )
        .unwrap();
        std::fs::write(workspace.join("Cargo.toml"), "[package]\nname = \"demo\"\n").unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "analyze_architecture"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("Architecture snapshot"));
        assert!(text.contains("Rust"));
        assert!(text.contains("main.rs"));
        assert!(text.contains("Cargo.toml"));
    }

    // Second run is served from cache; `force: true` bypasses it.
    #[tokio::test]
    async fn test_analyze_caching() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        let src_dir = workspace.join("src");
        std::fs::create_dir_all(&src_dir).unwrap();
        std::fs::write(src_dir.join("main.rs"), "fn main() {}\n").unwrap();

        let tool = CodeIntelligenceTool::new(workspace);

        // First run: fresh analysis, no cache marker.
        let result1 = tool
            .execute(json!({"action": "analyze_architecture"}))
            .await
            .unwrap();
        assert!(result1.content.contains("Architecture snapshot:"));
        assert!(!result1.content.contains("(cached)"));

        // Second run: cache hit.
        let result2 = tool
            .execute(json!({"action": "analyze_architecture"}))
            .await
            .unwrap();
        assert!(result2.content.contains("(cached)"));

        // Forced run: cache bypassed.
        let result3 = tool
            .execute(json!({"action": "analyze_architecture", "force": true}))
            .await
            .unwrap();
        assert!(!result3.content.contains("(cached)"));
    }

    // detect_patterns surfaces TODO/FIXME/HACK markers with their text.
    #[tokio::test]
    async fn test_detect_patterns_todo() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("example.rs"),
            "fn main() {\n // TODO: fix this later\n // FIXME: broken\n // HACK: workaround\n println!(\"ok\");\n}\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "detect_patterns"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("TODO"));
        assert!(text.contains("FIXME"));
        assert!(text.contains("HACK"));
        assert!(text.contains("fix this later"));
    }

    // translate_snippet returns an LLM prompt (not an actual translation)
    // containing the snippet plus per-language semantics notes.
    #[tokio::test]
    async fn test_translate_returns_prompt() {
        let dir = TempDir::new().unwrap();
        let tool = make_tool(dir.path());

        let result = tool
            .execute(json!({
                "action": "translate_snippet",
                "code": "fn add(a: i32, b: i32) -> i32 { a + b }",
                "from_language": "Rust",
                "to_language": "Python"
            }))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("Translate the following Rust code to Python"));
        assert!(text.contains("fn add"));
        assert!(text.contains("Rust Semantics"));
        assert!(text.contains("Python Semantics"));
        assert!(text.contains("Ownership"));
        assert!(text.contains("Dynamic typing"));
    }

    // compare_implementations reports both files, their languages, and
    // basic metrics.
    #[tokio::test]
    async fn test_compare_implementations() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("sort_a.rs"),
            "pub fn bubble_sort(arr: &mut Vec<i32>) {\n let n = arr.len();\n for i in 0..n {\n for j in 0..n-1-i {\n if arr[j] > arr[j+1] {\n arr.swap(j, j+1);\n }\n }\n }\n}\n",
        )
        .unwrap();
        std::fs::write(
            workspace.join("sort_b.py"),
            "def quick_sort(arr):\n if len(arr) <= 1:\n return arr\n pivot = arr[0]\n left = [x for x in arr[1:] if x <= pivot]\n right = [x for x in arr[1:] if x > pivot]\n return quick_sort(left) + [pivot] + quick_sort(right)\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({
                "action": "compare_implementations",
                "file_a": "sort_a.rs",
                "file_b": "sort_b.py"
            }))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("Implementation Comparison"));
        assert!(text.contains("sort_a.rs"));
        assert!(text.contains("sort_b.py"));
        assert!(text.contains("Rust"));
        assert!(text.contains("Python"));
        assert!(text.contains("Lines"));
        assert!(text.contains("Functions"));
    }

    // tech_debt_report categorizes TODO/FIXME comments and keeps their text.
    #[tokio::test]
    async fn test_tech_debt_todo_fixme() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("messy.rs"),
            "fn main() {\n // TODO: refactor this\n // FIXME: memory leak\n println!(\"ok\");\n}\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "tech_debt_report"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("Tech debt report"));
        assert!(text.contains("todo"));
        assert!(text.contains("fixme"));
        assert!(text.contains("refactor this"));
        assert!(text.contains("memory leak"));
    }

    // api_surface lists public Rust items and excludes private ones.
    #[tokio::test]
    async fn test_api_surface_rust() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("api.rs"),
            "pub fn create_user(name: &str) -> User {\n User { name: name.to_string() }\n}\n\n\
             pub struct User {\n pub name: String,\n}\n\n\
             pub trait Greet {\n fn greet(&self) -> String;\n}\n\n\
             pub enum Color {\n Red,\n Blue,\n}\n\n\
             fn private_helper() {}\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "api_surface", "language": "rust"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("create_user"));
        assert!(text.contains("User"));
        assert!(text.contains("Greet"));
        assert!(text.contains("Color"));
        assert!(!text.contains("private_helper"));
    }

    // dependency_map classifies Cargo.toml sections into runtime/dev/build.
    #[tokio::test]
    async fn test_dependency_map_cargo() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("Cargo.toml"),
            "[package]\nname = \"demo\"\nversion = \"0.1.0\"\n\n\
             [dependencies]\nserde = \"1.0\"\ntokio = { version = \"1.47\", features = [\"full\"] }\n\n\
             [dev-dependencies]\ntempfile = \"3.14\"\n\n\
             [build-dependencies]\ncc = \"1.0\"\n",
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "dependency_map"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("serde"));
        assert!(text.contains("tokio"));
        assert!(text.contains("tempfile"));
        assert!(text.contains("cc"));
        assert!(text.contains("runtime"));
        assert!(text.contains("dev"));
        assert!(text.contains("build"));
    }

    // dependency_map handles package.json dependency sections.
    #[tokio::test]
    async fn test_dependency_map_npm() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();

        std::fs::write(
            workspace.join("package.json"),
            r#"{
  "name": "demo",
  "dependencies": {
    "express": "^4.18.0",
    "lodash": "^4.17.21"
  },
  "devDependencies": {
    "jest": "^29.0.0"
  }
}"#,
        )
        .unwrap();

        let tool = CodeIntelligenceTool::new(workspace);
        let result = tool
            .execute(json!({"action": "dependency_map"}))
            .await
            .unwrap();

        let text = &result.content;
        assert!(text.contains("express"));
        assert!(text.contains("lodash"));
        assert!(text.contains("jest"));
        assert!(text.contains("runtime"));
        assert!(text.contains("dev"));
    }

    // A saved CodeIntelCache round-trips through save_cache/load_cache
    // with all snapshot fields intact.
    #[tokio::test]
    async fn test_state_roundtrip() {
        let dir = TempDir::new().unwrap();
        let workspace = dir.path().canonicalize().unwrap();
        let tool = CodeIntelligenceTool::new(workspace);

        let snapshot = ArchitectureSnapshot {
            project_root: "/test".to_string(),
            languages: vec![LanguageStats {
                language: "Rust".to_string(),
                files: 10,
                lines: 500,
                extensions: vec!["rs".to_string()],
            }],
            directories: vec![],
            entry_points: vec!["src/main.rs".to_string()],
            config_files: vec!["Cargo.toml".to_string()],
            total_files: 10,
            total_lines: 500,
            analyzed_at: Utc::now(),
        };
        let cache = CodeIntelCache {
            last_snapshot: Some(snapshot),
        };
        tool.save_cache(&cache).unwrap();

        let loaded = tool.load_cache();
        assert!(loaded.last_snapshot.is_some());
        let loaded_snap = loaded.last_snapshot.unwrap();
        assert_eq!(loaded_snap.project_root, "/test");
        assert_eq!(loaded_snap.languages.len(), 1);
        assert_eq!(loaded_snap.languages[0].language, "Rust");
        assert_eq!(loaded_snap.total_files, 10);
        assert_eq!(loaded_snap.total_lines, 500);
    }

    // An unrecognized action yields InvalidArguments naming the action.
    #[tokio::test]
    async fn test_unknown_action() {
        let dir = TempDir::new().unwrap();
        let tool = make_tool(dir.path());

        let result = tool.execute(json!({"action": "nonexistent_action"})).await;
        assert!(result.is_err());
        let err = result.unwrap_err();
        match err {
            ToolError::InvalidArguments { name, reason } => {
                assert_eq!(name, "code_intelligence");
                assert!(reason.contains("Unknown action"));
                assert!(reason.contains("nonexistent_action"));
            }
            other => panic!("Expected InvalidArguments, got {:?}", other),
        }
    }
}