use crate::tools::tree_sitter::{CodeAnalysis, TreeSitterAnalyzer};
use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::sync::RwLock;

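/// A position (file, line, column) in the workspace.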
#[derive(Debug, Clone, Serialize, Deserialize, Eq, Hash, PartialEq)]
pub struct Location {
    pub file: String,
    pub line: usize,
    pub column: usize,
}

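/// How aggressively accumulated context may be compacted.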
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CompactionStrategy {
    Conservative,
    Balanced,
    Aggressive,
}

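/// Result of deciding whether, and how, to compact context.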
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompactionDecision {
    pub should_compact: bool,
    pub strategy: CompactionStrategy,
    pub reasoning: String,
    pub estimated_benefit: usize,
}

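/// Semantic state shared across the engine: the active file, recently touched
/// files, the cursor context, indexed symbols, learned patterns, and the
/// symbol dependency graph.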
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SemanticContext {
    pub current_file: Option<PathBuf>,
    pub recent_files: Vec<PathBuf>,
    pub cursor_context: Option<CursorContext>,
    pub symbol_table: HashMap<String, SymbolInfo>,
    pub code_patterns: Vec<CodePattern>,
    pub dependency_graph: HashMap<String, Vec<String>>,
}

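/// Snapshot of the editor cursor: position, selection, surrounding lines, and
/// the enclosing function/class.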
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CursorContext {
    pub line: usize,
    pub column: usize,
    pub selected_text: Option<String>,
    pub surrounding_lines: Vec<String>,
    pub current_function: Option<String>,
    pub current_class: Option<String>,
}

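/// Metadata recorded for a symbol discovered during analysis.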
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolInfo {
    pub name: String,
    pub kind: SymbolKind,
    pub location: Location,
    pub definition: String,
    pub usages: Vec<Location>,
    pub related_symbols: Vec<String>,
    pub confidence_score: f64,
}

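/// The kind of symbol a `SymbolInfo` describes.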
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum SymbolKind {
    Function,
    Class,
    Struct,
    Enum,
    Variable,
    Constant,
    Module,
    Type,
    Macro,
}

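/// A recurring pattern learned from the codebase, with examples and a confidence score.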
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CodePattern {
    pub pattern_type: PatternType,
    pub description: String,
    pub examples: Vec<String>,
    pub frequency: usize,
    pub confidence: f64,
}

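/// Categories of patterns the `PatternLearner` can recognize.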
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PatternType {
    NamingConvention,
    ErrorHandling,
    AsyncPattern,
    DesignPattern,
    CodeStyle,
}

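/// A single completion candidate, ranked by `relevance_score`.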
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletionSuggestion {
    pub text: String,
    pub kind: CompletionKind,
    pub relevance_score: f64,
    pub context_info: String,
    pub documentation: Option<String>,
}

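/// The kind of item a completion suggestion inserts.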
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CompletionKind {
    Function,
    Variable,
    Type,
    Keyword,
    Snippet,
    Import,
}

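/// Coordinates tree-sitter analysis, pattern learning, and completion
/// generation over a shared `SemanticContext`.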
pub struct IntelligenceEngine {
    analyzer: TreeSitterAnalyzer,
    context: Arc<RwLock<SemanticContext>>,
    pattern_learner: PatternLearner,
    completion_engine: CompletionEngine,
}

impl IntelligenceEngine {
    pub fn new() -> Result<Self> {
        let analyzer = TreeSitterAnalyzer::new()
            .map_err(|e| anyhow!("Failed to initialize tree-sitter analyzer: {}", e))?;

        let context = Arc::new(RwLock::new(SemanticContext {
            current_file: None,
            recent_files: Vec::new(),
            cursor_context: None,
            symbol_table: HashMap::new(),
            code_patterns: Vec::new(),
            dependency_graph: HashMap::new(),
        }));

        Ok(Self {
            analyzer,
            context,
            pattern_learner: PatternLearner::new(),
            completion_engine: CompletionEngine::new(),
        })
    }

    pub async fn analyze_context(&mut self, workspace_root: &Path) -> Result<()> {
        // Map out the workspace layout (file types, project kind, frameworks).
        self.analyze_workspace_structure(workspace_root).await?;

        // Index symbols from every supported source file.
        self.build_symbol_table(workspace_root).await?;

        // Learn patterns from the freshly built symbol table; the read guard is
        // scoped so it is released before any later write.
        {
            let context = self.context.read().await;
            self.pattern_learner.learn_patterns(&context).await?;
        }

        // Rebuild dependency edges between symbols.
        self.update_dependency_graph().await?;

        Ok(())
    }

    pub async fn get_completions(
        &mut self,
        file_path: &Path,
        cursor_line: usize,
        cursor_column: usize,
        prefix: &str,
    ) -> Result<Vec<CompletionSuggestion>> {
        let context = self.context.read().await;

        let source_code = std::fs::read_to_string(file_path)?;
        let analysis = self
            .analyzer
            .analyze_file_with_tree_sitter(file_path, &source_code)?;

        self.completion_engine
            .generate_completions(&context, &analysis, cursor_line, cursor_column, prefix)
            .await
    }

    pub async fn update_cursor_context(
        &mut self,
        file_path: &Path,
        line: usize,
        column: usize,
        selected_text: Option<String>,
    ) -> Result<()> {
        let source_code = std::fs::read_to_string(file_path)?;
        let lines: Vec<String> = source_code.lines().map(|s| s.to_string()).collect();

        // Capture a window of lines around the cursor, clamped so an
        // out-of-range cursor line cannot panic when slicing.
        let end_line = (line + 5).min(lines.len());
        let start_line = line.saturating_sub(5).min(end_line);
        let surrounding_lines = lines[start_line..end_line].to_vec();

        let current_function = self.analyze_current_function(&source_code, line)?;
        let current_class = self.analyze_current_class(&source_code, line)?;

        let cursor_context = CursorContext {
            line,
            column,
            selected_text,
            surrounding_lines,
            current_function,
            current_class,
        };

        let mut context = self.context.write().await;
        context.cursor_context = Some(cursor_context);
        context.current_file = Some(file_path.to_path_buf());

        self.update_recent_files(&mut context, file_path);

        Ok(())
    }

    async fn analyze_workspace_structure(&mut self, workspace_root: &Path) -> Result<()> {
        // Count files per extension to get a rough picture of the project.
        let mut project_structure = HashMap::new();

        for entry in walkdir::WalkDir::new(workspace_root) {
            let entry = entry?;
            let path = entry.path();

            if path.is_file() {
                if let Some(ext) = path.extension() {
                    let ext_str = ext.to_string_lossy().to_string();
                    *project_structure.entry(ext_str).or_insert(0) += 1;
                }
            }
        }

        let project_type = self.detect_project_type(&project_structure)?;
        let frameworks = self.detect_frameworks(workspace_root)?;

        // Record what was learned as a synthetic "project_info" symbol.
        let mut context = self.context.write().await;
        context.symbol_table.insert(
            "project_info".to_string(),
            SymbolInfo {
                name: "project_info".to_string(),
                kind: SymbolKind::Module,
                location: Location {
                    file: workspace_root.to_string_lossy().to_string(),
                    line: 0,
                    column: 0,
                },
                definition: format!(
                    "{:?} project with frameworks: {:?}",
                    project_type, frameworks
                ),
                usages: Vec::new(),
                related_symbols: Vec::new(),
                confidence_score: 0.9,
            },
        );

        Ok(())
    }

    async fn build_symbol_table(&mut self, workspace_root: &Path) -> Result<()> {
        for entry in walkdir::WalkDir::new(workspace_root) {
            let entry = entry?;
            let path = entry.path();

            if path.is_file() && self.is_supported_file(path) {
                if let Ok(source_code) = std::fs::read_to_string(path) {
                    let analysis = self
                        .analyzer
                        .analyze_file_with_tree_sitter(path, &source_code)?;

                    for symbol in &analysis.symbols {
                        let symbol_info = SymbolInfo {
                            name: symbol.name.clone(),
                            kind: self
                                .map_symbol_kind(&format!("{:?}", symbol.kind).to_lowercase()),
                            location: Location {
                                file: path.to_string_lossy().to_string(),
                                line: symbol.position.row,
                                column: symbol.position.column,
                            },
                            definition: symbol
                                .signature
                                .clone()
                                .unwrap_or_else(|| symbol.name.clone()),
                            usages: Vec::new(),
                            related_symbols: Vec::new(),
                            confidence_score: 0.8,
                        };

                        self.context
                            .write()
                            .await
                            .symbol_table
                            .insert(symbol.name.clone(), symbol_info);
                    }
                }
            }
        }

        Ok(())
    }

    async fn update_dependency_graph(&mut self) -> Result<()> {
        // Collect updates under the read lock, then release it before taking the
        // write lock; holding both guards on the same RwLock would deadlock.
        let dependency_updates = {
            let context = self.context.read().await;
            let mut updates = HashMap::new();

            for (symbol_name, symbol_info) in &context.symbol_table {
                let mut dependencies = Vec::new();

                if let Some(imports) = self.extract_imports(&symbol_info.definition) {
                    dependencies.extend(imports);
                }

                updates.insert(symbol_name.clone(), dependencies);
            }

            updates
        };

        let mut context = self.context.write().await;
        for (symbol_name, dependencies) in dependency_updates {
            context.dependency_graph.insert(symbol_name, dependencies);
        }

        Ok(())
    }

    fn is_supported_file(&self, path: &Path) -> bool {
        if let Some(ext) = path.extension() {
            matches!(
                ext.to_str(),
                Some("rs") | Some("py") | Some("js") | Some("ts") | Some("go") | Some("java")
            )
        } else {
            false
        }
    }

    fn map_symbol_kind(&self, kind: &str) -> SymbolKind {
        match kind {
            "function" => SymbolKind::Function,
            "class" => SymbolKind::Class,
            "struct" => SymbolKind::Struct,
            "enum" => SymbolKind::Enum,
            "variable" => SymbolKind::Variable,
            "constant" => SymbolKind::Constant,
            "module" => SymbolKind::Module,
            "type" => SymbolKind::Type,
            "macro" => SymbolKind::Macro,
            _ => SymbolKind::Variable,
        }
    }

    fn detect_project_type(&self, structure: &HashMap<String, usize>) -> Result<String> {
        if structure.contains_key("rs") {
            Ok("Rust".to_string())
        } else if structure.contains_key("py") {
            Ok("Python".to_string())
        } else if structure.contains_key("js") || structure.contains_key("ts") {
            Ok("JavaScript/TypeScript".to_string())
        } else if structure.contains_key("go") {
            Ok("Go".to_string())
        } else if structure.contains_key("java") {
            Ok("Java".to_string())
        } else {
            Ok("Unknown".to_string())
        }
    }

    fn detect_frameworks(&self, workspace_root: &Path) -> Result<Vec<String>> {
        let mut frameworks = Vec::new();

        let framework_indicators = [
            ("Cargo.toml", "Rust"),
            ("package.json", "Node.js"),
            ("requirements.txt", "Python"),
            ("go.mod", "Go"),
            ("pom.xml", "Maven"),
            ("build.gradle", "Gradle"),
        ];

        for (file, framework) in &framework_indicators {
            if workspace_root.join(file).exists() {
                frameworks.push(framework.to_string());
            }
        }

        Ok(frameworks)
    }

    fn analyze_current_function(&self, source_code: &str, line: usize) -> Result<Option<String>> {
        let lines: Vec<&str> = source_code.lines().collect();
        if line >= lines.len() {
            return Ok(None);
        }

        // Simple heuristic: scan upward for the nearest `fn` declaration.
        for i in (0..=line).rev() {
            let line_content = lines[i].trim();
            if line_content.starts_with("fn ") {
                if let Some(end) = line_content.find('(') {
                    return Ok(Some(line_content[3..end].trim().to_string()));
                }
            }
        }

        Ok(None)
    }

    fn analyze_current_class(&self, source_code: &str, line: usize) -> Result<Option<String>> {
        let lines: Vec<&str> = source_code.lines().collect();
        if line >= lines.len() {
            return Ok(None);
        }

        for i in (0..=line).rev() {
            let line_content = lines[i].trim();
            if line_content.starts_with("struct ") || line_content.starts_with("class ") {
                let parts: Vec<&str> = line_content.split_whitespace().collect();
                if parts.len() >= 2 {
                    return Ok(Some(parts[1].to_string()));
                }
            }
        }

        Ok(None)
    }

    fn extract_imports(&self, definition: &str) -> Option<Vec<String>> {
        let mut imports = Vec::new();

        for line in definition.lines() {
            let line = line.trim();
            if line.starts_with("use ") || line.starts_with("import ") || line.starts_with("from ")
            {
                imports.push(line.to_string());
            }
        }

        if imports.is_empty() {
            None
        } else {
            Some(imports)
        }
    }

    fn update_recent_files(&self, context: &mut SemanticContext, file_path: &Path) {
        let path_buf = file_path.to_path_buf();

        // Move the file to the front of the MRU list, capped at 10 entries.
        context.recent_files.retain(|p| p != &path_buf);

        context.recent_files.insert(0, path_buf);

        if context.recent_files.len() > 10 {
            context.recent_files.truncate(10);
        }
    }
}

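/// Learns recurring patterns (naming conventions, error-handling style) from
/// the indexed symbol table.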
pub struct PatternLearner {
    patterns: Arc<RwLock<HashMap<String, CodePattern>>>,
}

impl PatternLearner {
    pub fn new() -> Self {
        Self {
            patterns: Arc::new(RwLock::new(HashMap::new())),
        }
    }

    pub async fn learn_patterns(&self, context: &SemanticContext) -> Result<()> {
        let mut patterns = self.patterns.write().await;

        self.analyze_naming_conventions(context, &mut patterns)
            .await?;

        self.analyze_error_patterns(context, &mut patterns).await?;

        Ok(())
    }

    async fn analyze_naming_conventions(
        &self,
        context: &SemanticContext,
        patterns: &mut HashMap<String, CodePattern>,
    ) -> Result<()> {
        let mut snake_case_count = 0;
        let mut camel_case_count = 0;
        let mut pascal_case_count = 0;

        for symbol in context.symbol_table.values() {
            if symbol.name.contains('_') {
                snake_case_count += 1;
            } else if symbol
                .name
                .chars()
                .next()
                .map_or(false, |c| c.is_lowercase())
            {
                camel_case_count += 1;
            } else {
                pascal_case_count += 1;
            }
        }

        let total = snake_case_count + camel_case_count + pascal_case_count;
        if total > 0 {
            let dominant_style =
                if snake_case_count > camel_case_count && snake_case_count > pascal_case_count {
                    "snake_case"
                } else if camel_case_count > pascal_case_count {
                    "camelCase"
                } else {
                    "PascalCase"
                };

            patterns.insert(
                "naming_convention".to_string(),
                CodePattern {
                    pattern_type: PatternType::NamingConvention,
                    description: format!("Dominant naming convention: {}", dominant_style),
                    examples: vec![
                        "snake_case".to_string(),
                        "camelCase".to_string(),
                        "PascalCase".to_string(),
                    ],
                    frequency: total,
                    confidence: 0.8,
                },
            );
        }

        Ok(())
    }

    async fn analyze_error_patterns(
        &self,
        context: &SemanticContext,
        patterns: &mut HashMap<String, CodePattern>,
    ) -> Result<()> {
        let mut error_patterns = Vec::new();

        for symbol in context.symbol_table.values() {
            if symbol.definition.contains("Result<") || symbol.definition.contains("Option<") {
                error_patterns.push(symbol.name.clone());
            }
        }

        if !error_patterns.is_empty() {
            patterns.insert(
                "error_handling".to_string(),
                CodePattern {
                    pattern_type: PatternType::ErrorHandling,
                    description: "Uses Result/Option types for error handling".to_string(),
                    examples: error_patterns.clone().into_iter().take(3).collect(),
                    frequency: error_patterns.len(),
                    confidence: 0.9,
                },
            );
        }

        Ok(())
    }
}

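/// Generates completion suggestions from the semantic context, learned
/// patterns, and cursor position; currently stateless.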
pub struct CompletionEngine {}

impl CompletionEngine {
    pub fn new() -> Self {
        Self {}
    }

    pub async fn generate_completions(
        &self,
        context: &SemanticContext,
        analysis: &CodeAnalysis,
        line: usize,
        column: usize,
        prefix: &str,
    ) -> Result<Vec<CompletionSuggestion>> {
        let mut suggestions = Vec::new();

        self.generate_symbol_completions(context, prefix, &mut suggestions)
            .await?;

        self.generate_context_completions(
            context,
            analysis,
            line,
            column,
            prefix,
            &mut suggestions,
        )
        .await?;

        self.generate_pattern_completions(context, prefix, &mut suggestions)
            .await?;

        // Rank by relevance; total_cmp avoids panicking if a score is ever NaN.
        suggestions.sort_by(|a, b| b.relevance_score.total_cmp(&a.relevance_score));

        Ok(suggestions.into_iter().take(10).collect())
    }

    async fn generate_symbol_completions(
        &self,
        context: &SemanticContext,
        prefix: &str,
        suggestions: &mut Vec<CompletionSuggestion>,
    ) -> Result<()> {
        for (name, symbol) in &context.symbol_table {
            if name.starts_with(prefix) {
                suggestions.push(CompletionSuggestion {
                    text: name.clone(),
                    kind: match symbol.kind {
                        SymbolKind::Function => CompletionKind::Function,
                        SymbolKind::Variable => CompletionKind::Variable,
                        SymbolKind::Type => CompletionKind::Type,
                        _ => CompletionKind::Variable,
                    },
                    relevance_score: 0.8,
                    context_info: format!("{:?} - {}", symbol.kind, symbol.location.file),
                    documentation: Some(symbol.definition.clone()),
                });
            }
        }

        Ok(())
    }

    async fn generate_context_completions(
        &self,
        context: &SemanticContext,
        _analysis: &CodeAnalysis,
        _line: usize,
        _column: usize,
        _prefix: &str,
        suggestions: &mut Vec<CompletionSuggestion>,
    ) -> Result<()> {
        if let Some(cursor_ctx) = &context.cursor_context {
            if let Some(current_function) = &cursor_ctx.current_function {
                suggestions.push(CompletionSuggestion {
                    text: format!("{}_result", current_function.to_lowercase()),
                    kind: CompletionKind::Variable,
                    relevance_score: 0.7,
                    context_info: "Local variable suggestion".to_string(),
                    documentation: None,
                });
            }
        }

        Ok(())
    }

    async fn generate_pattern_completions(
        &self,
        context: &SemanticContext,
        prefix: &str,
        suggestions: &mut Vec<CompletionSuggestion>,
    ) -> Result<()> {
        for pattern in &context.code_patterns {
            match pattern.pattern_type {
                PatternType::NamingConvention => {
                    if pattern.description.contains("snake_case") && prefix.contains('_') {
                        suggestions.push(CompletionSuggestion {
                            text: format!("{}_value", prefix),
                            kind: CompletionKind::Variable,
                            relevance_score: 0.6,
                            context_info: "Following naming convention".to_string(),
                            documentation: Some(pattern.description.clone()),
                        });
                    }
                }
                _ => {}
            }
        }

        Ok(())
    }
}