// ai_code_buddy/core/analysis.rs

1use crate::args::Args;
2use crate::core::{
3    ai_analyzer::{AIAnalyzer, AnalysisRequest, ProgressUpdate},
4    git::GitAnalyzer,
5    review::Review,
6};
7use anyhow::Result;
8use num_cpus;
9use rayon::prelude::*;
10use tokio::sync::mpsc;
11
12pub async fn perform_analysis_with_progress(
13    args: &Args,
14    progress_callback: Option<Box<dyn Fn(f64, String) + Send + Sync>>,
15) -> Result<Review> {
16    println!("📊 Starting AI-powered analysis...");
17
18    let git_analyzer = GitAnalyzer::new(&args.repo_path)?;
19
20    // Get changed files between branches
21    let changed_files = git_analyzer.get_changed_files(&args.source_branch, &args.target_branch)?;
22
23    println!("📈 Found {} changed files", changed_files.len());
24
25    let mut review = Review {
26        files_count: changed_files.len(),
27        issues_count: 0,
28        critical_issues: 0,
29        high_issues: 0,
30        medium_issues: 0,
31        low_issues: 0,
32        issues: Vec::new(),
33    };
34
35    // Initialize AI analyzer
36    let use_gpu = args.use_gpu && !args.force_cpu;
37    if args.force_cpu {
38        println!("💻 CPU mode forced by user with --cpu flag");
39    } else if args.use_gpu {
40        println!("🚀 GPU acceleration enabled (auto-detected or requested)");
41    }
42    let ai_analyzer = AIAnalyzer::new(use_gpu, !args.disable_ai).await?;
43
44    // Create progress channel
45    let (progress_tx, mut progress_rx) = mpsc::unbounded_channel::<ProgressUpdate>();
46
47    // Spawn task to handle progress updates
48    if let Some(callback) = progress_callback {
49        tokio::spawn(async move {
50            while let Some(update) = progress_rx.recv().await {
51                // Format the current file with stage information
52                let status_message = if update.stage.is_empty() {
53                    update.current_file
54                } else {
55                    format!("{} - {}", update.current_file, update.stage)
56                };
57                callback(update.progress, status_message);
58            }
59        });
60    }
61
62    // Analyze each file
63    let total_files = changed_files.len() as f64;
64
65    if args.parallel {
66        println!(
67            "🚀 Using parallel analysis with {} CPU cores",
68            num_cpus::get()
69        );
70        perform_parallel_analysis(
71            &changed_files,
72            args,
73            &git_analyzer,
74            &ai_analyzer,
75            progress_tx.clone(),
76            total_files,
77            &mut review,
78        )
79        .await?;
80    } else {
81        println!("🔄 Using sequential analysis");
82        perform_sequential_analysis(
83            &changed_files,
84            args,
85            &git_analyzer,
86            &ai_analyzer,
87            progress_tx.clone(),
88            total_files,
89            &mut review,
90        )
91        .await?;
92    }
93
94    // Close progress channel
95    drop(progress_tx);
96
97    println!(
98        "✅ AI analysis complete! Found {} issues.",
99        review.issues_count
100    );
101    Ok(review)
102}
103
104async fn perform_sequential_analysis(
105    changed_files: &[String],
106    args: &Args,
107    git_analyzer: &GitAnalyzer,
108    ai_analyzer: &AIAnalyzer,
109    progress_tx: mpsc::UnboundedSender<ProgressUpdate>,
110    total_files: f64,
111    review: &mut Review,
112) -> Result<()> {
113    for (index, file_path) in changed_files.iter().enumerate() {
114        if should_analyze_file(file_path, args) {
115            let commit_status = git_analyzer
116                .get_file_status(file_path)
117                .unwrap_or(crate::core::review::CommitStatus::Committed);
118
119            let status_indicator = match commit_status {
120                crate::core::review::CommitStatus::Committed => "📄",
121                crate::core::review::CommitStatus::Staged => "📑",
122                crate::core::review::CommitStatus::Modified => "📝",
123                crate::core::review::CommitStatus::Untracked => "📄",
124            };
125
126            let file_progress = (index as f64 / total_files) * 100.0;
127            println!(
128                "  {status_indicator} Analyzing: {file_path} ({commit_status:?}) [{file_progress:.1}%]"
129            );
130
131            if let Ok(content) = git_analyzer.get_file_content(file_path, &args.target_branch) {
132                let request = AnalysisRequest {
133                    file_path: file_path.clone(),
134                    content,
135                    language: detect_language(file_path),
136                    commit_status,
137                };
138
139                match ai_analyzer
140                    .analyze_file(request, Some(progress_tx.clone()))
141                    .await
142                {
143                    Ok(file_issues) => {
144                        for issue in file_issues {
145                            match issue.severity.as_str() {
146                                "Critical" => review.critical_issues += 1,
147                                "High" => review.high_issues += 1,
148                                "Medium" => review.medium_issues += 1,
149                                "Low" => review.low_issues += 1,
150                                _ => {}
151                            }
152                            review.issues.push(issue);
153                            review.issues_count += 1;
154                        }
155                    }
156                    Err(e) => {
157                        eprintln!("⚠️  Failed to analyze {file_path}: {e}");
158                    }
159                }
160            }
161        }
162    }
163    Ok(())
164}
165
166async fn perform_parallel_analysis(
167    changed_files: &[String],
168    args: &Args,
169    git_analyzer: &GitAnalyzer,
170    ai_analyzer: &AIAnalyzer,
171    _progress_tx: mpsc::UnboundedSender<ProgressUpdate>,
172    total_files: f64,
173    review: &mut Review,
174) -> Result<()> {
175    // First, collect all file data in the main thread (since GitAnalyzer isn't Send)
176    let mut file_requests = Vec::new();
177
178    for (index, file_path) in changed_files.iter().enumerate() {
179        if should_analyze_file(file_path, args) {
180            let commit_status = git_analyzer
181                .get_file_status(file_path)
182                .unwrap_or(crate::core::review::CommitStatus::Committed);
183
184            if let Ok(content) = git_analyzer.get_file_content(file_path, &args.target_branch) {
185                let request = AnalysisRequest {
186                    file_path: file_path.clone(),
187                    content,
188                    language: detect_language(file_path),
189                    commit_status,
190                };
191                file_requests.push((index, request));
192            }
193        }
194    }
195
196    println!(
197        "📊 Processing {} files in parallel using {} threads",
198        file_requests.len(),
199        num_cpus::get()
200    );
201
202    // Now process the analysis requests in parallel
203    let analysis_results: Vec<_> = file_requests
204        .into_par_iter()
205        .map(|(index, request)| {
206            let file_path = request.file_path.clone();
207            let status_indicator = match request.commit_status {
208                crate::core::review::CommitStatus::Committed => "📄",
209                crate::core::review::CommitStatus::Staged => "📑",
210                crate::core::review::CommitStatus::Modified => "📝",
211                crate::core::review::CommitStatus::Untracked => "📄",
212            };
213
214            let file_progress = (index as f64 / total_files) * 100.0;
215            println!(
216                "  {status_indicator} Analyzing: {} ({:?}) [{:.1}%]",
217                file_path, request.commit_status, file_progress
218            );
219
220            // Perform synchronous rule-based analysis (since AI is disabled)
221            match ai_analyzer.rule_based_analysis(&request) {
222                Ok(issues) => {
223                    println!("    ✅ Found {} issues in {}", issues.len(), file_path);
224                    Ok(issues)
225                }
226                Err(e) => {
227                    eprintln!("⚠️  Failed to analyze {file_path}: {e}");
228                    Err(e)
229                }
230            }
231        })
232        .collect();
233
234    // Collect results
235    for result in analysis_results {
236        match result {
237            Ok(issues) => {
238                for issue in issues {
239                    match issue.severity.as_str() {
240                        "Critical" => review.critical_issues += 1,
241                        "High" => review.high_issues += 1,
242                        "Medium" => review.medium_issues += 1,
243                        "Low" => review.low_issues += 1,
244                        _ => {}
245                    }
246                    review.issues.push(issue);
247                    review.issues_count += 1;
248                }
249            }
250            Err(e) => {
251                eprintln!("⚠️  Analysis failed: {e}");
252            }
253        }
254    }
255
256    println!("🎯 Parallel analysis complete!");
257    Ok(())
258}
259
260pub fn perform_analysis(args: &Args) -> Result<Review> {
261    // Create a simple runtime for synchronous callers
262    let rt = tokio::runtime::Runtime::new()?;
263    rt.block_on(perform_analysis_with_progress(args, None))
264}
265
266fn should_analyze_file(file_path: &str, args: &Args) -> bool {
267    // Check include patterns
268    if !args.include_patterns.is_empty() {
269        let matches_include = args
270            .include_patterns
271            .iter()
272            .any(|pattern| file_matches_pattern(file_path, pattern));
273        if !matches_include {
274            return false;
275        }
276    }
277
278    // Check exclude patterns
279    for pattern in &args.exclude_patterns {
280        if file_matches_pattern(file_path, pattern) {
281            return false;
282        }
283    }
284
285    // Default exclusions
286    if file_path.starts_with("target/")
287        || file_path.contains("node_modules/")
288        || file_path.ends_with(".lock")
289        || file_path.ends_with(".log")
290    {
291        return false;
292    }
293
294    true
295}
296
/// Matches `file_path` against a simplified glob-style `pattern`.
///
/// Supported forms:
/// - `*.ext`  — the path must end with `.ext` (dot included)
/// - `dir/**` — the path must be `dir` itself or live underneath `dir/`
/// - anything else — plain substring match
fn file_matches_pattern(file_path: &str, pattern: &str) -> bool {
    if pattern.starts_with("*.") {
        // "*.rs" -> require the ".rs" suffix (leading '*' dropped, dot kept).
        file_path.ends_with(&pattern[1..])
    } else if let Some(prefix) = pattern.strip_suffix("/**") {
        // Match only on a path-component boundary: "src/**" must not match
        // "src2/lib.rs", which the old bare `starts_with(prefix)` allowed.
        match file_path.strip_prefix(prefix) {
            Some(rest) => rest.is_empty() || rest.starts_with('/'),
            None => false,
        }
    } else {
        file_path.contains(pattern)
    }
}
308
/// Maps a file's extension to a language identifier for the analyzer.
///
/// Comparison is case-insensitive ("Main.RS" -> "rust"); missing or
/// unrecognized extensions yield "unknown".
fn detect_language(file_path: &str) -> String {
    use std::path::Path;
    // Lowercase the extension so "RS"/"Rs"/"rs" all resolve identically
    // (the previous exact match mapped uppercase extensions to "unknown").
    let extension = Path::new(file_path)
        .extension()
        .and_then(|ext| ext.to_str())
        .map(|ext| ext.to_ascii_lowercase());
    let language = match extension.as_deref() {
        Some("rs") => "rust",
        Some("js") => "javascript",
        Some("ts") => "typescript",
        Some("py") => "python",
        Some("java") => "java",
        Some("cpp") | Some("cc") | Some("cxx") => "cpp",
        Some("c") => "c",
        Some("go") => "go",
        Some("php") => "php",
        Some("rb") => "ruby",
        Some("cs") => "csharp",
        _ => "unknown",
    };
    language.to_string()
}
327
#[cfg(test)]
mod tests {
    use super::*;

    /// Constructs an `Args` value with the supplied include/exclude patterns
    /// and inert defaults for every other field.
    fn mk_args(include: Vec<&str>, exclude: Vec<&str>) -> Args {
        Args {
            repo_path: String::from("."),
            source_branch: String::from("main"),
            target_branch: String::from("HEAD"),
            cli_mode: false,
            verbose: false,
            show_credits: false,
            output_format: crate::args::OutputFormat::Summary,
            include_patterns: include.into_iter().map(String::from).collect(),
            exclude_patterns: exclude.into_iter().map(String::from).collect(),
            use_gpu: false,
            force_cpu: true,
            parallel: false,
            disable_ai: false,
        }
    }

    #[test]
    fn test_file_matches_pattern_variants() {
        // Extension glob, directory glob, substring, and a non-match.
        assert!(file_matches_pattern("src/lib.rs", "*.rs"));
        assert!(file_matches_pattern("src/core/mod.rs", "src/**"));
        assert!(file_matches_pattern("foo/bar/baz.txt", "bar"));
        assert!(!file_matches_pattern("src/lib.rs", "*.py"));
    }

    #[test]
    fn test_should_analyze_file_include_exclude() {
        // With an include list, only matching files pass.
        let rs_only = mk_args(vec!["*.rs"], vec![]);
        assert!(should_analyze_file("src/lib.rs", &rs_only));
        assert!(!should_analyze_file("src/app.py", &rs_only));

        // With no patterns at all, the built-in exclusions still apply.
        let defaults = mk_args(vec![], vec![]);
        assert!(!should_analyze_file("target/debug/build.rs", &defaults));
        assert!(!should_analyze_file(
            "foo/node_modules/pkg/index.js",
            &defaults
        ));
        assert!(!should_analyze_file("foo/app.log", &defaults));
        assert!(should_analyze_file("src/main.rs", &defaults));

        // An explicit exclude overrides everything else.
        let no_rs = mk_args(vec![], vec!["*.rs"]);
        assert!(!should_analyze_file("src/lib.rs", &no_rs));
    }

    #[test]
    fn test_detect_language_extensions() {
        assert_eq!(detect_language("a.rs"), "rust");
        assert_eq!(detect_language("a.js"), "javascript");
        assert_eq!(detect_language("a.ts"), "typescript");
        assert_eq!(detect_language("a.py"), "python");
        assert_eq!(detect_language("a.unknown"), "unknown");
    }
}