// opengrep 1.1.0
//
// Advanced AST-aware code search tool with tree-sitter parsing
// and AI integration capabilities.
// Documentation:
//! AI integration for intelligent code analysis

use anyhow::Result;
use async_openai::{
    types::{
        ChatCompletionRequestSystemMessageArgs,
        ChatCompletionRequestUserMessageArgs,
        CreateChatCompletionRequestArgs,
    },
    Client,
};
use crate::search::SearchResult;

pub mod analysis;
pub mod suggestions;

pub use analysis::{AnalysisResult, QualityMetrics, SecurityAnalysis, PerformanceAnalysis, ComprehensiveAnalysis};
pub use suggestions::{PatternSuggestion, SuggestionType};

/// AI service for code analysis.
///
/// Wraps an `async_openai` chat client together with the application's AI
/// settings (model name, token limit, temperature) used to build each request.
#[derive(Clone)]
pub struct AiService {
    // Chat-completions client, configured with the API key from `AiConfig`.
    client: Client<async_openai::config::OpenAIConfig>,
    // Per-request settings: model, max_tokens, temperature.
    config: crate::config::AiConfig,
}

impl AiService {
    /// Construct an `AiService` backed by the OpenAI API.
    ///
    /// The client is configured with the API key from `config`; the rest of
    /// `config` (model, token limit, temperature) is retained for per-request
    /// use by the analysis methods.
    pub fn new(config: crate::config::AiConfig) -> Result<Self> {
        let openai_config = async_openai::config::OpenAIConfig::new()
            .with_api_key(&config.api_key);
        let client = Client::with_config(openai_config);

        Ok(Self { client, config })
    }
    
    /// Generate AI insights for a set of search results.
    ///
    /// Builds a prompt from `result` (matches, surrounding context, AST info),
    /// sends it to the configured chat model, and parses the reply into
    /// structured [`crate::search::AiInsights`].
    ///
    /// # Errors
    /// Returns an error if the request cannot be built, the API call fails,
    /// or the response contains no message content.
    pub async fn generate_insights(&self, result: &SearchResult) -> Result<crate::search::AiInsights> {
        let prompt = self.build_insights_prompt(result);

        let request = CreateChatCompletionRequestArgs::default()
            .model(&self.config.model)
            .messages([
                ChatCompletionRequestSystemMessageArgs::default()
                    .content("You are a code analysis expert. Analyze the search results and provide concise, actionable insights about the code patterns, potential issues, and improvement suggestions.")
                    .build()?.into(),
                ChatCompletionRequestUserMessageArgs::default()
                    .content(prompt)
                    .build()?.into(),
            ])
            .max_tokens(self.config.max_tokens)
            .temperature(self.config.temperature)
            .build()?;

        let response = self.client.chat().create(request).await?;
        // `.first()` avoids the panic that `choices[0]` would cause if the API
        // ever returns an empty `choices` array.
        let content = response
            .choices
            .first()
            .and_then(|choice| choice.message.content.as_ref())
            .ok_or_else(|| anyhow::anyhow!("No response from AI"))?;

        self.parse_insights_response(content)
    }
    
    /// Suggest search patterns for a free-text query.
    ///
    /// Asks the model for 3-5 regex/literal patterns (one per line) and parses
    /// them into [`PatternSuggestion`]s. `_context` is currently unused but
    /// kept for interface stability.
    ///
    /// # Errors
    /// Returns an error if the request cannot be built, the API call fails,
    /// or the response contains no message content.
    pub async fn suggest_patterns(&self, query: &str, _context: &str) -> Result<Vec<PatternSuggestion>> {
        let request = CreateChatCompletionRequestArgs::default()
            .model(&self.config.model)
            .messages([
                ChatCompletionRequestSystemMessageArgs::default()
                    .content("You are a code search expert. Given a search query, suggest effective search patterns (regex or literal) that would help find relevant code. Focus on common programming patterns and practices.")
                    .build()?.into(),
                ChatCompletionRequestUserMessageArgs::default()
                    .content(format!(
                        "Query: '{}'\n\nSuggest 3-5 effective search patterns that would help find relevant code. Return only the patterns, one per line, without explanations.",
                        query
                    ))
                    .build()?.into(),
            ])
            .max_tokens(200u32)
            .temperature(0.5)
            .build()?;

        let response = self.client.chat().create(request).await?;
        // `.first()` avoids the panic that `choices[0]` would cause if the API
        // ever returns an empty `choices` array.
        let content = response
            .choices
            .first()
            .and_then(|choice| choice.message.content.as_ref())
            .ok_or_else(|| anyhow::anyhow!("No response from AI"))?;

        self.parse_pattern_suggestions(content)
    }
    
    /// Explain a code snippet in natural language.
    ///
    /// `language`, when provided, is embedded in the prompt to help the model.
    /// Returns a fallback message when the API reply carries no content.
    ///
    /// # Errors
    /// Returns an error if the request cannot be built or the API call fails.
    pub async fn explain_code(&self, code: &str, language: Option<&str>) -> Result<String> {
        let lang_context = language.map(|l| format!(" (language: {})", l)).unwrap_or_default();

        let request = CreateChatCompletionRequestArgs::default()
            .model(&self.config.model)
            .messages([
                ChatCompletionRequestSystemMessageArgs::default()
                    .content("You are a programming expert. Explain code snippets in clear, concise terms. Focus on what the code does, any patterns used, and potential concerns.")
                    .build()?.into(),
                ChatCompletionRequestUserMessageArgs::default()
                    .content(format!(
                        "Explain this code snippet{}:\n\n```\n{}\n```",
                        lang_context, code
                    ))
                    .build()?.into(),
            ])
            .max_tokens(self.config.max_tokens)
            .temperature(self.config.temperature)
            .build()?;

        let response = self.client.chat().create(request).await?;
        // `.first()` avoids a panic on an empty `choices` array, and the clone
        // happens only when content is present — the original allocated a
        // fallback String just to borrow it even on the success path.
        Ok(response
            .choices
            .first()
            .and_then(|choice| choice.message.content.clone())
            .unwrap_or_else(|| "No explanation available".to_string()))
    }
    
    /// Assemble the analysis prompt sent to the model for a search result.
    ///
    /// Includes file metadata, up to five matches with their surrounding
    /// context lines and AST summaries, a note about any omitted matches, and
    /// the instructions describing the expected SUMMARY/PATTERNS/SUGGESTIONS
    /// response format.
    fn build_insights_prompt(&self, result: &SearchResult) -> String {
        const MAX_MATCHES: usize = 5;

        let language = result.metadata.language.as_deref().unwrap_or("unknown");
        let mut prompt = format!(
            "File: {}\nLanguage: {}\nSize: {} bytes\n\nCode matches found:\n",
            result.path.display(),
            language,
            result.metadata.size
        );

        for (index, m) in result.matches.iter().take(MAX_MATCHES).enumerate() {
            prompt.push_str(&format!(
                "\nMatch {} (line {}):\n{}\n",
                index + 1,
                m.line_number,
                m.line_text.trim()
            ));

            // Surrounding lines help the model judge each match in context.
            if !m.before_context.is_empty() {
                prompt.push_str("Before:\n");
                for context_line in &m.before_context {
                    prompt.push_str(&format!("  {}\n", context_line));
                }
            }
            if !m.after_context.is_empty() {
                prompt.push_str("After:\n");
                for context_line in &m.after_context {
                    prompt.push_str(&format!("  {}\n", context_line));
                }
            }

            // Structural (AST) summary, when the parser produced one.
            if let Some(ast) = &m.ast_context {
                prompt.push_str(&format!("AST Context: {}\n", ast.summary));
            }
        }

        let total = result.matches.len();
        if total > MAX_MATCHES {
            prompt.push_str(&format!("\n... and {} more matches\n", total - MAX_MATCHES));
        }

        prompt.push_str("\nAnalyze these matches and provide:\n");
        prompt.push_str("1. SUMMARY: Brief description of what was found\n");
        prompt.push_str("2. PATTERNS: Any code patterns or issues noticed\n");
        prompt.push_str("3. SUGGESTIONS: Specific improvement recommendations\n");
        prompt.push_str("\nKeep responses concise and actionable.");

        prompt
    }
    
    /// Parse the model's free-form reply into structured [`crate::search::AiInsights`].
    ///
    /// Recognizes the `SUMMARY:`/`PATTERNS:`/`SUGGESTIONS:` (or `1.`/`2.`/`3.`)
    /// headers requested by the insights prompt, treating subsequent non-empty
    /// lines as continuations of the current section. Falls back to using the
    /// first response line as the summary when no header is found.
    fn parse_insights_response(&self, response: &str) -> Result<crate::search::AiInsights> {
        let mut summary = String::new();
        let mut explanation = None;
        let mut suggestions = Vec::new();

        // Extract the text after a "HEADER:" or "N." prefix on a header line.
        // `splitn(2, ':')` preserves any colons inside the content itself,
        // which `split(':').nth(1)` would have truncated.
        let header_content = |line: &str, numeric_prefix: &str| -> String {
            line.splitn(2, ':')
                .nth(1)
                .unwrap_or_else(|| line.trim_start_matches(numeric_prefix))
                .trim()
                .to_string()
        };

        let mut current_section = "";

        for line in response.lines() {
            let trimmed = line.trim();

            if trimmed.starts_with("SUMMARY:") || trimmed.starts_with("1.") {
                current_section = "summary";
                summary = header_content(trimmed, "1.");
            } else if trimmed.starts_with("PATTERNS:") || trimmed.starts_with("2.") {
                current_section = "explanation";
                let content = header_content(trimmed, "2.");
                if !content.is_empty() {
                    explanation = Some(content);
                }
            } else if trimmed.starts_with("SUGGESTIONS:") || trimmed.starts_with("3.") {
                current_section = "suggestions";
                let content = header_content(trimmed, "3.");
                if !content.is_empty() {
                    suggestions.push(content);
                }
            } else if !trimmed.is_empty() {
                match current_section {
                    "summary" if summary.is_empty() => summary = trimmed.to_string(),
                    "explanation" if explanation.is_none() => explanation = Some(trimmed.to_string()),
                    "suggestions" => {
                        // Strip common bullet markers ('-', '*', '•'). The
                        // original chained an empty char literal ('') here,
                        // which is not valid Rust.
                        let suggestion = trimmed
                            .trim_start_matches(|c: char| c == '-' || c == '*' || c == '•')
                            .trim();
                        if !suggestion.is_empty() {
                            suggestions.push(suggestion.to_string());
                        }
                    }
                    _ => {}
                }
            }
        }

        // Fallback if no recognizable structure was found.
        if summary.is_empty() {
            summary = response.lines().next().unwrap_or("Analysis completed").to_string();
        }

        Ok(crate::search::AiInsights {
            summary,
            explanation,
            suggestions,
            related_locations: Vec::new(),
        })
    }

    /// Convert the model's line-per-pattern reply into [`PatternSuggestion`]s.
    ///
    /// Blank lines are skipped. A pattern is flagged as a regex when the line
    /// contains characters commonly used in regular expressions (`\`, `*`, `+`).
    fn parse_pattern_suggestions(&self, content: &str) -> Result<Vec<PatternSuggestion>> {
        let mut patterns = Vec::new();

        for raw_line in content.lines() {
            let trimmed = raw_line.trim();
            if trimmed.is_empty() {
                continue;
            }

            let looks_like_regex =
                raw_line.contains(r"\") || raw_line.contains('*') || raw_line.contains('+');

            patterns.push(PatternSuggestion {
                pattern: trimmed.to_string(),
                description: format!("AI suggested pattern: {}", trimmed),
                confidence: 0.8,
                regex: looks_like_regex,
                suggestion_type: SuggestionType::Alternative,
                example: None,
            });
        }

        Ok(patterns)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// The service should be constructible from a plain config value.
    #[test]
    fn test_ai_service_creation() {
        let config = crate::config::AiConfig {
            api_key: "test-key".to_string(),
            model: "gpt-4o-mini".to_string(),
            enable_insights: true,
            enable_explanation: true,
            max_tokens: 1000,
            temperature: 0.3,
        };

        assert!(AiService::new(config).is_ok());
    }
}