//! opencrates 3.0.1
//!
//! Enterprise-grade AI-powered Rust development companion with comprehensive automation, monitoring, and deployment capabilities.
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::sync::Arc;

#[cfg(feature = "ai-integration")]
use async_openai::{
    config::OpenAIConfig,
    types::{
        ChatCompletionRequestMessage, ChatCompletionRequestSystemMessage,
        ChatCompletionRequestUserMessage, CreateChatCompletionRequestArgs,
    },
    Client,
};

/// Zero-sized stand-in for `async_openai::Client`, compiled only when the
/// `ai-integration` feature is disabled. It lets `OpenAIClient` keep the
/// same `client: Client<_>` field shape in both build configurations.
#[cfg(not(feature = "ai-integration"))]
#[derive(Debug, Default, Clone, Copy)]
pub struct Client<T> {
    // No data is stored; PhantomData records the (unused) config type `T`.
    _phantom: std::marker::PhantomData<T>,
}

/// Thin wrapper around an OpenAI chat-completion client.
///
/// The concrete client type is chosen at compile time: the real
/// `async_openai` client when the `ai-integration` feature is enabled,
/// otherwise the local zero-sized stub (whose methods return canned text).
pub struct OpenAIClient {
    // Real HTTP client, configured with an API key in `new`.
    #[cfg(feature = "ai-integration")]
    client: Client<OpenAIConfig>,
    // Stub placeholder so the struct layout stays identical without the feature.
    #[cfg(not(feature = "ai-integration"))]
    client: Client<()>,
}

impl OpenAIClient {
    /// Creates a client authenticated with `api_key`.
    ///
    /// When the `ai-integration` feature is disabled this returns a stub
    /// client whose methods yield placeholder output instead of calling
    /// the OpenAI API.
    pub fn new(api_key: String) -> Result<Self> {
        #[cfg(feature = "ai-integration")]
        {
            let config = OpenAIConfig::new().with_api_key(api_key);
            let client = Client::with_config(config);
            Ok(Self { client })
        }
        #[cfg(not(feature = "ai-integration"))]
        {
            let _ = api_key; // Avoid unused variable warning
            Ok(Self {
                client: Client {
                    _phantom: std::marker::PhantomData,
                },
            })
        }
    }

    /// Shared chat-completion call used by `generate_code` and `analyze_code`:
    /// builds a system+user message pair, sends one request against the
    /// hard-coded "gpt-4" model, and returns the first choice's text content.
    ///
    /// # Errors
    /// Fails when the request cannot be built, the API call fails, or the
    /// response carries no text content (e.g. empty choice list).
    #[cfg(feature = "ai-integration")]
    async fn chat_completion(
        &self,
        system_prompt: &str,
        user_content: String,
        temperature: f32,
        max_tokens: u32,
    ) -> Result<String> {
        let system_message = ChatCompletionRequestSystemMessage {
            content: async_openai::types::ChatCompletionRequestSystemMessageContent::Text(
                system_prompt.to_string(),
            ),
            name: None,
        };

        let user_message = ChatCompletionRequestUserMessage {
            content: user_content.into(),
            name: None,
        };

        let messages = vec![
            ChatCompletionRequestMessage::System(system_message),
            ChatCompletionRequestMessage::User(user_message),
        ];

        let request = CreateChatCompletionRequestArgs::default()
            .model("gpt-4")
            .messages(messages)
            .temperature(temperature)
            .max_tokens(max_tokens)
            .build()?;

        let response = self.client.chat().create(request).await?;

        // A choice may exist without text content (the content field is
        // optional), so both the choice and its content are checked.
        response
            .choices
            .first()
            .and_then(|choice| choice.message.content.clone())
            .ok_or_else(|| anyhow!("No content in AI response"))
    }

    /// Generates Rust code for `prompt`, with `context` given as background.
    ///
    /// Without the `ai-integration` feature, returns a fixed placeholder
    /// comment instead of contacting the API.
    pub async fn generate_code(&self, prompt: &str, context: &str) -> Result<String> {
        #[cfg(feature = "ai-integration")]
        {
            self.chat_completion(
                "You are an expert Rust developer. Generate high-quality Rust code based on the user's requirements.",
                format!("Context: {}\n\nPrompt: {}", context, prompt),
                0.7,
                2000,
            )
            .await
        }
        #[cfg(not(feature = "ai-integration"))]
        {
            let _ = (prompt, context); // Avoid unused variable warnings
            Ok("// AI integration not available in this build".to_string())
        }
    }

    /// Analyzes `code` and returns improvement suggestions as text.
    ///
    /// Uses a lower temperature and smaller token budget than
    /// `generate_code`, reflecting the more deterministic task. Without the
    /// `ai-integration` feature, returns a fixed placeholder comment.
    pub async fn analyze_code(&self, code: &str) -> Result<String> {
        #[cfg(feature = "ai-integration")]
        {
            self.chat_completion(
                "You are an expert Rust code analyzer. Analyze the provided code and suggest improvements.",
                format!("Analyze this Rust code:\n\n{}", code),
                0.3,
                1500,
            )
            .await
        }
        #[cfg(not(feature = "ai-integration"))]
        {
            let _ = code; // Avoid unused variable warning
            Ok("// AI integration not available in this build".to_string())
        }
    }
}

/// Abstraction over AI backends so callers are not tied to OpenAI
/// (see [`create_ai_client`], which returns this trait object).
#[async_trait]
pub trait AiProvider: Send + Sync {
    /// Generates text from `prompt`, with `context` as background material.
    async fn generate(&self, prompt: &str, context: &str) -> Result<String>;
    /// Analyzes `content` and returns findings as text.
    async fn analyze(&self, content: &str) -> Result<String>;
}

// Trait implementation is pure delegation to the inherent methods,
// mapping the generic trait vocabulary (generate/analyze) onto the
// code-specific client API (generate_code/analyze_code).
#[async_trait]
impl AiProvider for OpenAIClient {
    async fn generate(&self, prompt: &str, context: &str) -> Result<String> {
        self.generate_code(prompt, context).await
    }

    async fn analyze(&self, content: &str) -> Result<String> {
        self.analyze_code(content).await
    }
}

/// Configuration for constructing an AI provider via [`create_ai_client`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AiConfig {
    /// Provider identifier; only "openai" is currently recognized.
    pub provider: String,
    /// API key passed through to the provider client.
    pub api_key: String,
    // NOTE(review): model/temperature/max_tokens are not consumed by
    // OpenAIClient, which hard-codes "gpt-4" and per-call sampling
    // parameters — confirm whether these fields should be wired through.
    /// Model name.
    pub model: String,
    /// Sampling temperature.
    pub temperature: f32,
    /// Maximum tokens per completion.
    pub max_tokens: u32,
}

impl Default for AiConfig {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            api_key: std::env::var("OPENAI_API_KEY").unwrap_or_default(),
            model: "gpt-4".to_string(),
            temperature: 0.7,
            max_tokens: 2000,
        }
    }
}

/// Builds the AI provider selected by `config.provider`, returned as a
/// shareable trait object.
///
/// # Errors
/// Returns an error when the provider name is not recognized or when the
/// underlying client fails to construct.
pub fn create_ai_client(config: &AiConfig) -> Result<Arc<dyn AiProvider>> {
    if config.provider.as_str() == "openai" {
        let client = OpenAIClient::new(config.api_key.clone())?;
        return Ok(Arc::new(client));
    }
    Err(anyhow!("Unsupported AI provider: {}", config.provider))
}