mc_minder/ai_client.rs

use anyhow::{Result, Context};
use log::{debug, warn};
use reqwest::Client;
use serde::{Serialize, Deserialize};
use std::time::Duration;
use crate::config::{AiConfig, OllamaConfig};

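/// HTTP client that sends chat prompts to either an OpenAI-compatible API or a
/// local Ollama server, depending on which backend is configured.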
#[derive(Debug, Clone)]
pub struct AiClient {
    client: Client,
    config: AiConfig,
    ollama_config: Option<OllamaConfig>,
}

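/// Request body for the OpenAI-compatible chat completions endpoint.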
#[derive(Debug, Serialize)]
struct ChatRequest {
    model: String,
    messages: Vec<Message>,
    max_tokens: u32,
    temperature: f32,
}

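/// A single chat message, pairing a role (e.g. "user" or "assistant") with its text content.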
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Message {
    pub role: String,
    pub content: String,
}

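/// Response from the OpenAI-compatible endpoint; only the first choice's message is used.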
#[derive(Debug, Deserialize)]
struct ChatResponse {
    choices: Vec<Choice>,
}

#[derive(Debug, Deserialize)]
struct Choice {
    message: Message,
}

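/// Request body for Ollama's generate endpoint; streaming is disabled so the
/// full reply arrives in a single response.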
#[derive(Debug, Serialize)]
struct OllamaRequest {
    model: String,
    prompt: String,
    stream: bool,
}

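/// Response from Ollama; only the generated text is used.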
#[derive(Debug, Deserialize)]
struct OllamaResponse {
    response: String,
}

impl AiClient {
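    /// Builds the underlying HTTP client with a 30-second request timeout.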
    pub fn new(config: AiConfig, ollama_config: Option<OllamaConfig>) -> Result<Self> {
        let client = Client::builder()
            .timeout(Duration::from_secs(30))
            .build()
            .context("Failed to create HTTP client")?;

        Ok(Self {
            client,
            config,
            ollama_config,
        })
    }

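    /// Routes the conversation to Ollama when it is configured and enabled,
    /// falling back to the OpenAI-compatible API otherwise.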
    pub async fn chat(&self, messages: Vec<Message>) -> Result<String> {
        if let Some(ref ollama) = self.ollama_config {
            if ollama.enabled {
                return self.chat_ollama(ollama, messages).await;
            }
        }

        self.chat_openai(messages).await
    }

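    /// Sends the messages to the configured OpenAI-compatible chat completions
    /// API and returns the content of the first choice.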
    async fn chat_openai(&self, messages: Vec<Message>) -> Result<String> {
        let request = ChatRequest {
            model: self.config.model.clone(),
            messages,
            max_tokens: self.config.max_tokens,
            temperature: self.config.temperature,
        };

        debug!("Sending request to OpenAI API");

        let response = self.client
            .post(&self.config.api_url)
            .header("Authorization", format!("Bearer {}", self.config.api_key))
            .json(&request)
            .send()
            .await
            .context("Failed to send request to OpenAI API")?;

        if !response.status().is_success() {
            let status = response.status();
            let body = response.text().await.unwrap_or_default();
            warn!("OpenAI API error: {} - {}", status, body);
            anyhow::bail!("OpenAI API returned error: {}", status);
        }

        let chat_response: ChatResponse = response
            .json()
            .await
            .context("Failed to parse OpenAI response")?;

        chat_response
            .choices
            .first()
            .map(|c| c.message.content.clone())
            .ok_or_else(|| anyhow::anyhow!("No response from OpenAI"))
    }

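    /// Flattens the conversation into a single "role: content" prompt and sends
    /// it to the configured Ollama endpoint with streaming disabled.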
    async fn chat_ollama(&self, ollama: &OllamaConfig, messages: Vec<Message>) -> Result<String> {
        let prompt = messages
            .iter()
            .map(|m| format!("{}: {}", m.role, m.content))
            .collect::<Vec<_>>()
            .join("\n");

        let request = OllamaRequest {
            model: ollama.model.clone(),
            prompt,
            stream: false,
        };

        debug!("Sending request to Ollama API");

        let response = self.client
            .post(&ollama.url)
            .json(&request)
            .send()
            .await
            .context("Failed to send request to Ollama API")?;

        if !response.status().is_success() {
            let status = response.status();
            let body = response.text().await.unwrap_or_default();
            warn!("Ollama API error: {} - {}", status, body);
            anyhow::bail!("Ollama API returned error: {}", status);
        }

        let ollama_response: OllamaResponse = response
            .json()
            .await
            .context("Failed to parse Ollama response")?;

        Ok(ollama_response.response)
    }

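    /// Returns the trigger string from the AI configuration.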
    pub fn get_trigger(&self) -> &str {
        &self.config.trigger
    }
}