agent_core/client/providers/openai/mod.rs

mod types;

use crate::client::error::LlmError;
use crate::client::http::HttpClient;
use crate::client::models::{Message, MessageOptions};
use crate::client::traits::LlmProvider;
use std::future::Future;
use std::pin::Pin;

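/// OpenAI implementation of `LlmProvider`. Holds the API key and model name
/// used to build each request; wire-format details live in the `types` module.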
pub struct OpenAIProvider {
    pub api_key: String,
    pub model: String,
}

impl OpenAIProvider {
    pub fn new(api_key: String, model: String) -> Self {
        Self { api_key, model }
    }
}

impl LlmProvider for OpenAIProvider {
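    // `send_msg` returns a pinned, boxed future rather than using `async fn`,
    // which keeps the trait usable as a trait object (`dyn LlmProvider`);
    // everything the future needs is cloned below so it owns its data.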
    fn send_msg(
        &self,
        client: &HttpClient,
        messages: &[Message],
        options: &MessageOptions,
    ) -> Pin<Box<dyn Future<Output = Result<Message, LlmError>> + Send>> {
        // Clone data for the async block
        let client = client.clone();
        let api_key = self.api_key.clone();
        let model = self.model.clone();
        let messages = messages.to_vec();
        let options = options.clone();

        Box::pin(async move {
            // Build request body
            let body = types::build_request_body(&messages, &options, &model)?;

            // Get headers
            let headers = types::get_request_headers(&api_key);
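            // Re-borrow the owned header values as (&str, &str) pairs before
            // handing them to `post`.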
            let headers_ref: Vec<(&str, &str)> = headers
                .iter()
                .map(|(k, v)| (*k, v.as_str()))
                .collect();

            // Make the API call
            let response = client
                .post(types::get_api_url(), &headers_ref, &body)
                .await?;

            // Parse and return the response
            types::parse_response(&response)
        })
    }
}
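
A minimal usage sketch, not part of mod.rs: it assumes the crate is named agent_core, that these modules are publicly reachable at the paths shown, and that "gpt-4o-mini" stands in for whichever model name the caller wants.

use agent_core::client::error::LlmError;
use agent_core::client::http::HttpClient;
use agent_core::client::models::{Message, MessageOptions};
use agent_core::client::providers::openai::OpenAIProvider;
use agent_core::client::traits::LlmProvider;

// Hypothetical helper: builds a provider from the environment and awaits one call.
async fn ask_openai(
    client: &HttpClient,
    messages: &[Message],
    options: &MessageOptions,
) -> Result<Message, LlmError> {
    let provider = OpenAIProvider::new(
        std::env::var("OPENAI_API_KEY").unwrap_or_default(),
        "gpt-4o-mini".to_string(), // assumed model name
    );

    // send_msg hands back a pinned, boxed future; await it like any async call.
    provider.send_msg(client, messages, options).await
}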