// agent_core/client/providers/openai/mod.rs

mod types;

use crate::client::error::LlmError;
use crate::client::http::HttpClient;
use crate::client::models::{Message, MessageOptions};
use crate::client::traits::LlmProvider;
use std::future::Future;
use std::pin::Pin;

/// Provider adapter for the OpenAI API, implementing [`LlmProvider`].
pub struct OpenAIProvider {
    /// Secret API key; turned into request headers via `types::get_request_headers`.
    pub api_key: String,
    /// Model identifier; embedded in the request body by `types::build_request_body`.
    pub model: String,
}
17
18impl OpenAIProvider {
19 pub fn new(api_key: String, model: String) -> Self {
21 Self { api_key, model }
22 }
23}
24
25impl LlmProvider for OpenAIProvider {
26 fn send_msg(
27 &self,
28 client: &HttpClient,
29 messages: &[Message],
30 options: &MessageOptions,
31 ) -> Pin<Box<dyn Future<Output = Result<Message, LlmError>> + Send>> {
32 let client = client.clone();
34 let api_key = self.api_key.clone();
35 let model = self.model.clone();
36 let messages = messages.to_vec();
37 let options = options.clone();
38
39 Box::pin(async move {
40 let body = types::build_request_body(&messages, &options, &model)?;
42
43 let headers = types::get_request_headers(&api_key);
45 let headers_ref: Vec<(&str, &str)> = headers
46 .iter()
47 .map(|(k, v)| (*k, v.as_str()))
48 .collect();
49
50 let response = client
52 .post(types::get_api_url(), &headers_ref, &body)
53 .await?;
54
55 types::parse_response(&response)
57 })
58 }
59}