//! Unified chat client over OpenAI-style and Anthropic-style LLM APIs.
1use std::collections::HashMap;
2use std::pin::Pin;
3
4use futures::Stream;
5
6use crate::{
7    config::{LlmProviderConfig, ProviderType},
8    error::Error,
9    types::{ChatRequest, ChatResponse, StreamEvent},
10};
11
12fn build_http_client() -> reqwest::Client {
13    reqwest::Client::builder()
14        .use_rustls_tls()
15        .build()
16        .expect("failed to build HTTP client")
17}
18
/// Unified LLM client that works with both OpenAI and Anthropic APIs.
///
/// Construct via [`Client::openai`], [`Client::anthropic`], their
/// `*_with_base_url` variants, or [`Client::from_provider`].
pub struct Client {
    // Which backend to talk to, plus its credentials/endpoint details.
    provider: Provider,
    // One HTTP client reused for every request made through this `Client`.
    http: reqwest::Client,
}
24
// Per-backend connection details; `chat`/`chat_stream` dispatch on this to
// pick the matching provider module (`crate::openai` / `crate::anthropic`).
enum Provider {
    OpenAi {
        api_key: String,
        base_url: String,
        // Extra headers added to every request — NOTE(review): presumably
        // forwarded verbatim by the provider modules; confirm there.
        custom_headers: HashMap<String, String>,
    },
    Anthropic {
        api_key: String,
        base_url: String,
        // Defaulted to "2023-06-01" by the constructors; assumed to become
        // the `anthropic-version` request header — TODO confirm in
        // `crate::anthropic`.
        api_version: String,
        custom_headers: HashMap<String, String>,
    },
}
38
39impl Client {
40    /// Create a client for the OpenAI API (`https://api.openai.com/v1`).
41    pub fn openai(api_key: impl Into<String>) -> Self {
42        Self::openai_with_base_url(api_key, "https://api.openai.com/v1")
43    }
44
45    /// Create a client for an OpenAI-compatible API with a custom base URL.
46    pub fn openai_with_base_url(api_key: impl Into<String>, base_url: impl Into<String>) -> Self {
47        Self {
48            provider: Provider::OpenAi {
49                api_key: api_key.into(),
50                base_url: base_url.into(),
51                custom_headers: HashMap::new(),
52            },
53            http: build_http_client(),
54        }
55    }
56
57    /// Create a client for the Anthropic API (`https://api.anthropic.com`).
58    pub fn anthropic(api_key: impl Into<String>) -> Self {
59        Self::anthropic_with_base_url(api_key, "https://api.anthropic.com")
60    }
61
62    /// Create a client for an Anthropic-compatible API with a custom base URL.
63    pub fn anthropic_with_base_url(
64        api_key: impl Into<String>,
65        base_url: impl Into<String>,
66    ) -> Self {
67        Self {
68            provider: Provider::Anthropic {
69                api_key: api_key.into(),
70                base_url: base_url.into(),
71                api_version: "2023-06-01".to_string(),
72                custom_headers: HashMap::new(),
73            },
74            http: build_http_client(),
75        }
76    }
77
78    /// Create a client from a [`LlmProviderConfig`].
79    pub fn from_provider(config: &LlmProviderConfig) -> Self {
80        let http = build_http_client();
81        match config.provider_type {
82            ProviderType::OpenAi => Self {
83                provider: Provider::OpenAi {
84                    api_key: config.api_key.clone(),
85                    base_url: config.base_url.clone(),
86                    custom_headers: config.custom_header.clone(),
87                },
88                http,
89            },
90            ProviderType::Anthropic => Self {
91                provider: Provider::Anthropic {
92                    api_key: config.api_key.clone(),
93                    base_url: config.base_url.clone(),
94                    api_version: "2023-06-01".to_string(),
95                    custom_headers: config.custom_header.clone(),
96                },
97                http,
98            },
99        }
100    }
101
102    /// Send a chat completion request and wait for the full response.
103    pub async fn chat(&self, request: ChatRequest) -> Result<ChatResponse, Error> {
104        match &self.provider {
105            Provider::OpenAi {
106                api_key,
107                base_url,
108                custom_headers,
109            } => crate::openai::chat(&self.http, base_url, api_key, custom_headers, request).await,
110            Provider::Anthropic {
111                api_key,
112                base_url,
113                api_version,
114                custom_headers,
115            } => {
116                crate::anthropic::chat(
117                    &self.http,
118                    base_url,
119                    api_key,
120                    api_version,
121                    custom_headers,
122                    request,
123                )
124                .await
125            }
126        }
127    }
128
129    /// Send a chat completion request and receive a stream of incremental events.
130    pub async fn chat_stream(
131        &self,
132        request: ChatRequest,
133    ) -> Result<Pin<Box<dyn Stream<Item = Result<StreamEvent, Error>> + Send>>, Error> {
134        match &self.provider {
135            Provider::OpenAi {
136                api_key,
137                base_url,
138                custom_headers,
139            } => {
140                crate::openai::chat_stream(&self.http, base_url, api_key, custom_headers, request)
141                    .await
142            }
143            Provider::Anthropic {
144                api_key,
145                base_url,
146                api_version,
147                custom_headers,
148            } => {
149                crate::anthropic::chat_stream(
150                    &self.http,
151                    base_url,
152                    api_key,
153                    api_version,
154                    custom_headers,
155                    request,
156                )
157                .await
158            }
159        }
160    }
161}