// a3s_code_core/llm/factory.rs
//! LLM client factory

use super::anthropic::AnthropicClient;
use super::openai::OpenAiClient;
use super::types::SecretString;
use super::LlmClient;
use crate::retry::RetryConfig;
use std::sync::Arc;
/// LLM client configuration.
///
/// `Debug` is implemented by hand elsewhere in this file (not derived) so
/// that `api_key` is redacted in debug output.
#[derive(Clone, Default)]
pub struct LlmConfig {
    /// Provider id, e.g. `"anthropic"`, `"openai"`, or any
    /// OpenAI-compatible provider name (deepseek, groq, ollama, ...).
    pub provider: String,
    /// Model identifier passed through to the provider client.
    pub model: String,
    /// API key, wrapped in [`SecretString`] so it is never printed as-is.
    pub api_key: SecretString,
    /// Optional endpoint override (proxies, self-hosted compatible servers).
    pub base_url: Option<String>,
    /// Optional retry policy; `RetryConfig::default()` is used when `None`.
    pub retry_config: Option<RetryConfig>,
}
19
impl std::fmt::Debug for LlmConfig {
    /// Manual `Debug`: mirrors the derived output for every field except
    /// `api_key`, which is replaced with `"[REDACTED]"` so the secret can
    /// never leak into logs or panic messages.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("LlmConfig")
            .field("provider", &self.provider)
            .field("model", &self.model)
            // Never print the real key; the actual value stays inside SecretString.
            .field("api_key", &"[REDACTED]")
            .field("base_url", &self.base_url)
            .field("retry_config", &self.retry_config)
            .finish()
    }
}
31
32impl LlmConfig {
33    pub fn new(
34        provider: impl Into<String>,
35        model: impl Into<String>,
36        api_key: impl Into<String>,
37    ) -> Self {
38        Self {
39            provider: provider.into(),
40            model: model.into(),
41            api_key: SecretString::new(api_key.into()),
42            base_url: None,
43            retry_config: None,
44        }
45    }
46
47    pub fn with_base_url(mut self, base_url: impl Into<String>) -> Self {
48        self.base_url = Some(base_url.into());
49        self
50    }
51
52    pub fn with_retry_config(mut self, retry_config: RetryConfig) -> Self {
53        self.retry_config = Some(retry_config);
54        self
55    }
56}
57
58/// Create LLM client with full configuration (supports custom base_url)
59pub fn create_client_with_config(config: LlmConfig) -> Arc<dyn LlmClient> {
60    let retry = config.retry_config.unwrap_or_default();
61    let api_key = config.api_key.expose().to_string();
62
63    match config.provider.as_str() {
64        "anthropic" | "claude" => {
65            let mut client = AnthropicClient::new(api_key, config.model).with_retry_config(retry);
66            if let Some(base_url) = config.base_url {
67                client = client.with_base_url(base_url);
68            }
69            Arc::new(client)
70        }
71        "openai" | "gpt" => {
72            let mut client = OpenAiClient::new(api_key, config.model).with_retry_config(retry);
73            if let Some(base_url) = config.base_url {
74                client = client.with_base_url(base_url);
75            }
76            Arc::new(client)
77        }
78        // OpenAI-compatible providers (deepseek, groq, together, ollama, etc.)
79        _ => {
80            tracing::info!(
81                "Using OpenAI-compatible client for provider '{}'",
82                config.provider
83            );
84            let mut client = OpenAiClient::new(api_key, config.model).with_retry_config(retry);
85            if let Some(base_url) = config.base_url {
86                client = client.with_base_url(base_url);
87            }
88            Arc::new(client)
89        }
90    }
91}