// a3s_code_core/llm/factory.rs
use super::anthropic::AnthropicClient;
4use super::openai::OpenAiClient;
5use super::types::SecretString;
6use super::LlmClient;
7use crate::retry::RetryConfig;
8use std::sync::Arc;
9
/// Configuration for constructing an [`LlmClient`] via [`create_client_with_config`].
#[derive(Clone, Default)]
pub struct LlmConfig {
    /// Provider identifier: "anthropic"/"claude" or "openai"/"gpt"; any other
    /// value falls back to an OpenAI-compatible client.
    pub provider: String,
    /// Model name, passed through to the provider client.
    pub model: String,
    /// API key; wrapped in `SecretString` so the custom `Debug` impl below
    /// can redact it.
    pub api_key: SecretString,
    /// Optional override of the provider's default API endpoint.
    pub base_url: Option<String>,
    /// Retry policy; `None` means `RetryConfig::default()` is used.
    pub retry_config: Option<RetryConfig>,
    /// Sampling temperature; not forwarded when `disable_temperature` is set.
    pub temperature: Option<f32>,
    /// Cap on generated tokens, if any.
    pub max_tokens: Option<usize>,
    /// Thinking/reasoning token budget (only forwarded to Anthropic clients).
    pub thinking_budget: Option<usize>,
    /// When true, `temperature` is never forwarded to the client.
    pub disable_temperature: bool,
}
27
28impl std::fmt::Debug for LlmConfig {
29 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
30 f.debug_struct("LlmConfig")
31 .field("provider", &self.provider)
32 .field("model", &self.model)
33 .field("api_key", &"[REDACTED]")
34 .field("base_url", &self.base_url)
35 .field("retry_config", &self.retry_config)
36 .field("temperature", &self.temperature)
37 .field("max_tokens", &self.max_tokens)
38 .field("thinking_budget", &self.thinking_budget)
39 .field("disable_temperature", &self.disable_temperature)
40 .finish()
41 }
42}
43
44impl LlmConfig {
45 pub fn new(
46 provider: impl Into<String>,
47 model: impl Into<String>,
48 api_key: impl Into<String>,
49 ) -> Self {
50 Self {
51 provider: provider.into(),
52 model: model.into(),
53 api_key: SecretString::new(api_key.into()),
54 base_url: None,
55 retry_config: None,
56 temperature: None,
57 max_tokens: None,
58 thinking_budget: None,
59 disable_temperature: false,
60 }
61 }
62
63 pub fn with_base_url(mut self, base_url: impl Into<String>) -> Self {
64 self.base_url = Some(base_url.into());
65 self
66 }
67
68 pub fn with_retry_config(mut self, retry_config: RetryConfig) -> Self {
69 self.retry_config = Some(retry_config);
70 self
71 }
72
73 pub fn with_temperature(mut self, temperature: f32) -> Self {
74 self.temperature = Some(temperature);
75 self
76 }
77
78 pub fn with_max_tokens(mut self, max_tokens: usize) -> Self {
79 self.max_tokens = Some(max_tokens);
80 self
81 }
82
83 pub fn with_thinking_budget(mut self, budget: usize) -> Self {
84 self.thinking_budget = Some(budget);
85 self
86 }
87}
88
89pub fn create_client_with_config(config: LlmConfig) -> Arc<dyn LlmClient> {
91 let retry = config.retry_config.unwrap_or_default();
92 let api_key = config.api_key.expose().to_string();
93
94 match config.provider.as_str() {
95 "anthropic" | "claude" => {
96 let mut client = AnthropicClient::new(api_key, config.model).with_retry_config(retry);
97 if let Some(base_url) = config.base_url {
98 client = client.with_base_url(base_url);
99 }
100 if !config.disable_temperature {
101 if let Some(temp) = config.temperature {
102 client = client.with_temperature(temp);
103 }
104 }
105 if let Some(max) = config.max_tokens {
106 client = client.with_max_tokens(max);
107 }
108 if let Some(budget) = config.thinking_budget {
109 client = client.with_thinking_budget(budget);
110 }
111 Arc::new(client)
112 }
113 "openai" | "gpt" => {
114 let mut client = OpenAiClient::new(api_key, config.model).with_retry_config(retry);
115 if let Some(base_url) = config.base_url {
116 client = client.with_base_url(base_url);
117 }
118 if !config.disable_temperature {
119 if let Some(temp) = config.temperature {
120 client = client.with_temperature(temp);
121 }
122 }
123 if let Some(max) = config.max_tokens {
124 client = client.with_max_tokens(max);
125 }
126 Arc::new(client)
127 }
128 _ => {
130 tracing::info!(
131 "Using OpenAI-compatible client for provider '{}'",
132 config.provider
133 );
134 let mut client = OpenAiClient::new(api_key, config.model).with_retry_config(retry);
135 if let Some(base_url) = config.base_url {
136 client = client.with_base_url(base_url);
137 }
138 if !config.disable_temperature {
139 if let Some(temp) = config.temperature {
140 client = client.with_temperature(temp);
141 }
142 }
143 if let Some(max) = config.max_tokens {
144 client = client.with_max_tokens(max);
145 }
146 Arc::new(client)
147 }
148 }
149}