// a3s_code_core/llm/factory.rs

use super::anthropic::AnthropicClient;
use super::openai::OpenAiClient;
use super::types::SecretString;
use super::zhipu::ZhipuClient;
use super::LlmClient;
use crate::retry::RetryConfig;
use std::sync::Arc;

/// Configuration used by [`create_client_with_config`] to build an LLM client.
///
/// Only `provider`, `model`, and `api_key` are required (see [`LlmConfig::new`]);
/// every other field is optional and defaults to "unset".
#[derive(Clone, Default)]
pub struct LlmConfig {
    /// Provider name, e.g. "anthropic", "openai", "glm". Unrecognized values
    /// fall back to an OpenAI-compatible client.
    pub provider: String,
    /// Model identifier passed through to the provider client.
    pub model: String,
    /// API key, wrapped so it is redacted from `Debug` output.
    pub api_key: SecretString,
    /// Optional endpoint override (e.g. for proxies or self-hosted gateways).
    pub base_url: Option<String>,
    /// Retry policy; `None` means [`RetryConfig::default`] is used.
    pub retry_config: Option<RetryConfig>,
    /// Sampling temperature to forward, unless `disable_temperature` is set.
    pub temperature: Option<f32>,
    /// Maximum number of tokens to request from the model.
    pub max_tokens: Option<usize>,
    /// Thinking-token budget; only applied for Anthropic-family providers.
    pub thinking_budget: Option<usize>,
    /// When true, a configured `temperature` is NOT sent to the provider.
    pub disable_temperature: bool,
}
28
// Manual `Debug` impl instead of `#[derive(Debug)]`: it mirrors the derived
// output for every field except `api_key`, which is printed as "[REDACTED]"
// so the secret can never leak into logs, panics, or error messages.
impl std::fmt::Debug for LlmConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("LlmConfig")
            .field("provider", &self.provider)
            .field("model", &self.model)
            .field("api_key", &"[REDACTED]")
            .field("base_url", &self.base_url)
            .field("retry_config", &self.retry_config)
            .field("temperature", &self.temperature)
            .field("max_tokens", &self.max_tokens)
            .field("thinking_budget", &self.thinking_budget)
            .field("disable_temperature", &self.disable_temperature)
            .finish()
    }
}
44
45impl LlmConfig {
46 pub fn new(
47 provider: impl Into<String>,
48 model: impl Into<String>,
49 api_key: impl Into<String>,
50 ) -> Self {
51 Self {
52 provider: provider.into(),
53 model: model.into(),
54 api_key: SecretString::new(api_key.into()),
55 base_url: None,
56 retry_config: None,
57 temperature: None,
58 max_tokens: None,
59 thinking_budget: None,
60 disable_temperature: false,
61 }
62 }
63
64 pub fn with_base_url(mut self, base_url: impl Into<String>) -> Self {
65 self.base_url = Some(base_url.into());
66 self
67 }
68
69 pub fn with_retry_config(mut self, retry_config: RetryConfig) -> Self {
70 self.retry_config = Some(retry_config);
71 self
72 }
73
74 pub fn with_temperature(mut self, temperature: f32) -> Self {
75 self.temperature = Some(temperature);
76 self
77 }
78
79 pub fn with_max_tokens(mut self, max_tokens: usize) -> Self {
80 self.max_tokens = Some(max_tokens);
81 self
82 }
83
84 pub fn with_thinking_budget(mut self, budget: usize) -> Self {
85 self.thinking_budget = Some(budget);
86 self
87 }
88}
89
90pub fn create_client_with_config(config: LlmConfig) -> Arc<dyn LlmClient> {
92 let retry = config.retry_config.unwrap_or_default();
93 let api_key = config.api_key.expose().to_string();
94
95 match config.provider.as_str() {
96 "anthropic" | "claude" => {
97 let mut client = AnthropicClient::new(api_key, config.model)
98 .with_provider_name(config.provider.clone())
99 .with_retry_config(retry);
100 if let Some(base_url) = config.base_url {
101 client = client.with_base_url(base_url);
102 }
103 if !config.disable_temperature {
104 if let Some(temp) = config.temperature {
105 client = client.with_temperature(temp);
106 }
107 }
108 if let Some(max) = config.max_tokens {
109 client = client.with_max_tokens(max);
110 }
111 if let Some(budget) = config.thinking_budget {
112 client = client.with_thinking_budget(budget);
113 }
114 Arc::new(client)
115 }
116 "openai" | "gpt" => {
117 let mut client = OpenAiClient::new(api_key, config.model)
118 .with_provider_name(config.provider.clone())
119 .with_retry_config(retry);
120 if let Some(base_url) = config.base_url {
121 client = client.with_base_url(base_url);
122 }
123 if !config.disable_temperature {
124 if let Some(temp) = config.temperature {
125 client = client.with_temperature(temp);
126 }
127 }
128 if let Some(max) = config.max_tokens {
129 client = client.with_max_tokens(max);
130 }
131 Arc::new(client)
132 }
133 "glm" | "zhipu" | "bigmodel" => {
134 let mut client = ZhipuClient::new(api_key, config.model).with_retry_config(retry);
135 if let Some(base_url) = config.base_url {
136 client = client.with_base_url(base_url);
137 }
138 if !config.disable_temperature {
139 if let Some(temp) = config.temperature {
140 client = client.with_temperature(temp);
141 }
142 }
143 if let Some(max) = config.max_tokens {
144 client = client.with_max_tokens(max);
145 }
146 Arc::new(client)
147 }
148 _ => {
150 tracing::info!(
151 "Using OpenAI-compatible client for provider '{}'",
152 config.provider
153 );
154 let mut client = OpenAiClient::new(api_key, config.model)
155 .with_provider_name(config.provider.clone())
156 .with_retry_config(retry);
157 if let Some(base_url) = config.base_url {
158 client = client.with_base_url(base_url);
159 }
160 if !config.disable_temperature {
161 if let Some(temp) = config.temperature {
162 client = client.with_temperature(temp);
163 }
164 }
165 if let Some(max) = config.max_tokens {
166 client = client.with_max_tokens(max);
167 }
168 Arc::new(client)
169 }
170 }
171}