1use crate::error::ApiError;
2use crate::sse::SseParser;
3use crate::types::*;
4use std::collections::VecDeque;
5use std::time::Duration;
6
/// Endpoint for the first-party Ternlang API; also the sentinel value
/// `with_provider` compares against before swapping in a provider default.
const DEFAULT_BASE_URL: &str = "https://api.ternlang.com";
/// Primary response header carrying the server-assigned request id.
const REQUEST_ID_HEADER: &str = "x-request-id";
/// Fallback header name used by some gateways for the request id.
const ALT_REQUEST_ID_HEADER: &str = "request-id";
/// Delay before the first retry; doubled on each subsequent attempt.
const DEFAULT_INITIAL_BACKOFF: Duration = Duration::from_millis(500);
/// Upper bound on any single retry delay.
const DEFAULT_MAX_BACKOFF: Duration = Duration::from_secs(30);
12
/// The LLM backends this client can talk to. The variant selects both the
/// default base URL / API path and which request/response translation is
/// applied in `TernlangClient`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum LlmProvider {
    /// First-party API; requests are sent untranslated.
    Ternlang,
    Anthropic,
    OpenAi,
    HuggingFace,
    Google,
    Azure,
    Aws,
    /// Local, unauthenticated server speaking the OpenAI-compatible API.
    Ollama,
    Xai,
}
25
26impl LlmProvider {
27 pub fn default_base_url(&self) -> &'static str {
28 match self {
29 Self::Ternlang => "https://api.ternlang.com",
30 Self::Anthropic => "https://api.anthropic.com",
31 Self::OpenAi => "https://api.openai.com",
32 Self::HuggingFace => "https://api-inference.huggingface.co",
33 Self::Google => "https://generativelanguage.googleapis.com",
34 Self::Azure => "https://api.azure.com",
35 Self::Aws => "https://bedrock-runtime.us-east-1.amazonaws.com",
36 Self::Ollama => "http://localhost:11434",
37 Self::Xai => "https://api.x.ai",
38 }
39 }
40
41 pub fn api_path(&self) -> &'static str {
42 match self {
43 Self::Ternlang => "/v1/messages",
44 Self::Anthropic => "/v1/messages",
45 Self::OpenAi => "/v1/chat/completions",
46 Self::HuggingFace => "/models",
47 Self::Google => "/v1beta",
48 Self::Ollama => "/v1/chat/completions",
49 Self::Xai => "/v1/chat/completions",
50 _ => "/v1/messages",
51 }
52 }
53}
54
/// HTTP client for one LLM provider, holding endpoint, credentials, and
/// retry policy. Cloning is cheap in spirit (`reqwest::Client` is an
/// internally ref-counted connection pool).
#[derive(Clone)]
pub struct TernlangClient {
    /// Which backend dialect to speak; drives URL construction and
    /// request/response translation.
    pub provider: LlmProvider,
    /// Scheme + host (+ optional path prefix); trailing slashes are trimmed
    /// at request-build time.
    pub base_url: String,
    /// Credential source applied to each outgoing request.
    pub auth: AuthSource,
    /// Shared underlying HTTP client / connection pool.
    pub http: reqwest::Client,
    /// Number of retries after the initial attempt for retryable failures.
    pub max_retries: u32,
    /// Delay before the first retry; doubled each attempt thereafter.
    pub initial_backoff: Duration,
    /// Cap applied to the exponential backoff delay.
    pub max_backoff: Duration,
}
65
impl TernlangClient {
    /// Creates a client for the first-party Ternlang API with default
    /// endpoint and retry/backoff settings.
    pub fn from_auth(auth: AuthSource) -> Self {
        Self {
            provider: LlmProvider::Ternlang,
            base_url: DEFAULT_BASE_URL.to_string(),
            auth,
            http: reqwest::Client::new(),
            max_retries: 3,
            initial_backoff: DEFAULT_INITIAL_BACKOFF,
            max_backoff: DEFAULT_MAX_BACKOFF,
        }
    }

    /// Builds a client from environment / saved credentials, honouring the
    /// `TERNLANG_BASE_URL` override.
    pub fn from_env() -> Result<Self, ApiError> {
        Ok(Self::from_auth(AuthSource::from_env_or_saved()?).with_base_url(read_base_url()))
    }

    /// Replaces the auth source (builder style).
    #[must_use]
    pub fn with_auth_source(mut self, auth: AuthSource) -> Self {
        self.auth = auth;
        self
    }

    /// Overrides the base URL (builder style).
    #[must_use]
    pub fn with_base_url(mut self, base_url: impl Into<String>) -> Self {
        self.base_url = base_url.into();
        self
    }

    /// Switches provider (builder style). The base URL is swapped to the
    /// provider's default only while it still equals the Ternlang default, so
    /// an explicit `with_base_url` survives. NOTE(review): a URL that was set
    /// explicitly but happens to equal the default is replaced too.
    #[must_use]
    pub fn with_provider(mut self, provider: LlmProvider) -> Self {
        self.provider = provider;
        if self.base_url == DEFAULT_BASE_URL {
            self.base_url = provider.default_base_url().to_string();
        }
        self
    }

    /// Builds and sends one HTTP request: picks the URL, translates the body
    /// into the provider's wire format, applies auth headers, and POSTs.
    /// No status checking or retrying happens here.
    async fn send_raw_request(
        &self,
        request: &MessageRequest,
    ) -> Result<reqwest::Response, ApiError> {
        let path = self.provider.api_path();
        // Normalize slashes so base URLs with/without a trailing '/' both work.
        let mut request_url = format!("{}/{}", self.base_url.trim_end_matches('/'), path.trim_start_matches('/'));

        let body = match self.provider {
            LlmProvider::Google => {
                // Gemini embeds the model id in the path ("models/<id>:generateContent").
                let model_id = if request.model.starts_with("models/") {
                    request.model.clone()
                } else {
                    format!("models/{}", request.model)
                };
                let base = format!("{}/v1beta/{}:generateContent", self.base_url.trim_end_matches('/'), model_id);
                // NOTE(review): the API key is placed in the query string, so
                // it will appear in any URL logging — confirm acceptable.
                request_url = if let Some(key) = self.auth.api_key() {
                    format!("{}?key={}", base, key)
                } else {
                    base
                };
                translate_to_gemini(request)
            }
            LlmProvider::Anthropic => translate_to_anthropic(request),
            LlmProvider::OpenAi | LlmProvider::Ollama | LlmProvider::Xai => translate_to_openai(request),
            // Ternlang/HuggingFace/Azure/Aws: request is sent untranslated.
            // NOTE(review): HuggingFace's inference API does not obviously
            // accept this shape — verify.
            _ => serde_json::to_value(request).map_err(ApiError::from)?,
        };

        let mut request_builder = self
            .http
            .post(&request_url)
            .header("content-type", "application/json");

        // Anthropic requires an explicit API version header.
        if self.provider == LlmProvider::Anthropic {
            request_builder = request_builder.header("anthropic-version", "2023-06-01");
        }

        let request_builder = self.auth.apply(self.provider, request_builder);

        request_builder.json(&body).send().await.map_err(ApiError::from)
    }

    /// Sends a non-streaming message, retrying on retryable failures, and
    /// translates the provider response into the internal `MessageResponse`.
    pub async fn send_message(
        &self,
        request: &MessageRequest,
    ) -> Result<MessageResponse, ApiError> {
        // Force non-streaming regardless of what the caller set.
        let request = MessageRequest {
            stream: false,
            ..request.clone()
        };
        let response = self.send_with_retry(&request).await?;
        // Capture the request id before the body read consumes the response.
        let request_id = request_id_from_headers(response.headers());
        let response_json = response
            .json::<serde_json::Value>()
            .await
            .map_err(ApiError::from)?;

        let mut final_response = match self.provider {
            LlmProvider::Google => translate_from_gemini(response_json, &request.model),
            LlmProvider::Anthropic => translate_from_anthropic(response_json, &request.model),
            LlmProvider::OpenAi | LlmProvider::Ollama | LlmProvider::Xai => translate_from_openai(response_json, &request.model),
            // Ternlang et al. are assumed to already return the internal shape.
            _ => serde_json::from_value::<MessageResponse>(response_json).map_err(ApiError::from)?,
        };

        // Prefer a body-supplied request id; fall back to the header value.
        if final_response.request_id.is_none() {
            final_response.request_id = request_id;
        }
        Ok(final_response)
    }

    /// Opens a streaming message exchange.
    ///
    /// Google has no SSE path here: the request is run non-streaming and the
    /// complete response is replayed as a synthetic event stream.
    /// NOTE(review): for the OpenAI-compatible providers this assumes
    /// `SseParser` understands their SSE event format — confirm.
    pub async fn stream_message(
        &mut self,
        request: &MessageRequest,
    ) -> Result<MessageStream, ApiError> {
        if self.provider == LlmProvider::Google {
            let non_stream_req = MessageRequest { stream: false, ..request.clone() };
            let buffered = self.send_message(&non_stream_req).await?;
            return Ok(MessageStream::from_buffered_response(buffered));
        }
        let response = self
            .send_with_retry(&request.clone().with_streaming())
            .await?;
        Ok(MessageStream {
            _request_id: request_id_from_headers(response.headers()),
            response: Some(response),
            parser: SseParser::new(),
            pending: VecDeque::new(),
            done: false,
        })
    }

    /// Sends a request with up to `max_retries` retries (exponential backoff)
    /// for errors that `ApiError::is_retryable` accepts; everything else is
    /// returned immediately.
    async fn send_with_retry(
        &self,
        request: &MessageRequest,
    ) -> Result<reqwest::Response, ApiError> {
        let mut attempts = 0;
        let mut last_error: Option<ApiError>;

        loop {
            attempts += 1;
            match self.send_raw_request(request).await {
                // Transport succeeded; still need a 2xx status.
                Ok(response) => match expect_success(response).await {
                    Ok(response) => return Ok(response),
                    Err(error) if error.is_retryable() && attempts <= self.max_retries => {
                        last_error = Some(error);
                    }
                    Err(error) => return Err(error),
                },
                Err(error) if error.is_retryable() && attempts <= self.max_retries => {
                    last_error = Some(error);
                }
                Err(error) => return Err(error),
            }

            // NOTE(review): this break appears unreachable — the retryable
            // arms above require attempts <= max_retries, and once attempts
            // exceeds that, the non-retryable arms return the raw error
            // directly. Consequently `RetriesExhausted` below is dead code and
            // the final failing attempt surfaces as its raw error. Confirm
            // which behaviour is intended.
            if attempts > self.max_retries {
                break;
            }

            tokio::time::sleep(self.backoff_for_attempt(attempts)?).await;
        }

        Err(ApiError::RetriesExhausted {
            attempts,
            last_error: Box::new(last_error.unwrap_or(ApiError::Auth("Max retries exceeded without error capture".to_string()))),
        })
    }

    /// Exponential backoff: initial * 2^(attempt-1), saturating at
    /// `max_backoff` (also on multiplication overflow). The `Result` wrapper
    /// is vestigial — this never errors.
    fn backoff_for_attempt(&self, attempt: u32) -> Result<Duration, ApiError> {
        let multiplier = 2_u32.pow(attempt.saturating_sub(1));
        Ok(self
            .initial_backoff
            .checked_mul(multiplier)
            .map_or(self.max_backoff, |delay| delay.min(self.max_backoff)))
    }

    /// Lists model ids from providers that expose a listing endpoint
    /// (Google's `/v1beta/models`, and `/v1/models` for OpenAI-compatible
    /// backends). Other providers return an empty list. No retry logic here.
    pub async fn list_remote_models(&self) -> Result<Vec<String>, ApiError> {
        match self.provider {
            LlmProvider::Google => {
                // NOTE(review): with no API key this sends "?key=" (empty) —
                // likely a 4xx; consider omitting the parameter instead.
                let url = format!("{}/v1beta/models?key={}", self.base_url.trim_end_matches('/'), self.auth.api_key().unwrap_or(""));
                let res = self.http.get(&url).send().await.map_err(ApiError::from)?;
                let json: serde_json::Value = res.json().await.map_err(ApiError::from)?;

                let mut models = vec![];
                if let Some(list) = json.get("models").and_then(|m| m.as_array()) {
                    for m in list {
                        if let Some(name) = m.get("name").and_then(|n| n.as_str()) {
                            // NOTE(review): replace() strips every occurrence
                            // of "models/", not just a leading prefix.
                            models.push(name.replace("models/", ""));
                        }
                    }
                }
                Ok(models)
            }
            LlmProvider::OpenAi | LlmProvider::Ollama | LlmProvider::Xai => {
                let url = format!("{}/v1/models", self.base_url.trim_end_matches('/'));
                let res = self.auth.apply(self.provider, self.http.get(&url)).send().await.map_err(ApiError::from)?;
                let json: serde_json::Value = res.json().await.map_err(ApiError::from)?;

                let mut models = vec![];
                if let Some(list) = json.get("data").and_then(|m| m.as_array()) {
                    for m in list {
                        if let Some(id) = m.get("id").and_then(|i| i.as_str()) {
                            models.push(id.to_string());
                        }
                    }
                }
                Ok(models)
            }
            _ => Ok(vec![])
        }
    }

    /// NOTE(review): stub — ignores both arguments and returns a hard-coded
    /// dummy access token. The real OAuth code/token exchange is not
    /// implemented; this must not ship as-is.
    pub async fn exchange_oauth_code(
        &self,
        _config: OAuthConfig,
        _request: &OAuthTokenExchangeRequest,
    ) -> Result<RuntimeTokenSet, ApiError> {
        Ok(RuntimeTokenSet {
            access_token: "dummy_token".to_string(),
            refresh_token: None,
            expires_at: None,
            scopes: vec![],
        })
    }
}
288
/// Incremental reader of stream events, backed either by a live SSE HTTP
/// response or by a pre-computed queue of events (buffered mode).
#[derive(Debug)]
pub struct MessageStream {
    // Request id captured from response headers; currently unread (leading
    // underscore silences the warning).
    _request_id: Option<String>,
    // Live HTTP response to pull chunks from; `None` in buffered mode.
    response: Option<reqwest::Response>,
    // Incremental SSE decoder fed with raw body chunks.
    parser: SseParser,
    // Events already decoded (or pre-seeded) but not yet handed to the caller.
    pending: VecDeque<StreamEvent>,
    // Set once the network side is exhausted; `pending` may still hold events.
    done: bool,
}
297
298impl MessageStream {
299 fn from_buffered_response(response: MessageResponse) -> Self {
300 let mut pending = VecDeque::new();
301 pending.push_back(StreamEvent::MessageStart(MessageStartEvent {
302 message: response.clone(),
303 }));
304 for (i, block) in response.content.iter().enumerate() {
305 let index = i as u32;
306 pending.push_back(StreamEvent::ContentBlockStart(ContentBlockStartEvent {
307 index,
308 content_block: block.clone(),
309 }));
310 if let OutputContentBlock::Text { text } = block {
311 pending.push_back(StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent {
312 index,
313 delta: ContentBlockDelta::TextDelta { text: text.clone() },
314 }));
315 }
316 pending.push_back(StreamEvent::ContentBlockStop(ContentBlockStopEvent { index }));
317 }
318 pending.push_back(StreamEvent::MessageDelta(MessageDeltaEvent {
319 delta: MessageDelta {
320 stop_reason: response.stop_reason,
321 stop_sequence: response.stop_sequence,
322 },
323 usage: response.usage,
324 }));
325 pending.push_back(StreamEvent::MessageStop(MessageStopEvent {}));
326 Self {
327 _request_id: None,
328 response: None,
329 parser: SseParser::new(),
330 pending,
331 done: true,
332 }
333 }
334
335 pub async fn next_event(&mut self) -> Result<Option<StreamEvent>, ApiError> {
336 loop {
337 if let Some(event) = self.pending.pop_front() {
338 return Ok(Some(event));
339 }
340 if self.done { return Ok(None); }
341 match self.response.as_mut() {
342 None => {
343 self.done = true;
344 return Ok(None);
345 }
346 Some(response) => match response.chunk().await? {
347 None => {
348 self.done = true;
349 return Ok(None);
350 }
351 Some(chunk) => {
352 self.pending.extend(self.parser.push(&chunk)?);
353 }
354 },
355 }
356 }
357 }
358}
359
360fn translate_to_anthropic(request: &MessageRequest) -> serde_json::Value {
361 use serde_json::json;
362 let messages: Vec<serde_json::Value> = request.messages.iter().map(|msg| {
363 let content: Vec<serde_json::Value> = msg.content.iter().map(|block| {
364 match block {
365 InputContentBlock::Text { text } => json!({ "type": "text", "text": text }),
366 InputContentBlock::ToolUse { id, name, input } => json!({
367 "type": "tool_use", "id": id, "name": name, "input": input
368 }),
369 InputContentBlock::ToolResult { tool_use_id, content, is_error } => {
370 let text = content.iter().filter_map(|c| {
371 if let ToolResultContentBlock::Text { text } = c { Some(text.clone()) } else { None }
372 }).collect::<Vec<String>>().join("\n");
373 json!({
374 "type": "tool_result", "tool_use_id": tool_use_id, "content": text, "is_error": is_error
375 })
376 }
377 }
378 }).collect();
379 json!({ "role": msg.role, "content": content })
380 }).collect();
381
382 let mut body = json!({
383 "model": request.model,
384 "messages": messages,
385 "max_tokens": request.max_tokens.unwrap_or(4096),
386 "stream": request.stream
387 });
388 if let Some(system) = &request.system { body["system"] = json!(system); }
389 if let Some(tools) = &request.tools {
390 body["tools"] = json!(tools.iter().map(|t| {
391 json!({ "name": t.name, "description": t.description, "input_schema": t.input_schema })
392 }).collect::<Vec<_>>());
393 }
394 body
395}
396
397fn translate_to_openai(request: &MessageRequest) -> serde_json::Value {
398 use serde_json::json;
399 let mut messages = vec![];
400 if let Some(system) = &request.system { messages.push(json!({ "role": "system", "content": system })); }
401
402 for msg in &request.messages {
403 let mut content_text = String::new();
404 let mut tool_calls = vec![];
405
406 for block in &msg.content {
407 match block {
408 InputContentBlock::Text { text } => content_text.push_str(text),
409 InputContentBlock::ToolUse { id, name, input } => {
410 tool_calls.push(json!({
411 "id": id, "type": "function", "function": { "name": name, "arguments": input.to_string() }
412 }));
413 }
414 InputContentBlock::ToolResult { tool_use_id, content, .. } => {
415 let text = content.iter().filter_map(|c| {
416 if let ToolResultContentBlock::Text { text } = c { Some(text.clone()) } else { None }
417 }).collect::<Vec<String>>().join("\n");
418 messages.push(json!({ "role": "tool", "tool_call_id": tool_use_id, "content": text }));
419 }
420 }
421 }
422
423 if !content_text.is_empty() || !tool_calls.is_empty() {
424 let mut m = json!({ "role": msg.role });
425 if !content_text.is_empty() { m["content"] = json!(content_text); }
426 if !tool_calls.is_empty() { m["tool_calls"] = json!(tool_calls); }
427 messages.push(m);
428 }
429 }
430
431 let mut body = json!({ "model": request.model, "messages": messages, "stream": request.stream });
432 if let Some(tools) = &request.tools {
433 body["tools"] = json!(tools.iter().map(|t| {
434 json!({ "type": "function", "function": { "name": t.name, "description": t.description, "parameters": t.input_schema } })
435 }).collect::<Vec<_>>());
436 }
437 body
438}
439
440fn translate_to_gemini(request: &MessageRequest) -> serde_json::Value {
441 use serde_json::json;
442 let contents: Vec<serde_json::Value> = request.messages.iter().map(|msg| {
443 let role = if msg.role == "assistant" { "model" } else { "user" };
444 let parts: Vec<serde_json::Value> = msg.content.iter().map(|block| {
445 match block {
446 InputContentBlock::Text { text } => json!({ "text": text }),
447 InputContentBlock::ToolUse { name, input, .. } => json!({ "functionCall": { "name": name, "args": input } }),
448 InputContentBlock::ToolResult { tool_use_id, content, .. } => {
449 let text = content.iter().filter_map(|c| {
450 if let ToolResultContentBlock::Text { text } = c { Some(text.clone()) } else { None }
451 }).collect::<Vec<String>>().join("\n");
452 json!({ "functionResponse": { "name": tool_use_id, "response": { "result": text } } })
453 }
454 }
455 }).collect();
456 json!({ "role": role, "parts": parts })
457 }).collect();
458
459 let mut body = json!({ "contents": contents });
460 if let Some(system) = &request.system { body["systemInstruction"] = json!({ "parts": [{ "text": system }] }); }
461 if let Some(tools) = &request.tools {
462 let declarations: Vec<serde_json::Value> = tools.iter().map(|t| {
463 json!({ "name": t.name, "description": t.description, "parameters": t.input_schema })
464 }).collect();
465 body["tools"] = json!([{ "functionDeclarations": declarations }]);
466 }
467 body
468}
469
470fn translate_from_anthropic(response: serde_json::Value, model: &str) -> MessageResponse {
471 let mut content = vec![];
472 if let Some(blocks) = response.get("content").and_then(|c| c.as_array()) {
473 for block in blocks {
474 match block.get("type").and_then(|t| t.as_str()) {
475 Some("text") => if let Some(text) = block.get("text").and_then(|t| t.as_str()) {
476 content.push(OutputContentBlock::Text { text: text.to_string() });
477 },
478 Some("tool_use") => if let (Some(id), Some(name), Some(input)) = (
479 block.get("id").and_then(|i| i.as_str()),
480 block.get("name").and_then(|n| n.as_str()),
481 block.get("input")
482 ) {
483 content.push(OutputContentBlock::ToolUse { id: id.to_string(), name: name.to_string(), input: input.clone() });
484 },
485 _ => {}
486 }
487 }
488 }
489 let mut usage = Usage { input_tokens: 0, cache_creation_input_tokens: 0, cache_read_input_tokens: 0, output_tokens: 0 };
490 if let Some(u) = response.get("usage") {
491 usage.input_tokens = u.get("input_tokens").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
492 usage.output_tokens = u.get("output_tokens").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
493 }
494 MessageResponse {
495 id: response.get("id").and_then(|i| i.as_str()).unwrap_or("anthropic-response").to_string(),
496 kind: "message".to_string(), role: "assistant".to_string(), content, model: model.to_string(),
497 stop_reason: response.get("stop_reason").and_then(|s| s.as_str()).map(|s| s.to_string()),
498 stop_sequence: None, usage, request_id: None,
499 }
500}
501
502fn translate_from_openai(response: serde_json::Value, model: &str) -> MessageResponse {
503 let mut content = vec![];
504 if let Some(choices) = response.get("choices").and_then(|c| c.as_array()) {
505 if let Some(choice) = choices.first() {
506 if let Some(message) = choice.get("message") {
507 if let Some(text) = message.get("content").and_then(|c| c.as_str()) {
508 content.push(OutputContentBlock::Text { text: text.to_string() });
509 }
510 if let Some(tool_calls) = message.get("tool_calls").and_then(|t| t.as_array()) {
511 for call in tool_calls {
512 if let (Some(id), Some(name), Some(args_str)) = (
513 call.get("id").and_then(|i| i.as_str()),
514 call.get("function").and_then(|f| f.get("name")).and_then(|n| n.as_str()),
515 call.get("function").and_then(|f| f.get("arguments")).and_then(|a| a.as_str())
516 ) {
517 if let Ok(args) = serde_json::from_str(args_str) {
518 content.push(OutputContentBlock::ToolUse { id: id.to_string(), name: name.to_string(), input: args });
519 }
520 }
521 }
522 }
523 }
524 }
525 }
526 let mut usage = Usage { input_tokens: 0, cache_creation_input_tokens: 0, cache_read_input_tokens: 0, output_tokens: 0 };
527 if let Some(u) = response.get("usage") {
528 usage.input_tokens = u.get("prompt_tokens").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
529 usage.output_tokens = u.get("completion_tokens").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
530 }
531 MessageResponse {
532 id: response.get("id").and_then(|i| i.as_str()).unwrap_or("openai-response").to_string(),
533 kind: "message".to_string(), role: "assistant".to_string(), content, model: model.to_string(),
534 stop_reason: Some("end_turn".to_string()), stop_sequence: None, usage, request_id: None,
535 }
536}
537
538fn translate_from_gemini(response: serde_json::Value, model: &str) -> MessageResponse {
539 let mut content = vec![];
540 if let Some(candidates) = response.get("candidates").and_then(|c| c.as_array()) {
541 if let Some(candidate) = candidates.first() {
542 if let Some(parts) = candidate.get("content").and_then(|c| c.get("parts")).and_then(|p| p.as_array()) {
543 for part in parts {
544 if let Some(text) = part.get("text").and_then(|t| t.as_str()) {
545 content.push(OutputContentBlock::Text { text: text.to_string() });
546 }
547 if let Some(call) = part.get("functionCall") {
548 if let (Some(name), Some(args)) = (call.get("name").and_then(|n| n.as_str()), call.get("args")) {
549 content.push(OutputContentBlock::ToolUse { id: name.to_string(), name: name.to_string(), input: args.clone() });
550 }
551 }
552 }
553 }
554 }
555 }
556 let mut usage = Usage { input_tokens: 0, cache_creation_input_tokens: 0, cache_read_input_tokens: 0, output_tokens: 0 };
557 if let Some(u) = response.get("usageMetadata") {
558 usage.input_tokens = u.get("promptTokenCount").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
559 usage.output_tokens = u.get("candidatesTokenCount").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
560 }
561 MessageResponse {
562 id: "gemini-response".to_string(), kind: "message".to_string(), role: "assistant".to_string(),
563 content, model: model.to_string(), stop_reason: Some("end_turn".to_string()),
564 stop_sequence: None, usage, request_id: None,
565 }
566}
567
568pub fn read_env_non_empty(key: &str) -> Result<Option<String>, ApiError> {
569 match std::env::var(key) {
570 Ok(value) if !value.is_empty() => Ok(Some(value)),
571 Ok(_) | Err(std::env::VarError::NotPresent) => Ok(None),
572 Err(error) => Err(ApiError::from(error)),
573 }
574}
575
576pub fn read_base_url() -> String {
577 std::env::var("TERNLANG_BASE_URL").unwrap_or_else(|_| DEFAULT_BASE_URL.to_string())
578}
579
580fn request_id_from_headers(headers: &reqwest::header::HeaderMap) -> Option<String> {
581 headers
582 .get(REQUEST_ID_HEADER)
583 .or_else(|| headers.get(ALT_REQUEST_ID_HEADER))
584 .and_then(|value| value.to_str().ok())
585 .map(ToOwned::to_owned)
586}
587
588async fn expect_success(response: reqwest::Response) -> Result<reqwest::Response, ApiError> {
589 if response.status().is_success() {
590 Ok(response)
591 } else {
592 Err(ApiError::Auth(format!("HTTP {}", response.status())))
593 }
594}
595
596pub fn resolve_startup_auth_source() -> Result<AuthSource, ApiError> {
597 if let Some(api_key) = read_env_non_empty("TERNLANG_API_KEY")? {
598 return Ok(AuthSource::ApiKey(api_key));
599 }
600 Ok(AuthSource::None)
601}
602
603pub fn resolve_auth_for_provider(provider: LlmProvider) -> Result<AuthSource, ApiError> {
605 let key = match provider {
606 LlmProvider::Anthropic => read_env_non_empty("ANTHROPIC_API_KEY")?,
607 LlmProvider::Google => {
608 let k = read_env_non_empty("GEMINI_API_KEY")?;
609 if k.is_some() { k } else { read_env_non_empty("GOOGLE_API_KEY")? }
610 }
611 LlmProvider::OpenAi => read_env_non_empty("OPENAI_API_KEY")?,
612 LlmProvider::Xai => read_env_non_empty("XAI_API_KEY")?,
613 LlmProvider::HuggingFace => read_env_non_empty("HUGGINGFACE_API_KEY")?,
614 LlmProvider::Ollama => return Ok(AuthSource::None),
615 _ => read_env_non_empty("TERNLANG_API_KEY")?,
616 };
617 Ok(key.map_or(AuthSource::None, AuthSource::ApiKey))
618}
619
620pub fn detect_provider_and_model_from_env() -> Option<(LlmProvider, &'static str)> {
623 let env_set = |var: &str| std::env::var(var).ok().filter(|v| !v.is_empty()).is_some();
624 if env_set("ANTHROPIC_API_KEY") {
625 return Some((LlmProvider::Anthropic, "claude-sonnet-4-5"));
626 }
627 if env_set("GEMINI_API_KEY") || env_set("GOOGLE_API_KEY") {
628 return Some((LlmProvider::Google, "gemini-2.0-flash"));
629 }
630 if env_set("OPENAI_API_KEY") {
631 return Some((LlmProvider::OpenAi, "gpt-4o-mini"));
632 }
633 if env_set("XAI_API_KEY") {
634 return Some((LlmProvider::Xai, "grok-2-1212"));
635 }
636 if env_set("HUGGINGFACE_API_KEY") {
637 return Some((LlmProvider::HuggingFace, "meta-llama/Meta-Llama-3-8B-Instruct"));
638 }
639 None
640}
641
/// OAuth client configuration. Currently an empty placeholder; fields are
/// expected once the real OAuth exchange replaces the stubbed
/// `TernlangClient::exchange_oauth_code`.
#[derive(serde::Deserialize)]
pub struct OAuthConfig {}