// albert_api/client.rs — multi-provider LLM API client.

1use crate::error::ApiError;
2use crate::sse::SseParser;
3use crate::types::*;
4use std::collections::VecDeque;
5use std::time::Duration;
6
7const DEFAULT_BASE_URL: &str = "https://api.ternlang.com";
8const REQUEST_ID_HEADER: &str = "x-request-id";
9const ALT_REQUEST_ID_HEADER: &str = "request-id";
10const DEFAULT_INITIAL_BACKOFF: Duration = Duration::from_millis(500);
11const DEFAULT_MAX_BACKOFF: Duration = Duration::from_secs(30);
12
/// Identifies which backend LLM API a client talks to.
///
/// Each variant has a default endpoint (`default_base_url`) and a request
/// path (`api_path`); request/response bodies are translated per-provider
/// in `TernlangClient`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum LlmProvider {
    /// First-party Ternlang API (the client's default).
    Ternlang,
    Anthropic,
    OpenAi,
    HuggingFace,
    /// Google Gemini (generativelanguage.googleapis.com).
    Google,
    Azure,
    Aws,
    /// Local Ollama server, spoken to via its OpenAI-compatible endpoint.
    Ollama,
    Xai,
}
25
26impl LlmProvider {
27    pub fn default_base_url(&self) -> &'static str {
28        match self {
29            Self::Ternlang => "https://api.ternlang.com",
30            Self::Anthropic => "https://api.anthropic.com",
31            Self::OpenAi => "https://api.openai.com",
32            Self::HuggingFace => "https://api-inference.huggingface.co",
33            Self::Google => "https://generativelanguage.googleapis.com",
34            Self::Azure => "https://api.azure.com",
35            Self::Aws => "https://bedrock-runtime.us-east-1.amazonaws.com",
36            Self::Ollama => "http://localhost:11434",
37            Self::Xai => "https://api.x.ai",
38        }
39    }
40
41    pub fn api_path(&self) -> &'static str {
42        match self {
43            Self::Ternlang => "/v1/messages",
44            Self::Anthropic => "/v1/messages",
45            Self::OpenAi => "/v1/chat/completions",
46            Self::HuggingFace => "/models",
47            Self::Google => "/v1beta",
48            Self::Ollama => "/v1/chat/completions",
49            Self::Xai => "/v1/chat/completions",
50            _ => "/v1/messages",
51        }
52    }
53}
54
/// Configurable HTTP client for sending message requests to one LLM provider.
#[derive(Clone)]
pub struct TernlangClient {
    // Which backend API the request/response translation targets.
    pub provider: LlmProvider,
    // Base URL; provider-specific paths are appended per request.
    pub base_url: String,
    // Credentials applied to outgoing requests via `AuthSource::apply`.
    pub auth: AuthSource,
    // Underlying HTTP client.
    pub http: reqwest::Client,
    // Number of retries after the first attempt for retryable errors.
    pub max_retries: u32,
    // Backoff before the first retry; doubles each attempt (see
    // `backoff_for_attempt`).
    pub initial_backoff: Duration,
    // Upper bound on any single backoff delay.
    pub max_backoff: Duration,
}
65
66impl TernlangClient {
67    pub fn from_auth(auth: AuthSource) -> Self {
68        Self {
69            provider: LlmProvider::Ternlang,
70            base_url: DEFAULT_BASE_URL.to_string(),
71            auth,
72            http: reqwest::Client::new(),
73            max_retries: 3,
74            initial_backoff: DEFAULT_INITIAL_BACKOFF,
75            max_backoff: DEFAULT_MAX_BACKOFF,
76        }
77    }
78
79    pub fn from_env() -> Result<Self, ApiError> {
80        Ok(Self::from_auth(AuthSource::from_env_or_saved()?).with_base_url(read_base_url()))
81    }
82
83    #[must_use]
84    pub fn with_auth_source(mut self, auth: AuthSource) -> Self {
85        self.auth = auth;
86        self
87    }
88
89    #[must_use]
90    pub fn with_base_url(mut self, base_url: impl Into<String>) -> Self {
91        self.base_url = base_url.into();
92        self
93    }
94
95    #[must_use]
96    pub fn with_provider(mut self, provider: LlmProvider) -> Self {
97        self.provider = provider;
98        if self.base_url == DEFAULT_BASE_URL {
99            self.base_url = provider.default_base_url().to_string();
100        }
101        self
102    }
103
104    async fn send_raw_request(
105        &self,
106        request: &MessageRequest,
107    ) -> Result<reqwest::Response, ApiError> {
108        let path = self.provider.api_path();
109        let mut request_url = format!("{}/{}", self.base_url.trim_end_matches('/'), path.trim_start_matches('/'));
110
111        let body = match self.provider {
112            LlmProvider::Google => {
113                let model_id = if request.model.starts_with("models/") {
114                    request.model.clone()
115                } else {
116                    format!("models/{}", request.model)
117                };
118                let base = format!("{}/v1beta/{}:generateContent", self.base_url.trim_end_matches('/'), model_id);
119                request_url = if let Some(key) = self.auth.api_key() {
120                    format!("{}?key={}", base, key)
121                } else {
122                    base
123                };
124                translate_to_gemini(request)
125            }
126            LlmProvider::Anthropic => translate_to_anthropic(request),
127            LlmProvider::OpenAi | LlmProvider::Ollama | LlmProvider::Xai => translate_to_openai(request),
128            _ => serde_json::to_value(request).map_err(ApiError::from)?,
129        };
130
131        let mut request_builder = self
132            .http
133            .post(&request_url)
134            .header("content-type", "application/json");
135
136        if self.provider == LlmProvider::Anthropic {
137            request_builder = request_builder.header("anthropic-version", "2023-06-01");
138        }
139
140        let request_builder = self.auth.apply(self.provider, request_builder);
141
142        request_builder.json(&body).send().await.map_err(ApiError::from)
143    }
144
145    pub async fn send_message(
146        &self,
147        request: &MessageRequest,
148    ) -> Result<MessageResponse, ApiError> {
149        let request = MessageRequest {
150            stream: false,
151            ..request.clone()
152        };
153        let response = self.send_with_retry(&request).await?;
154        let request_id = request_id_from_headers(response.headers());
155        let response_json = response
156            .json::<serde_json::Value>()
157            .await
158            .map_err(ApiError::from)?;
159        
160        let mut final_response = match self.provider {
161            LlmProvider::Google => translate_from_gemini(response_json, &request.model),
162            LlmProvider::Anthropic => translate_from_anthropic(response_json, &request.model),
163            LlmProvider::OpenAi | LlmProvider::Ollama | LlmProvider::Xai => translate_from_openai(response_json, &request.model),
164            _ => serde_json::from_value::<MessageResponse>(response_json).map_err(ApiError::from)?,
165        };
166
167        if final_response.request_id.is_none() {
168            final_response.request_id = request_id;
169        }
170        Ok(final_response)
171    }
172
173    pub async fn stream_message(
174        &mut self,
175        request: &MessageRequest,
176    ) -> Result<MessageStream, ApiError> {
177        // Gemini SSE format differs from Anthropic's — use non-streaming and wrap events
178        if self.provider == LlmProvider::Google {
179            let non_stream_req = MessageRequest { stream: false, ..request.clone() };
180            let buffered = self.send_message(&non_stream_req).await?;
181            return Ok(MessageStream::from_buffered_response(buffered));
182        }
183        let response = self
184            .send_with_retry(&request.clone().with_streaming())
185            .await?;
186        Ok(MessageStream {
187            _request_id: request_id_from_headers(response.headers()),
188            response: Some(response),
189            parser: SseParser::new(),
190            pending: VecDeque::new(),
191            done: false,
192        })
193    }
194
195    async fn send_with_retry(
196        &self,
197        request: &MessageRequest,
198    ) -> Result<reqwest::Response, ApiError> {
199        let mut attempts = 0;
200        let mut last_error: Option<ApiError>;
201
202        loop {
203            attempts += 1;
204            match self.send_raw_request(request).await {
205                Ok(response) => match expect_success(response).await {
206                    Ok(response) => return Ok(response),
207                    Err(error) if error.is_retryable() && attempts <= self.max_retries => {
208                        last_error = Some(error);
209                    }
210                    Err(error) => return Err(error),
211                },
212                Err(error) if error.is_retryable() && attempts <= self.max_retries => {
213                    last_error = Some(error);
214                }
215                Err(error) => return Err(error),
216            }
217
218            if attempts > self.max_retries {
219                break;
220            }
221
222            tokio::time::sleep(self.backoff_for_attempt(attempts)?).await;
223        }
224
225        Err(ApiError::RetriesExhausted {
226            attempts,
227            last_error: Box::new(last_error.unwrap_or(ApiError::Auth("Max retries exceeded without error capture".to_string()))),
228        })
229    }
230
231    fn backoff_for_attempt(&self, attempt: u32) -> Result<Duration, ApiError> {
232        let multiplier = 2_u32.pow(attempt.saturating_sub(1));
233        Ok(self
234            .initial_backoff
235            .checked_mul(multiplier)
236            .map_or(self.max_backoff, |delay| delay.min(self.max_backoff)))
237    }
238
239    pub async fn list_remote_models(&self) -> Result<Vec<String>, ApiError> {
240        match self.provider {
241            LlmProvider::Google => {
242                let url = format!("{}/v1beta/models?key={}", self.base_url.trim_end_matches('/'), self.auth.api_key().unwrap_or(""));
243                let res = self.http.get(&url).send().await.map_err(ApiError::from)?;
244                let json: serde_json::Value = res.json().await.map_err(ApiError::from)?;
245                
246                let mut models = vec![];
247                if let Some(list) = json.get("models").and_then(|m| m.as_array()) {
248                    for m in list {
249                        if let Some(name) = m.get("name").and_then(|n| n.as_str()) {
250                            models.push(name.replace("models/", ""));
251                        }
252                    }
253                }
254                Ok(models)
255            }
256            LlmProvider::OpenAi | LlmProvider::Ollama | LlmProvider::Xai => {
257                let url = format!("{}/v1/models", self.base_url.trim_end_matches('/'));
258                let res = self.auth.apply(self.provider, self.http.get(&url)).send().await.map_err(ApiError::from)?;
259                let json: serde_json::Value = res.json().await.map_err(ApiError::from)?;
260                
261                let mut models = vec![];
262                if let Some(list) = json.get("data").and_then(|m| m.as_array()) {
263                    for m in list {
264                        if let Some(id) = m.get("id").and_then(|i| i.as_str()) {
265                            models.push(id.to_string());
266                        }
267                    }
268                }
269                Ok(models)
270            }
271            _ => Ok(vec![])
272        }
273    }
274
275    pub async fn exchange_oauth_code(
276        &self,
277        _config: OAuthConfig,
278        _request: &OAuthTokenExchangeRequest,
279    ) -> Result<RuntimeTokenSet, ApiError> {
280        Ok(RuntimeTokenSet {
281            access_token: "dummy_token".to_string(),
282            refresh_token: None,
283            expires_at: None,
284            scopes: vec![],
285        })
286    }
287}
288
/// Incremental stream of `StreamEvent`s from a streaming message exchange.
///
/// Either wraps a live SSE `reqwest::Response`, or (for providers without a
/// compatible SSE format) replays a buffered response as synthetic events.
#[derive(Debug)]
pub struct MessageStream {
    // Request id captured from response headers (currently unused).
    _request_id: Option<String>,
    // Live HTTP response; `None` when replaying a buffered response.
    response: Option<reqwest::Response>,
    // Incremental SSE parser fed with raw body chunks.
    parser: SseParser,
    // Parsed events not yet handed to the caller.
    pending: VecDeque<StreamEvent>,
    // Set once the underlying stream is exhausted.
    done: bool,
}
297
impl MessageStream {
    /// Replay a complete `MessageResponse` as the standard event sequence
    /// (message start → per-block start/delta/stop → message delta → stop),
    /// for providers whose SSE format we don't parse.
    fn from_buffered_response(response: MessageResponse) -> Self {
        let mut pending = VecDeque::new();
        pending.push_back(StreamEvent::MessageStart(MessageStartEvent {
            message: response.clone(),
        }));
        for (i, block) in response.content.iter().enumerate() {
            let index = i as u32;
            pending.push_back(StreamEvent::ContentBlockStart(ContentBlockStartEvent {
                index,
                content_block: block.clone(),
            }));
            // Only text blocks get a delta; other blocks are delivered whole
            // in the start event above.
            if let OutputContentBlock::Text { text } = block {
                pending.push_back(StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent {
                    index,
                    delta: ContentBlockDelta::TextDelta { text: text.clone() },
                }));
            }
            pending.push_back(StreamEvent::ContentBlockStop(ContentBlockStopEvent { index }));
        }
        pending.push_back(StreamEvent::MessageDelta(MessageDeltaEvent {
            delta: MessageDelta {
                stop_reason: response.stop_reason,
                stop_sequence: response.stop_sequence,
            },
            usage: response.usage,
        }));
        pending.push_back(StreamEvent::MessageStop(MessageStopEvent {}));
        Self {
            _request_id: None,
            // No live response: everything is already queued, so the stream
            // is marked done up front.
            response: None,
            parser: SseParser::new(),
            pending,
            done: true,
        }
    }

    /// Return the next stream event, or `None` once the stream is finished.
    ///
    /// Drains queued events first, then pulls body chunks from the HTTP
    /// response through the SSE parser until at least one event is produced
    /// or the body ends.
    pub async fn next_event(&mut self) -> Result<Option<StreamEvent>, ApiError> {
        loop {
            if let Some(event) = self.pending.pop_front() {
                return Ok(Some(event));
            }
            if self.done { return Ok(None); }
            match self.response.as_mut() {
                None => {
                    self.done = true;
                    return Ok(None);
                }
                Some(response) => match response.chunk().await? {
                    None => {
                        // Body exhausted — no further events will arrive.
                        self.done = true;
                        return Ok(None);
                    }
                    Some(chunk) => {
                        // May yield zero events if the chunk ends mid-frame;
                        // the loop then fetches the next chunk.
                        self.pending.extend(self.parser.push(&chunk)?);
                    }
                },
            }
        }
    }
}
359
360fn translate_to_anthropic(request: &MessageRequest) -> serde_json::Value {
361    use serde_json::json;
362    let messages: Vec<serde_json::Value> = request.messages.iter().map(|msg| {
363        let content: Vec<serde_json::Value> = msg.content.iter().map(|block| {
364            match block {
365                InputContentBlock::Text { text } => json!({ "type": "text", "text": text }),
366                InputContentBlock::ToolUse { id, name, input } => json!({
367                    "type": "tool_use", "id": id, "name": name, "input": input
368                }),
369                InputContentBlock::ToolResult { tool_use_id, content, is_error } => {
370                    let text = content.iter().filter_map(|c| {
371                        if let ToolResultContentBlock::Text { text } = c { Some(text.clone()) } else { None }
372                    }).collect::<Vec<String>>().join("\n");
373                    json!({
374                        "type": "tool_result", "tool_use_id": tool_use_id, "content": text, "is_error": is_error
375                    })
376                }
377            }
378        }).collect();
379        json!({ "role": msg.role, "content": content })
380    }).collect();
381
382    let mut body = json!({
383        "model": request.model,
384        "messages": messages,
385        "max_tokens": request.max_tokens.unwrap_or(4096),
386        "stream": request.stream
387    });
388    if let Some(system) = &request.system { body["system"] = json!(system); }
389    if let Some(tools) = &request.tools {
390        body["tools"] = json!(tools.iter().map(|t| {
391            json!({ "name": t.name, "description": t.description, "input_schema": t.input_schema })
392        }).collect::<Vec<_>>());
393    }
394    body
395}
396
397fn translate_to_openai(request: &MessageRequest) -> serde_json::Value {
398    use serde_json::json;
399    let mut messages = vec![];
400    if let Some(system) = &request.system { messages.push(json!({ "role": "system", "content": system })); }
401
402    for msg in &request.messages {
403        let mut content_text = String::new();
404        let mut tool_calls = vec![];
405
406        for block in &msg.content {
407            match block {
408                InputContentBlock::Text { text } => content_text.push_str(text),
409                InputContentBlock::ToolUse { id, name, input } => {
410                    tool_calls.push(json!({
411                        "id": id, "type": "function", "function": { "name": name, "arguments": input.to_string() }
412                    }));
413                }
414                InputContentBlock::ToolResult { tool_use_id, content, .. } => {
415                    let text = content.iter().filter_map(|c| {
416                        if let ToolResultContentBlock::Text { text } = c { Some(text.clone()) } else { None }
417                    }).collect::<Vec<String>>().join("\n");
418                    messages.push(json!({ "role": "tool", "tool_call_id": tool_use_id, "content": text }));
419                }
420            }
421        }
422
423        if !content_text.is_empty() || !tool_calls.is_empty() {
424            let mut m = json!({ "role": msg.role });
425            if !content_text.is_empty() { m["content"] = json!(content_text); }
426            if !tool_calls.is_empty() { m["tool_calls"] = json!(tool_calls); }
427            messages.push(m);
428        }
429    }
430
431    let mut body = json!({ "model": request.model, "messages": messages, "stream": request.stream });
432    if let Some(max) = request.max_tokens {
433        body["max_tokens"] = json!(max);
434    }
435    if let Some(tools) = &request.tools {
436        body["tools"] = json!(tools.iter().map(|t| {
437            json!({ "type": "function", "function": { "name": t.name, "description": t.description, "parameters": t.input_schema } })
438        }).collect::<Vec<_>>());
439    }
440    body
441}
442
443/// Gemini only supports a subset of JSON Schema — strip/normalize fields it rejects.
444fn strip_gemini_unsupported_schema_fields(schema: serde_json::Value) -> serde_json::Value {
445    match schema {
446        serde_json::Value::Object(mut map) => {
447            map.remove("additionalProperties");
448            // "type": ["string", "null"] → "type": "string" (Gemini requires a single type string)
449            if let Some(serde_json::Value::Array(types)) = map.get("type") {
450                let first = types.iter()
451                    .find(|t| t.as_str() != Some("null"))
452                    .or_else(|| types.first())
453                    .cloned()
454                    .unwrap_or(serde_json::Value::String("string".to_string()));
455                map.insert("type".to_string(), first);
456            }
457            let cleaned = map.into_iter()
458                .map(|(k, v)| (k, strip_gemini_unsupported_schema_fields(v)))
459                .collect();
460            serde_json::Value::Object(cleaned)
461        }
462        serde_json::Value::Array(arr) => {
463            serde_json::Value::Array(arr.into_iter().map(strip_gemini_unsupported_schema_fields).collect())
464        }
465        other => other,
466    }
467}
468
469fn translate_to_gemini(request: &MessageRequest) -> serde_json::Value {
470    use serde_json::json;
471    let contents: Vec<serde_json::Value> = request.messages.iter().map(|msg| {
472        let role = if msg.role == "assistant" { "model" } else { "user" };
473        let parts: Vec<serde_json::Value> = msg.content.iter().map(|block| {
474            match block {
475                InputContentBlock::Text { text } => json!({ "text": text }),
476                InputContentBlock::ToolUse { name, input, .. } => json!({ "functionCall": { "name": name, "args": input } }),
477                InputContentBlock::ToolResult { tool_use_id, content, .. } => {
478                    let text = content.iter().filter_map(|c| {
479                        if let ToolResultContentBlock::Text { text } = c { Some(text.clone()) } else { None }
480                    }).collect::<Vec<String>>().join("\n");
481                    json!({ "functionResponse": { "name": tool_use_id, "response": { "result": text } } })
482                }
483            }
484        }).collect();
485        json!({ "role": role, "parts": parts })
486    }).collect();
487
488    let mut body = json!({ "contents": contents });
489    if let Some(system) = &request.system { body["systemInstruction"] = json!({ "parts": [{ "text": system }] }); }
490    if let Some(tools) = &request.tools {
491        let declarations: Vec<serde_json::Value> = tools.iter().map(|t| {
492            json!({ "name": t.name, "description": t.description, "parameters": strip_gemini_unsupported_schema_fields(t.input_schema.clone()) })
493        }).collect();
494        body["tools"] = json!([{ "functionDeclarations": declarations }]);
495    }
496    if let Some(max) = request.max_tokens {
497        body["generationConfig"] = json!({ "maxOutputTokens": max });
498    }
499    body
500}
501
502fn translate_from_anthropic(response: serde_json::Value, model: &str) -> MessageResponse {
503    let mut content = vec![];
504    if let Some(blocks) = response.get("content").and_then(|c| c.as_array()) {
505        for block in blocks {
506            match block.get("type").and_then(|t| t.as_str()) {
507                Some("text") => if let Some(text) = block.get("text").and_then(|t| t.as_str()) {
508                    content.push(OutputContentBlock::Text { text: text.to_string() });
509                },
510                Some("tool_use") => if let (Some(id), Some(name), Some(input)) = (
511                    block.get("id").and_then(|i| i.as_str()),
512                    block.get("name").and_then(|n| n.as_str()),
513                    block.get("input")
514                ) {
515                    content.push(OutputContentBlock::ToolUse { id: id.to_string(), name: name.to_string(), input: input.clone() });
516                },
517                _ => {}
518            }
519        }
520    }
521    let mut usage = Usage { input_tokens: 0, cache_creation_input_tokens: 0, cache_read_input_tokens: 0, output_tokens: 0 };
522    if let Some(u) = response.get("usage") {
523        usage.input_tokens = u.get("input_tokens").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
524        usage.output_tokens = u.get("output_tokens").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
525    }
526    MessageResponse {
527        id: response.get("id").and_then(|i| i.as_str()).unwrap_or("anthropic-response").to_string(),
528        kind: "message".to_string(), role: "assistant".to_string(), content, model: model.to_string(),
529        stop_reason: response.get("stop_reason").and_then(|s| s.as_str()).map(|s| s.to_string()),
530        stop_sequence: None, usage, request_id: None,
531    }
532}
533
534fn translate_from_openai(response: serde_json::Value, model: &str) -> MessageResponse {
535    let mut content = vec![];
536    if let Some(choices) = response.get("choices").and_then(|c| c.as_array()) {
537        if let Some(choice) = choices.first() {
538            if let Some(message) = choice.get("message") {
539                if let Some(text) = message.get("content").and_then(|c| c.as_str()) {
540                    content.push(OutputContentBlock::Text { text: text.to_string() });
541                }
542                if let Some(tool_calls) = message.get("tool_calls").and_then(|t| t.as_array()) {
543                    for call in tool_calls {
544                        if let (Some(id), Some(name), Some(args_str)) = (
545                            call.get("id").and_then(|i| i.as_str()),
546                            call.get("function").and_then(|f| f.get("name")).and_then(|n| n.as_str()),
547                            call.get("function").and_then(|f| f.get("arguments")).and_then(|a| a.as_str())
548                        ) {
549                            if let Ok(args) = serde_json::from_str(args_str) {
550                                content.push(OutputContentBlock::ToolUse { id: id.to_string(), name: name.to_string(), input: args });
551                            }
552                        }
553                    }
554                }
555            }
556        }
557    }
558    let mut usage = Usage { input_tokens: 0, cache_creation_input_tokens: 0, cache_read_input_tokens: 0, output_tokens: 0 };
559    if let Some(u) = response.get("usage") {
560        usage.input_tokens = u.get("prompt_tokens").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
561        usage.output_tokens = u.get("completion_tokens").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
562    }
563    MessageResponse {
564        id: response.get("id").and_then(|i| i.as_str()).unwrap_or("openai-response").to_string(),
565        kind: "message".to_string(), role: "assistant".to_string(), content, model: model.to_string(),
566        stop_reason: Some("end_turn".to_string()), stop_sequence: None, usage, request_id: None,
567    }
568}
569
570fn translate_from_gemini(response: serde_json::Value, model: &str) -> MessageResponse {
571    let mut content = vec![];
572    if let Some(candidates) = response.get("candidates").and_then(|c| c.as_array()) {
573        if let Some(candidate) = candidates.first() {
574            if let Some(parts) = candidate.get("content").and_then(|c| c.get("parts")).and_then(|p| p.as_array()) {
575                for part in parts {
576                    if let Some(text) = part.get("text").and_then(|t| t.as_str()) {
577                        content.push(OutputContentBlock::Text { text: text.to_string() });
578                    }
579                    if let Some(call) = part.get("functionCall") {
580                        if let (Some(name), Some(args)) = (call.get("name").and_then(|n| n.as_str()), call.get("args")) {
581                            content.push(OutputContentBlock::ToolUse { id: name.to_string(), name: name.to_string(), input: args.clone() });
582                        }
583                    }
584                }
585            }
586        }
587    }
588    let mut usage = Usage { input_tokens: 0, cache_creation_input_tokens: 0, cache_read_input_tokens: 0, output_tokens: 0 };
589    if let Some(u) = response.get("usageMetadata") {
590        usage.input_tokens = u.get("promptTokenCount").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
591        usage.output_tokens = u.get("candidatesTokenCount").and_then(|c| c.as_u64()).unwrap_or(0) as u32;
592    }
593    MessageResponse {
594        id: "gemini-response".to_string(), kind: "message".to_string(), role: "assistant".to_string(),
595        content, model: model.to_string(), stop_reason: Some("end_turn".to_string()),
596        stop_sequence: None, usage, request_id: None,
597    }
598}
599
600pub fn read_env_non_empty(key: &str) -> Result<Option<String>, ApiError> {
601    match std::env::var(key) {
602        Ok(value) if !value.is_empty() => Ok(Some(value)),
603        Ok(_) | Err(std::env::VarError::NotPresent) => Ok(None),
604        Err(error) => Err(ApiError::from(error)),
605    }
606}
607
608pub fn read_base_url() -> String {
609    std::env::var("TERNLANG_BASE_URL").unwrap_or_else(|_| DEFAULT_BASE_URL.to_string())
610}
611
612fn request_id_from_headers(headers: &reqwest::header::HeaderMap) -> Option<String> {
613    headers
614        .get(REQUEST_ID_HEADER)
615        .or_else(|| headers.get(ALT_REQUEST_ID_HEADER))
616        .and_then(|value| value.to_str().ok())
617        .map(ToOwned::to_owned)
618}
619
/// Pass the response through when its status is 2xx; otherwise consume the
/// body and return an error containing the status and body text.
///
/// NOTE(review): every non-success status is wrapped in `ApiError::Auth`,
/// including 429/5xx — if `ApiError::is_retryable` keys off the variant,
/// retryable HTTP failures will never actually be retried by
/// `send_with_retry`. Consider a dedicated HTTP-status variant; verify
/// against `ApiError::is_retryable`.
async fn expect_success(response: reqwest::Response) -> Result<reqwest::Response, ApiError> {
    if response.status().is_success() {
        return Ok(response);
    }
    let status = response.status();
    // Best effort: an unreadable body still produces an error with the status.
    let body = response.text().await.unwrap_or_default();
    Err(ApiError::Auth(format!("HTTP {status}: {body}")))
}
628
629pub fn resolve_startup_auth_source() -> Result<AuthSource, ApiError> {
630    if let Some(api_key) = read_env_non_empty("TERNLANG_API_KEY")? {
631        return Ok(AuthSource::ApiKey(api_key));
632    }
633    Ok(AuthSource::None)
634}
635
636/// Read the standard env var for `provider` and return the appropriate auth.
637pub fn resolve_auth_for_provider(provider: LlmProvider) -> Result<AuthSource, ApiError> {
638    let key = match provider {
639        LlmProvider::Anthropic => read_env_non_empty("ANTHROPIC_API_KEY")?,
640        LlmProvider::Google => {
641            read_env_non_empty("GEMINI_API_KEY").ok().flatten()
642                .or_else(|| read_env_non_empty("GOOGLE_API_KEY").ok().flatten())
643        }
644        LlmProvider::OpenAi => read_env_non_empty("OPENAI_API_KEY")?,
645        LlmProvider::Xai => read_env_non_empty("XAI_API_KEY")?,
646        LlmProvider::HuggingFace => read_env_non_empty("HUGGINGFACE_API_KEY")?,
647        LlmProvider::Ollama => return Ok(AuthSource::None),
648        _ => read_env_non_empty("TERNLANG_API_KEY")?,
649    };
650    Ok(key.map_or(AuthSource::None, AuthSource::ApiKey))
651}
652
653/// Scan well-known env vars and return the first available (provider, default-model) pair.
654/// Returns None if no recognised key is set (Ollama local is not detected here).
655pub fn detect_provider_and_model_from_env() -> Option<(LlmProvider, &'static str)> {
656    let env_set = |var: &str| std::env::var(var).ok().filter(|v| !v.is_empty()).is_some();
657    if env_set("ANTHROPIC_API_KEY") {
658        return Some((LlmProvider::Anthropic, "claude-sonnet-4-5"));
659    }
660    if env_set("GEMINI_API_KEY") || env_set("GOOGLE_API_KEY") {
661        return Some((LlmProvider::Google, "gemini-2.0-flash"));
662    }
663    if env_set("OPENAI_API_KEY") {
664        return Some((LlmProvider::OpenAi, "gpt-4o-mini"));
665    }
666    if env_set("XAI_API_KEY") {
667        return Some((LlmProvider::Xai, "grok-2-1212"));
668    }
669    if env_set("HUGGINGFACE_API_KEY") {
670        return Some((LlmProvider::HuggingFace, "meta-llama/Meta-Llama-3-8B-Instruct"));
671    }
672    None
673}
674
/// OAuth configuration passed to `exchange_oauth_code`.
/// Currently carries no fields — presumably a placeholder for a future
/// client-id/redirect-uri payload; confirm against the OAuth flow design.
#[derive(serde::Deserialize)]
pub struct OAuthConfig {}