//! Generic OpenAI-compatible provider (`llm/providers/openai_compatible/generic.rs`).
use async_openai::{Client, config::OpenAIConfig};

use crate::provider::get_context_window;
use crate::{Context, LlmError, LlmModel, LlmResponseStream, Result, StreamingModelProvider};

use super::{build_chat_request, create_custom_stream_generic};
7
/// Configuration for an OpenAI-compatible provider.
///
/// Each provider that uses the standard `build_chat_request → create_custom_stream_generic`
/// flow differs only in these constants.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ProviderConfig {
    /// Base URL of the provider's OpenAI-compatible REST API.
    pub api_base: &'static str,
    /// Environment variable the API key is read from.
    pub env_var: &'static str,
    /// Model used when the caller does not choose one explicitly.
    pub default_model: &'static str,
    /// Short tag prepended to the model name (e.g. `"deepseek:deepseek-chat"`).
    pub prefix: &'static str,
    /// Human-readable provider name for display purposes.
    pub display_name: &'static str,
}
19
20pub const DEEPSEEK: ProviderConfig = ProviderConfig {
21    api_base: "https://api.deepseek.com",
22    env_var: "DEEPSEEK_API_KEY",
23    default_model: "deepseek-chat",
24    prefix: "deepseek",
25    display_name: "DeepSeek",
26};
27
28pub const MOONSHOT: ProviderConfig = ProviderConfig {
29    api_base: "https://api.moonshot.ai/v1",
30    env_var: "MOONSHOT_API_KEY",
31    default_model: "moonshot-v1-8k",
32    prefix: "moonshot",
33    display_name: "Moonshot",
34};
35
36pub const ZAI: ProviderConfig = ProviderConfig {
37    api_base: "https://api.z.ai/api/coding/paas/v4",
38    env_var: "ZAI_API_KEY",
39    default_model: "GLM-4.6",
40    prefix: "zai",
41    display_name: "Z.ai",
42};
43
44/// A generic provider for APIs that are fully OpenAI-compatible.
45pub struct GenericOpenAiProvider {
46    client: Client<OpenAIConfig>,
47    model: String,
48    config: &'static ProviderConfig,
49}
50
51impl GenericOpenAiProvider {
52    pub fn from_env(config: &'static ProviderConfig) -> Result<Self> {
53        let api_key = std::env::var(config.env_var).map_err(|_| LlmError::MissingApiKey(config.env_var.to_string()))?;
54        Ok(Self::new(api_key, config))
55    }
56
57    pub fn new(api_key: String, config: &'static ProviderConfig) -> Self {
58        let openai_config = OpenAIConfig::new().with_api_key(api_key).with_api_base(config.api_base.to_string());
59
60        Self { client: Client::with_config(openai_config), model: config.default_model.to_string(), config }
61    }
62
63    pub fn with_model(mut self, model: &str) -> Self {
64        self.model = model.to_string();
65        self
66    }
67}
68
69impl StreamingModelProvider for GenericOpenAiProvider {
70    fn model(&self) -> Option<LlmModel> {
71        format!("{}:{}", self.config.prefix, self.model).parse().ok()
72    }
73
74    fn context_window(&self) -> Option<u32> {
75        get_context_window(self.config.prefix, &self.model)
76    }
77
78    fn stream_response(&self, context: &Context) -> LlmResponseStream {
79        let request = match build_chat_request(&self.model, context) {
80            Ok(req) => req,
81            Err(e) => return Box::pin(async_stream::stream! { yield Err(e); }),
82        };
83        create_custom_stream_generic(&self.client, request)
84    }
85
86    fn display_name(&self) -> String {
87        format!("{} ({})", self.config.display_name, self.model)
88    }
89}