// llm/providers/openai_compatible/generic.rs
use async_openai::{Client, config::OpenAIConfig};
2use schemars::Schema;
3
4use crate::provider::get_context_window;
5use crate::tool_schema::normalize_for_moonshot;
6use crate::{Context, LlmError, LlmModel, LlmResponseStream, Result, StreamingModelProvider};
7
8use super::{build_chat_request, create_custom_stream_generic};
9
/// Static description of one OpenAI-compatible chat provider: where its
/// API lives, where the key comes from, and what defaults to use.
///
/// All fields are `&'static` so each provider can be declared as a
/// `const` item (see `DEEPSEEK`, `MOONSHOT`, `ZAI` below).
pub struct ProviderConfig {
    /// Base URL of the provider's OpenAI-compatible HTTP API.
    pub api_base: &'static str,
    /// Name of the environment variable holding the API key.
    pub env_var: &'static str,
    /// Model used when the caller does not select one explicitly.
    pub default_model: &'static str,
    /// Short namespace used to form `prefix:model` identifiers.
    pub prefix: &'static str,
    /// Human-readable provider name for user-facing labels.
    pub display_name: &'static str,
    /// Optional hook that rewrites tool JSON schemas in place, for
    /// providers with stricter schema requirements (e.g. Moonshot).
    pub tool_schema_transform: Option<fn(&mut Schema)>,
}
22
/// DeepSeek's OpenAI-compatible endpoint; accepts standard tool schemas,
/// so no transform is needed.
pub const DEEPSEEK: ProviderConfig = ProviderConfig {
    api_base: "https://api.deepseek.com",
    env_var: "DEEPSEEK_API_KEY",
    default_model: "deepseek-chat",
    prefix: "deepseek",
    display_name: "DeepSeek",
    tool_schema_transform: None,
};
31
/// Moonshot (Kimi) endpoint. Tool schemas are run through
/// `normalize_for_moonshot` before being sent, since Moonshot is
/// stricter about schema shape than the OpenAI baseline.
pub const MOONSHOT: ProviderConfig = ProviderConfig {
    api_base: "https://api.moonshot.ai/v1",
    env_var: "MOONSHOT_API_KEY",
    default_model: "moonshot-v1-8k",
    prefix: "moonshot",
    display_name: "Moonshot",
    tool_schema_transform: Some(normalize_for_moonshot),
};
40
/// Z.ai endpoint (note: the coding-specific API path, not the general
/// chat path); no schema transform needed.
pub const ZAI: ProviderConfig = ProviderConfig {
    api_base: "https://api.z.ai/api/coding/paas/v4",
    env_var: "ZAI_API_KEY",
    default_model: "GLM-4.6",
    prefix: "zai",
    display_name: "Z.ai",
    tool_schema_transform: None,
};
49
/// A streaming LLM provider backed by any OpenAI-compatible API,
/// parameterized at construction by a `'static ProviderConfig`.
pub struct GenericOpenAiProvider {
    // async-openai client pointed at `config.api_base`.
    client: Client<OpenAIConfig>,
    // Currently selected model; starts as `config.default_model`.
    model: String,
    // The static per-provider configuration this instance was built from.
    config: &'static ProviderConfig,
}
56
57impl GenericOpenAiProvider {
58 pub fn from_env(config: &'static ProviderConfig) -> Result<Self> {
59 let api_key = std::env::var(config.env_var).map_err(|_| LlmError::MissingApiKey(config.env_var.to_string()))?;
60 Ok(Self::new(api_key, config))
61 }
62
63 pub fn new(api_key: String, config: &'static ProviderConfig) -> Self {
64 let openai_config = OpenAIConfig::new().with_api_key(api_key).with_api_base(config.api_base.to_string());
65
66 Self { client: Client::with_config(openai_config), model: config.default_model.to_string(), config }
67 }
68
69 pub fn with_model(mut self, model: &str) -> Self {
70 self.model = model.to_string();
71 self
72 }
73}
74
impl StreamingModelProvider for GenericOpenAiProvider {
    /// Reports the current model as a namespaced `prefix:model`
    /// identifier; `None` if that string fails to parse as an `LlmModel`.
    fn model(&self) -> Option<LlmModel> {
        format!("{}:{}", self.config.prefix, self.model).parse().ok()
    }

    /// Looks up the context window (in tokens, presumably — confirm
    /// against `get_context_window`) for the current model; `None`
    /// when the model is unknown to the lookup table.
    fn context_window(&self) -> Option<u32> {
        get_context_window(self.config.prefix, &self.model)
    }

    /// Streams a chat completion for `context`, applying the provider's
    /// tool-schema transform (if any) while building the request.
    fn stream_response(&self, context: &Context) -> LlmResponseStream {
        let request = match build_chat_request(&self.model, context, self.config.tool_schema_transform) {
            Ok(req) => req,
            // A request-build failure is surfaced as a one-item stream
            // yielding the error, so callers handle build and transport
            // failures through the same stream interface.
            Err(e) => return Box::pin(async_stream::stream! { yield Err(e); }),
        };
        create_custom_stream_generic(&self.client, request)
    }

    /// User-facing label, e.g. "DeepSeek (deepseek-chat)".
    fn display_name(&self) -> String {
        format!("{} ({})", self.config.display_name, self.model)
    }
}