use crate::protocol::LLMProtocol;
use crate::stream::AssistantMessageEventStream;
use crate::types::*;
use async_trait::async_trait;
// Host prefix used to recognize base URLs that already point at Zenmux;
// such URLs (or a missing/empty URL) trigger protocol adaptation.
const ZENMUX_HOST_PREFIX: &str = "https://zenmux.ai";
// Zenmux endpoint used with the OpenAI Responses protocol.
const ZENMUX_OPENAI_BASE_URL: &str = "https://zenmux.ai/api/v1";
// Zenmux endpoint used with the Google Vertex protocol.
const ZENMUX_GOOGLE_BASE_URL: &str = "https://zenmux.ai/api/vertex-ai";
// Zenmux endpoint used with the Anthropic Messages protocol.
const ZENMUX_ANTHROPIC_BASE_URL: &str = "https://zenmux.ai/api/anthropic/v1";
/// Wire protocol a Zenmux model id is routed onto (see `detect_route`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProtocolRoute {
    /// Google Vertex endpoint — model ids containing "google" or "gemini".
    Google,
    /// OpenAI Responses endpoint — model ids containing "openai" or "gpt".
    OpenAI,
    /// Anthropic Messages endpoint — the fallback for all other ids.
    Anthropic,
}
/// Provider for the Zenmux aggregator: dispatches requests to the
/// Google / OpenAI / Anthropic protocol implementation implied by the
/// model id, or passes through as OpenAI completions for custom base URLs.
pub struct ZenmuxProvider {
    // Fallback API key used when the stream options carry none; it takes
    // precedence over the ZENMUX_API_KEY environment variable.
    default_api_key: Option<String>,
}
impl ZenmuxProvider {
pub fn new() -> Self {
Self {
default_api_key: None,
}
}
pub fn with_api_key(api_key: impl Into<String>) -> Self {
Self {
default_api_key: Some(api_key.into()),
}
}
fn resolve_api_key(&self, options: &StreamOptions) -> Option<String> {
if let Some(ref key) = options.api_key {
return Some(key.clone());
}
if let Some(ref key) = self.default_api_key {
return Some(key.clone());
}
std::env::var("ZENMUX_API_KEY").ok()
}
fn should_adapt(options_base_url: &Option<String>, model_base_url: &Option<String>) -> bool {
let effective = options_base_url.as_deref().or(model_base_url.as_deref());
match effective {
None => true,
Some(url) => url.is_empty() || url.starts_with(ZENMUX_HOST_PREFIX),
}
}
fn detect_route(model_id: &str) -> ProtocolRoute {
let lower = model_id.to_lowercase();
if lower.contains("google") || lower.contains("gemini") {
ProtocolRoute::Google
} else if lower.contains("openai") || lower.contains("gpt") {
ProtocolRoute::OpenAI
} else {
ProtocolRoute::Anthropic
}
}
}
impl Default for ZenmuxProvider {
    /// Equivalent to [`ZenmuxProvider::new`]: no baked-in API key.
    fn default() -> Self {
        Self::new()
    }
}
#[async_trait]
impl LLMProtocol for ZenmuxProvider {
fn provider_type(&self) -> Provider {
Provider::Zenmux
}
fn stream(
&self,
model: &Model,
context: &Context,
options: StreamOptions,
) -> AssistantMessageEventStream {
let mut opts = options;
if opts.api_key.is_none() {
opts.api_key = self.resolve_api_key(&opts);
}
let mut m = model.clone();
if Self::should_adapt(&opts.base_url, &m.base_url) {
opts.base_url = None;
match Self::detect_route(&m.id) {
ProtocolRoute::Google => {
m.base_url = Some(ZENMUX_GOOGLE_BASE_URL.to_string());
m.api = Some(Api::GoogleVertex);
let provider = crate::protocol::google::GoogleProtocol::new();
provider.stream(&m, context, opts)
}
ProtocolRoute::OpenAI => {
m.base_url = Some(ZENMUX_OPENAI_BASE_URL.to_string());
m.api = Some(Api::OpenAIResponses);
let provider =
crate::protocol::openai_responses::OpenAIResponsesProtocol::new();
provider.stream(&m, context, opts)
}
ProtocolRoute::Anthropic => {
m.base_url = Some(ZENMUX_ANTHROPIC_BASE_URL.to_string());
m.api = Some(Api::AnthropicMessages);
let provider = crate::protocol::anthropic::AnthropicProtocol::new();
provider.stream(&m, context, opts)
}
}
} else {
m.api = Some(Api::OpenAICompletions);
let provider = crate::protocol::openai_completions::OpenAICompletionsProtocol::new();
provider.stream(&m, context, opts)
}
}
fn stream_simple(
&self,
model: &Model,
context: &Context,
options: SimpleStreamOptions,
) -> AssistantMessageEventStream {
let mut opts = options;
if opts.base.api_key.is_none() {
opts.base.api_key = self.resolve_api_key(&opts.base);
}
let mut m = model.clone();
if Self::should_adapt(&opts.base.base_url, &m.base_url) {
opts.base.base_url = None;
match Self::detect_route(&m.id) {
ProtocolRoute::Google => {
m.base_url = Some(ZENMUX_GOOGLE_BASE_URL.to_string());
m.api = Some(Api::GoogleVertex);
let provider = crate::protocol::google::GoogleProtocol::new();
provider.stream_simple(&m, context, opts)
}
ProtocolRoute::OpenAI => {
m.base_url = Some(ZENMUX_OPENAI_BASE_URL.to_string());
m.api = Some(Api::OpenAIResponses);
let provider =
crate::protocol::openai_responses::OpenAIResponsesProtocol::new();
provider.stream_simple(&m, context, opts)
}
ProtocolRoute::Anthropic => {
m.base_url = Some(ZENMUX_ANTHROPIC_BASE_URL.to_string());
m.api = Some(Api::AnthropicMessages);
let provider = crate::protocol::anthropic::AnthropicProtocol::new();
provider.stream_simple(&m, context, opts)
}
}
} else {
m.api = Some(Api::OpenAICompletions);
let provider = crate::protocol::openai_completions::OpenAICompletionsProtocol::new();
provider.stream_simple(&m, context, opts)
}
}
}