// tiycore 0.2.3 — Unified LLM API and stateful Agent runtime in Rust.
//! Zenmux provider (adaptive multi-protocol proxy based on model ID).
//!
//! Zenmux is a multi-protocol proxy that supports:
//! - OpenAI Responses protocol at `https://zenmux.ai/api/v1`
//! - OpenAI-compatible protocol at `https://zenmux.ai/api/v1`
//! - Google Vertex AI protocol at `https://zenmux.ai/api/vertex-ai`
//! - Anthropic Messages protocol at `https://zenmux.ai/api/anthropic/v1`
//!
//! Adaptive routing logic (when base_url is empty or starts with `https://zenmux.ai`):
//! - If the model ID contains "google" or "gemini" (case-insensitive),
//!   routes to Google Vertex AI protocol
//! - If the model ID contains "deepseek", "kimi" or "moonshotai" (case-insensitive),
//!   routes to OpenAI-compatible protocol
//! - If the model ID contains "openai" or "gpt" (case-insensitive),
//!   routes to OpenAI Responses protocol
//! - Otherwise, routes to Anthropic Messages protocol
//!
//! A trailing `:source` suffix on the model ID is ignored for protocol routing.
//! For example, `claude-opus-4.6:google` still routes to Anthropic protocol.
//!
//! When a custom base_url is provided (not empty and not starting with
//! `https://zenmux.ai`), the provider uses OpenAI Completions protocol
//! with the given base_url as-is.
//!
//! API key environment variable: `ZENMUX_API_KEY`

use crate::protocol::LLMProtocol;
use crate::stream::AssistantMessageEventStream;
use crate::types::*;
use async_trait::async_trait;

/// Zenmux base URL prefix used to detect adaptive routing mode.
///
/// An effective base_url that is empty or starts with this prefix keeps the
/// provider in adaptive (model-ID based) protocol routing; anything else is
/// treated as a custom endpoint (see `ZenmuxProvider::should_adapt`).
pub(crate) const ZENMUX_HOST_PREFIX: &str = "https://zenmux.ai";

/// Default OpenAI-family endpoint for Zenmux.
///
/// Both the OpenAI Responses and the OpenAI-compatible Completions routes
/// are served from this same URL (see the module docs above).
const ZENMUX_OPENAI_BASE_URL: &str = "https://zenmux.ai/api/v1";

/// Default Google Vertex AI endpoint for Zenmux.
const ZENMUX_GOOGLE_BASE_URL: &str = "https://zenmux.ai/api/vertex-ai";

/// Default Anthropic Messages endpoint for Zenmux.
const ZENMUX_ANTHROPIC_BASE_URL: &str = "https://zenmux.ai/api/anthropic/v1";

/// Protocol routing decision for a model.
///
/// Produced by [`zenmux_detect_route`] and mapped to an [`Api`] by
/// [`zenmux_api_for_route`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ProtocolRoute {
    /// Google Vertex AI protocol (model ID contains "google" or "gemini").
    Google,
    /// OpenAI-compatible Completions protocol ("deepseek", "kimi", "moonshotai").
    OpenAICompatible,
    /// OpenAI Responses protocol ("openai" or "gpt").
    OpenAIResponses,
    /// Anthropic Messages protocol (fallback for all other model IDs).
    Anthropic,
}

/// Returns the model ID portion used for Zenmux protocol routing.
///
/// Zenmux allows an optional trailing `:source` suffix to select a channel.
/// That suffix must not affect protocol detection, so when a `:` is present
/// only the segment before the final one is returned; otherwise the full ID
/// is returned unchanged.
pub(crate) fn zenmux_routing_model_id(model_id: &str) -> &str {
    match model_id.rfind(':') {
        Some(idx) => &model_id[..idx],
        None => model_id,
    }
}

/// Determine the adaptive Zenmux protocol route from a model ID.
///
/// Matching is case-insensitive and ignores any trailing `:source` suffix
/// (see [`zenmux_routing_model_id`]). The first keyword group that matches
/// wins; anything unmatched falls back to the Anthropic route.
pub(crate) fn zenmux_detect_route(model_id: &str) -> ProtocolRoute {
    let id = zenmux_routing_model_id(model_id).to_ascii_lowercase();
    let has_any = |keywords: &[&str]| keywords.iter().any(|kw| id.contains(kw));

    if has_any(&["google", "gemini"]) {
        ProtocolRoute::Google
    } else if has_any(&["deepseek", "kimi", "moonshotai"]) {
        ProtocolRoute::OpenAICompatible
    } else if has_any(&["openai", "gpt"]) {
        ProtocolRoute::OpenAIResponses
    } else {
        ProtocolRoute::Anthropic
    }
}

/// Map a Zenmux adaptive route to the corresponding API type.
pub(crate) fn zenmux_api_for_route(route: ProtocolRoute) -> Api {
    match route {
        ProtocolRoute::Google => Api::GoogleVertex,
        ProtocolRoute::OpenAICompatible => Api::OpenAICompletions,
        ProtocolRoute::OpenAIResponses => Api::OpenAIResponses,
        ProtocolRoute::Anthropic => Api::AnthropicMessages,
    }
}

/// Determine the adaptive Zenmux API type from a model ID.
///
/// Convenience composition of [`zenmux_detect_route`] and
/// [`zenmux_api_for_route`].
pub(crate) fn zenmux_detect_api(model_id: &str) -> Api {
    let route = zenmux_detect_route(model_id);
    zenmux_api_for_route(route)
}

/// Zenmux provider (multi-protocol proxy).
pub struct ZenmuxProvider {
    /// Fallback API key. Resolution order (see `resolve_api_key`):
    /// `options.api_key`, then this field, then the `ZENMUX_API_KEY` env var.
    default_api_key: Option<String>,
}

impl ZenmuxProvider {
    /// Create a new Zenmux provider.
    pub fn new() -> Self {
        Self {
            default_api_key: None,
        }
    }

    /// Create a provider with a default API key.
    pub fn with_api_key(api_key: impl Into<String>) -> Self {
        Self {
            default_api_key: Some(api_key.into()),
        }
    }

    /// Resolve API key from options, self, or environment.
    fn resolve_api_key(&self, options: &StreamOptions) -> Option<String> {
        if let Some(ref key) = options.api_key {
            return Some(key.clone());
        }
        if let Some(ref key) = self.default_api_key {
            return Some(key.clone());
        }
        std::env::var("ZENMUX_API_KEY").ok()
    }

    /// Check if adaptive routing should be enabled.
    ///
    /// Resolves the effective base_url (options.base_url > model.base_url),
    /// then returns true when it is None, empty, or starts with `https://zenmux.ai`.
    fn should_adapt(options_base_url: &Option<String>, model_base_url: &Option<String>) -> bool {
        let effective = options_base_url.as_deref().or(model_base_url.as_deref());
        match effective {
            None => true,
            Some(url) => url.is_empty() || url.starts_with(ZENMUX_HOST_PREFIX),
        }
    }

    /// Determine protocol route based on model ID.
    fn detect_route(model_id: &str) -> ProtocolRoute {
        zenmux_detect_route(model_id)
    }
}

impl Default for ZenmuxProvider {
    /// Equivalent to [`ZenmuxProvider::new`]: no default API key is set.
    fn default() -> Self {
        Self::new()
    }
}

#[async_trait]
impl LLMProtocol for ZenmuxProvider {
    fn provider_type(&self) -> Provider {
        Provider::Zenmux
    }

    fn stream(
        &self,
        model: &Model,
        context: &Context,
        options: StreamOptions,
    ) -> AssistantMessageEventStream {
        let mut opts = options;
        if opts.api_key.is_none() {
            opts.api_key = self.resolve_api_key(&opts);
        }

        let mut m = model.clone();

        if Self::should_adapt(&opts.base_url, &m.base_url) {
            // Adaptive mode: choose protocol and endpoint based on model ID.
            // Clear options.base_url so the routed endpoint in model.base_url takes effect.
            opts.base_url = None;
            let route = Self::detect_route(&m.id);
            m.api = Some(zenmux_api_for_route(route));
            match route {
                ProtocolRoute::Google => {
                    m.base_url = Some(ZENMUX_GOOGLE_BASE_URL.to_string());
                    let provider = crate::protocol::google::GoogleProtocol::new();
                    provider.stream(&m, context, opts)
                }
                ProtocolRoute::OpenAICompatible => {
                    m.base_url = Some(ZENMUX_OPENAI_BASE_URL.to_string());
                    let provider =
                        crate::protocol::openai_completions::OpenAICompletionsProtocol::new();
                    provider.stream(&m, context, opts)
                }
                ProtocolRoute::OpenAIResponses => {
                    m.base_url = Some(ZENMUX_OPENAI_BASE_URL.to_string());
                    let provider =
                        crate::protocol::openai_responses::OpenAIResponsesProtocol::new();
                    provider.stream(&m, context, opts)
                }
                ProtocolRoute::Anthropic => {
                    m.base_url = Some(ZENMUX_ANTHROPIC_BASE_URL.to_string());
                    let provider = crate::protocol::anthropic::AnthropicProtocol::new();
                    provider.stream(&m, context, opts)
                }
            }
        } else {
            // Custom base_url: use OpenAI Completions protocol as-is
            m.api = Some(Api::OpenAICompletions);
            let provider = crate::protocol::openai_completions::OpenAICompletionsProtocol::new();
            provider.stream(&m, context, opts)
        }
    }

    fn stream_simple(
        &self,
        model: &Model,
        context: &Context,
        options: SimpleStreamOptions,
    ) -> AssistantMessageEventStream {
        let mut opts = options;
        if opts.base.api_key.is_none() {
            opts.base.api_key = self.resolve_api_key(&opts.base);
        }

        let mut m = model.clone();

        if Self::should_adapt(&opts.base.base_url, &m.base_url) {
            opts.base.base_url = None;
            let route = Self::detect_route(&m.id);
            m.api = Some(zenmux_api_for_route(route));
            match route {
                ProtocolRoute::Google => {
                    m.base_url = Some(ZENMUX_GOOGLE_BASE_URL.to_string());
                    let provider = crate::protocol::google::GoogleProtocol::new();
                    provider.stream_simple(&m, context, opts)
                }
                ProtocolRoute::OpenAICompatible => {
                    m.base_url = Some(ZENMUX_OPENAI_BASE_URL.to_string());
                    let provider =
                        crate::protocol::openai_completions::OpenAICompletionsProtocol::new();
                    provider.stream_simple(&m, context, opts)
                }
                ProtocolRoute::OpenAIResponses => {
                    m.base_url = Some(ZENMUX_OPENAI_BASE_URL.to_string());
                    let provider =
                        crate::protocol::openai_responses::OpenAIResponsesProtocol::new();
                    provider.stream_simple(&m, context, opts)
                }
                ProtocolRoute::Anthropic => {
                    m.base_url = Some(ZENMUX_ANTHROPIC_BASE_URL.to_string());
                    let provider = crate::protocol::anthropic::AnthropicProtocol::new();
                    provider.stream_simple(&m, context, opts)
                }
            }
        } else {
            // Custom base_url: use OpenAI Completions protocol as-is
            m.api = Some(Api::OpenAICompletions);
            let provider = crate::protocol::openai_completions::OpenAICompletionsProtocol::new();
            provider.stream_simple(&m, context, opts)
        }
    }
}

#[cfg(test)]
mod tests {
    use super::{
        zenmux_api_for_route, zenmux_detect_api, zenmux_detect_route, zenmux_routing_model_id,
        ProtocolRoute,
    };
    use crate::types::Api;

    #[test]
    fn test_zenmux_route_detection_ignores_source_suffix() {
        // A trailing `:source` suffix is stripped before routing.
        assert_eq!(
            zenmux_routing_model_id("claude-opus-4.6"),
            "claude-opus-4.6"
        );
        assert_eq!(
            zenmux_routing_model_id("claude-opus-4.6:google"),
            "claude-opus-4.6"
        );

        // (model id, expected route), with and without a source suffix.
        let cases: &[(&str, ProtocolRoute)] = &[
            ("claude-opus-4.6", ProtocolRoute::Anthropic),
            ("gemini-2.5-pro", ProtocolRoute::Google),
            ("gpt-4.1", ProtocolRoute::OpenAIResponses),
            ("deepseek-r1", ProtocolRoute::OpenAICompatible),
            ("deepseek-v3-0324", ProtocolRoute::OpenAICompatible),
            ("kimi-k2.5", ProtocolRoute::OpenAICompatible),
            ("moonshotai/kimi-k2.5", ProtocolRoute::OpenAICompatible),
            ("claude-opus-4.6:google", ProtocolRoute::Anthropic),
            ("gemini-2.5-pro:openai", ProtocolRoute::Google),
            ("gpt-4.1:anthropic", ProtocolRoute::OpenAIResponses),
            ("moonshotai/kimi-k2.5:anthropic", ProtocolRoute::OpenAICompatible),
        ];
        for &(id, expected) in cases {
            assert_eq!(zenmux_detect_route(id), expected, "model id: {}", id);
        }
    }

    #[test]
    fn test_zenmux_route_to_api_mapping() {
        // Every route maps to exactly one API type.
        let route_cases: &[(ProtocolRoute, Api)] = &[
            (ProtocolRoute::Google, Api::GoogleVertex),
            (ProtocolRoute::OpenAICompatible, Api::OpenAICompletions),
            (ProtocolRoute::OpenAIResponses, Api::OpenAIResponses),
            (ProtocolRoute::Anthropic, Api::AnthropicMessages),
        ];
        for (route, expected) in route_cases {
            assert_eq!(&zenmux_api_for_route(*route), expected, "route: {:?}", route);
        }

        // End-to-end: model ID straight to API type.
        let detect_cases: &[(&str, Api)] = &[
            ("gemini-2.5-pro", Api::GoogleVertex),
            ("gpt-4.1", Api::OpenAIResponses),
            ("deepseek-r1", Api::OpenAICompletions),
            ("moonshotai/kimi-k2.5", Api::OpenAICompletions),
            ("claude-opus-4.6", Api::AnthropicMessages),
        ];
        for (id, expected) in detect_cases {
            assert_eq!(&zenmux_detect_api(id), expected, "model id: {}", id);
        }
    }
}