use crate::protocol::LLMProtocol;
use crate::stream::AssistantMessageEventStream;
use crate::types::*;
use async_trait::async_trait;
/// Host prefix used to recognize base URLs that already point at the b.ai
/// API; such URLs (or an absent/empty one) trigger b.ai route adaptation.
pub(crate) const BAI_HOST_PREFIX: &str = "https://api.b.ai";
/// Canonical versioned base URL pinned onto models routed through b.ai.
const BAI_BASE_URL: &str = "https://api.b.ai/v1";
/// Wire protocol a model's requests are routed through when talking to b.ai.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum BaiProtocolRoute {
    /// OpenAI-compatible chat-completions endpoint (the default route).
    OpenAICompatible,
    /// Anthropic Messages endpoint (used for Claude-family model ids).
    Anthropic,
}
/// Pick the protocol route for a model id.
///
/// A case-insensitive substring match on `"claude"` selects the Anthropic
/// route; every other id (including the empty string) falls back to the
/// OpenAI-compatible route.
pub(crate) fn bai_detect_route(model_id: &str) -> BaiProtocolRoute {
    let is_claude_family = model_id.to_ascii_lowercase().contains("claude");
    if is_claude_family {
        BaiProtocolRoute::Anthropic
    } else {
        BaiProtocolRoute::OpenAICompatible
    }
}
/// Map a protocol route to the concrete API flavor spoken on the wire.
pub(crate) fn bai_api_for_route(route: BaiProtocolRoute) -> Api {
    // Exhaustive match: adding a route variant forces this mapping to be updated.
    match route {
        BaiProtocolRoute::Anthropic => Api::AnthropicMessages,
        BaiProtocolRoute::OpenAICompatible => Api::OpenAICompletions,
    }
}
/// Convenience helper: detect the route for `model_id`, then return the
/// API flavor that route maps to.
pub(crate) fn bai_detect_api(model_id: &str) -> Api {
    let route = bai_detect_route(model_id);
    bai_api_for_route(route)
}
/// Provider adapter for the b.ai aggregation service.
///
/// Dispatches each request to either the OpenAI-compatible or the Anthropic
/// protocol implementation based on the model id, unless a non-b.ai base URL
/// is supplied, in which case the OpenAI-compatible protocol is used as-is.
pub struct BaiProvider {
    /// Fallback API key, consulted after the per-request options but before
    /// the `BAI_API_KEY` environment variable.
    default_api_key: Option<String>,
}
impl BaiProvider {
pub fn new() -> Self {
Self {
default_api_key: None,
}
}
pub fn with_api_key(api_key: impl Into<String>) -> Self {
Self {
default_api_key: Some(api_key.into()),
}
}
fn resolve_api_key(&self, options: &StreamOptions) -> Option<String> {
if let Some(ref key) = options.api_key {
return Some(key.clone());
}
if let Some(ref key) = self.default_api_key {
return Some(key.clone());
}
std::env::var("BAI_API_KEY").ok()
}
fn should_adapt(options_base_url: &Option<String>, model_base_url: &Option<String>) -> bool {
let effective = options_base_url.as_deref().or(model_base_url.as_deref());
match effective {
None => true,
Some(url) => url.is_empty() || url.starts_with(BAI_HOST_PREFIX),
}
}
fn detect_route(model_id: &str) -> BaiProtocolRoute {
bai_detect_route(model_id)
}
}
impl Default for BaiProvider {
    /// Equivalent to [`BaiProvider::new`]: no default API key configured.
    fn default() -> Self {
        Self::new()
    }
}
#[async_trait]
impl LLMProtocol for BaiProvider {
    fn provider_type(&self) -> Provider {
        Provider::Bai
    }

    /// Stream a response, adapting the model to b.ai routing when the
    /// effective base URL is absent, empty, or points at the b.ai host.
    fn stream(
        &self,
        model: &Model,
        context: &Context,
        options: StreamOptions,
    ) -> AssistantMessageEventStream {
        let mut opts = options;
        if opts.api_key.is_none() {
            opts.api_key = self.resolve_api_key(&opts);
        }
        let mut m = model.clone();
        let route = if Self::should_adapt(&opts.base_url, &m.base_url) {
            // Route through b.ai: drop any per-request base URL override and
            // pin the canonical versioned endpoint on the model.
            opts.base_url = None;
            m.base_url = Some(BAI_BASE_URL.to_string());
            Self::detect_route(&m.id)
        } else {
            // A foreign base URL is assumed to speak the OpenAI-compatible
            // protocol; the model's own base URL is left untouched.
            BaiProtocolRoute::OpenAICompatible
        };
        m.api = Some(bai_api_for_route(route));
        match route {
            BaiProtocolRoute::Anthropic => {
                crate::protocol::anthropic::AnthropicProtocol::new().stream(&m, context, opts)
            }
            BaiProtocolRoute::OpenAICompatible => {
                crate::protocol::openai_completions::OpenAICompletionsProtocol::new()
                    .stream(&m, context, opts)
            }
        }
    }

    /// Simple-options variant of [`Self::stream`]; identical routing logic
    /// operating on the nested `base` options.
    fn stream_simple(
        &self,
        model: &Model,
        context: &Context,
        options: SimpleStreamOptions,
    ) -> AssistantMessageEventStream {
        let mut opts = options;
        if opts.base.api_key.is_none() {
            opts.base.api_key = self.resolve_api_key(&opts.base);
        }
        let mut m = model.clone();
        let route = if Self::should_adapt(&opts.base.base_url, &m.base_url) {
            // Same adaptation as `stream`: clear the override, pin b.ai's
            // endpoint, and detect the protocol from the model id.
            opts.base.base_url = None;
            m.base_url = Some(BAI_BASE_URL.to_string());
            Self::detect_route(&m.id)
        } else {
            BaiProtocolRoute::OpenAICompatible
        };
        m.api = Some(bai_api_for_route(route));
        match route {
            BaiProtocolRoute::Anthropic => {
                crate::protocol::anthropic::AnthropicProtocol::new()
                    .stream_simple(&m, context, opts)
            }
            BaiProtocolRoute::OpenAICompatible => {
                crate::protocol::openai_completions::OpenAICompletionsProtocol::new()
                    .stream_simple(&m, context, opts)
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::{bai_api_for_route, bai_detect_api, bai_detect_route, BaiProtocolRoute};
    use crate::types::Api;

    /// Claude-family ids (any casing) route to Anthropic; everything else,
    /// including the empty string, falls back to the OpenAI-compatible route.
    #[test]
    fn test_bai_route_detection() {
        let cases = [
            ("claude-sonnet-4", BaiProtocolRoute::Anthropic),
            ("claude-opus-4.6", BaiProtocolRoute::Anthropic),
            ("Claude-3.5-Sonnet", BaiProtocolRoute::Anthropic),
            ("CLAUDE-HAIKU", BaiProtocolRoute::Anthropic),
            ("gpt-4o", BaiProtocolRoute::OpenAICompatible),
            ("deepseek-r1", BaiProtocolRoute::OpenAICompatible),
            ("gemini-2.5-pro", BaiProtocolRoute::OpenAICompatible),
            ("", BaiProtocolRoute::OpenAICompatible),
        ];
        for (model_id, expected) in cases {
            assert_eq!(
                bai_detect_route(model_id),
                expected,
                "unexpected route for model id {model_id:?}"
            );
        }
    }

    /// Each route maps to its matching API flavor, and the combined
    /// detect-then-map helper agrees with the individual pieces.
    #[test]
    fn test_bai_route_to_api_mapping() {
        assert_eq!(
            bai_api_for_route(BaiProtocolRoute::OpenAICompatible),
            Api::OpenAICompletions
        );
        assert_eq!(
            bai_api_for_route(BaiProtocolRoute::Anthropic),
            Api::AnthropicMessages
        );
        assert_eq!(bai_detect_api("gpt-4o"), Api::OpenAICompletions);
        assert_eq!(bai_detect_api("claude-sonnet-4"), Api::AnthropicMessages);
    }
}