use std::cell::RefCell;
use std::collections::HashSet;
use super::api::{DeltaSender, LlmRequestPayload, LlmResult};
use crate::value::VmError;
#[allow(dead_code)]
/// Static metadata and request shaping for one LLM backend.
///
/// Every default method returns the conservative answer (`false`, or
/// "model required"); concrete providers override only what applies to
/// them. Capability queries that vary per model delegate to the shared
/// capability table keyed by provider name + model.
pub(crate) trait LlmProvider {
    /// Canonical provider name; used as the key for capability lookups.
    fn name(&self) -> &str;
    /// True when the provider speaks the Anthropic-style wire format.
    fn is_anthropic_style(&self) -> bool {
        false
    }
    /// True when the provider supports prompt caching.
    fn supports_cache(&self) -> bool {
        false
    }
    /// True when the provider supports extended "thinking" output.
    fn supports_thinking(&self) -> bool {
        false
    }
    /// True for the test/mock provider.
    fn is_mock(&self) -> bool {
        false
    }
    /// True for locally-hosted backends (no remote API).
    fn is_local(&self) -> bool {
        false
    }
    /// Whether a model name must be supplied in requests; most remote
    /// providers require one, hence the `true` default.
    fn requires_model(&self) -> bool {
        true
    }
    /// Hook to rewrite the outgoing JSON request body in place before it
    /// is sent. Default: no transformation.
    fn transform_request(&self, _body: &mut serde_json::Value) {}
    /// Per-model capability: deferred loading support, looked up in the
    /// capability table under this provider's name.
    fn supports_defer_loading(&self, model: &str) -> bool {
        super::capabilities::lookup(self.name(), model).defer_loading
    }
    /// Per-model capability: supported native tool-search variants from
    /// the capability table (empty when unsupported — TODO confirm the
    /// empty-vec convention against `capabilities::lookup`).
    fn native_tool_search_variants(&self, model: &str) -> Vec<String> {
        super::capabilities::lookup(self.name(), model).tool_search
    }
}
#[allow(dead_code)]
/// Chat-completion entry point layered on top of [`LlmProvider`].
pub(crate) trait LlmProviderChat: LlmProvider {
    /// Execute one chat request.
    ///
    /// `delta_tx`, when present, receives incremental output as it
    /// arrives (streaming); `None` means buffered/non-streaming.
    /// Returns a boxed, pinned future because trait methods cannot be
    /// `async` directly when the trait must stay object-safe; the `'a`
    /// lifetime ties the future to the borrowed `self` and `request`.
    fn chat<'a>(
        &'a self,
        request: &'a LlmRequestPayload,
        delta_tx: Option<DeltaSender>,
    ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<LlmResult, VmError>> + 'a>>;
}
thread_local! {
    /// Per-thread registry of known LLM provider names.
    ///
    /// Thread-local (rather than a process-wide static) so registrations
    /// on one thread cannot be observed by another; `RefCell` provides
    /// single-threaded interior mutability without locking.
    static PROVIDER_NAMES: RefCell<HashSet<String>> = RefCell::new(HashSet::new());
}

/// The built-in provider names seeded by [`register_default_providers`].
const DEFAULT_PROVIDERS: [&str; 14] = [
    "mock",
    "anthropic",
    "ollama",
    "openai",
    "openrouter",
    "together",
    "groq",
    "deepseek",
    "fireworks",
    "huggingface",
    "local",
    "vllm",
    "tgi",
    "dashscope",
];

/// Seed this thread's registry with the built-in provider names.
///
/// Idempotent per thread: if the registry is already non-empty (from a
/// previous call, or from [`register_provider_name`]), the call is a
/// no-op. NOTE(review): this preserves the original quirk that a custom
/// name registered *before* the first call suppresses default seeding.
pub(crate) fn register_default_providers() {
    PROVIDER_NAMES.with(|names| {
        let mut names = names.borrow_mut();
        if names.is_empty() {
            // Single uniform insertion path for all defaults (the original
            // mixed hand-written inserts with a loop over only some names).
            names.extend(DEFAULT_PROVIDERS.iter().copied().map(String::from));
        }
    });
}

/// Register a single (possibly non-default) provider name on this thread.
#[allow(dead_code)]
pub(crate) fn register_provider_name(name: &str) {
    PROVIDER_NAMES.with(|names| {
        names.borrow_mut().insert(name.to_string());
    });
}

/// Whether `name` has been registered on this thread.
pub(crate) fn is_provider_registered(name: &str) -> bool {
    PROVIDER_NAMES.with(|names| names.borrow().contains(name))
}

/// Snapshot of all registered provider names.
///
/// Sorted for deterministic output: `HashSet` iteration order is
/// unspecified, so no caller could have relied on the previous arbitrary
/// ordering.
#[allow(dead_code)]
pub(crate) fn registered_provider_names() -> Vec<String> {
    let mut out: Vec<String> =
        PROVIDER_NAMES.with(|names| names.borrow().iter().cloned().collect());
    out.sort_unstable();
    out
}
/// Free-function capability query: does `provider`/`model` support
/// deferred loading, per the shared capability table?
pub(crate) fn provider_supports_defer_loading(provider: &str, model: &str) -> bool {
    let caps = super::capabilities::lookup(provider, model);
    caps.defer_loading
}
/// Free-function capability query: the native tool-search variants the
/// shared capability table lists for `provider`/`model`.
pub(crate) fn provider_tool_search_variants(provider: &str, model: &str) -> Vec<String> {
    let caps = super::capabilities::lookup(provider, model);
    caps.tool_search
}
/// Wire-format family used to express a native tool-search block.
/// NOTE(review): variants appear to name the request-shape dialect
/// (Anthropic vs. OpenAI style) — confirm against the call sites.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum NativeToolSearchShape {
    /// Anthropic-style request shape.
    Anthropic,
    /// OpenAI-style request shape.
    OpenAi,
}