use anyhow::Result;
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
pub mod claude;
pub use claude as anthropic;
pub mod candle;
pub mod google;
pub mod grok;
pub mod memory;
pub mod ollama;
pub mod openai;
pub mod openai_compat;
pub mod oauth;
pub mod openrouter;
pub mod server;
pub mod token_store;
pub mod zai;
/// Common interface implemented by every LLM backend in this crate
/// (OpenAI, Claude, Google, local Ollama/LM Studio, ...).
#[async_trait]
pub trait LlmProvider: Send + Sync {
    /// Send a completion request to the backend and return its response.
    async fn complete(&self, request: LlmRequest) -> Result<LlmResponse>;
    /// Stable identifier for this provider; `LlmProxy::complete` matches
    /// against it (case-insensitively) to route requests.
    fn name(&self) -> &'static str;
}
/// Provider-agnostic completion request passed to [`LlmProvider::complete`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmRequest {
    /// Model identifier as understood by the target provider.
    pub model: String,
    /// Conversation messages to complete against.
    pub messages: Vec<LlmMessage>,
    /// Sampling temperature; `None` leaves the provider's default in effect.
    pub temperature: Option<f32>,
    /// Cap on generated tokens; `None` leaves the provider's default in effect.
    pub max_tokens: Option<usize>,
    /// Whether a streaming response is requested.
    pub stream: bool,
}
/// A single chat message: who said it and what was said.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmMessage {
    /// Author of the message (system / user / assistant).
    pub role: LlmRole,
    /// Plain-text message body.
    pub content: String,
}
/// Chat message author role.
///
/// Serializes as lowercase strings (`"system"`, `"user"`, `"assistant"`),
/// matching the wire format used by the chat-completion APIs.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum LlmRole {
    System,
    User,
    Assistant,
}
/// Provider-agnostic completion response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmResponse {
    /// Generated completion text.
    pub content: String,
    /// Model that actually produced the response (as reported by the provider).
    pub model: String,
    /// Token accounting, when the provider reports it.
    pub usage: Option<LlmUsage>,
}
/// Token-usage accounting for a single completion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmUsage {
    /// Tokens consumed by the input/prompt.
    pub prompt_tokens: usize,
    /// Tokens generated in the completion.
    pub completion_tokens: usize,
    /// Sum of prompt and completion tokens.
    pub total_tokens: usize,
}
/// Routes completion requests to one of several registered [`LlmProvider`]s
/// by name. See `Default` for env-based auto-configuration.
pub struct LlmProxy {
    /// Registered backends, searched in insertion order.
    pub providers: Vec<Box<dyn LlmProvider>>,
}
impl LlmProxy {
    /// Create an empty proxy with no registered providers.
    pub fn new() -> Self {
        Self {
            providers: Vec::new(),
        }
    }

    /// Register an additional provider; lookups search in insertion order.
    pub fn add_provider(&mut self, provider: Box<dyn LlmProvider>) {
        self.providers.push(provider);
    }

    /// Route `request` to the first registered provider whose name matches
    /// `provider_name` (ASCII case-insensitive).
    ///
    /// # Errors
    /// Returns an error when no registered provider matches, or propagates
    /// the selected provider's own error.
    pub async fn complete(&self, provider_name: &str, request: LlmRequest) -> Result<LlmResponse> {
        // eq_ignore_ascii_case compares in place, avoiding the two String
        // allocations per candidate that to_lowercase() incurred (one of
        // which was loop-invariant). Provider names are 'static ASCII ids.
        let provider = self
            .providers
            .iter()
            .find(|p| p.name().eq_ignore_ascii_case(provider_name))
            .ok_or_else(|| anyhow::anyhow!("Provider '{}' not found", provider_name))?;
        provider.complete(request).await
    }
}
impl LlmProxy {
    /// Build a proxy from the env-configured defaults, then probe for local
    /// LLM servers and register any that respond (Ollama, LM Studio).
    pub async fn with_local_detection() -> Self {
        let mut proxy = Self::default();
        for detected in ollama::detect_local_llms().await {
            let model_count = detected.models.len();
            match detected.server_type {
                ollama::LocalLlmType::Ollama => {
                    eprintln!("🦙 Detected Ollama with {} model(s)", model_count);
                    proxy.add_provider(Box::new(ollama::OllamaProvider::ollama()));
                }
                ollama::LocalLlmType::LmStudio => {
                    eprintln!("🖥️ Detected LM Studio with {} model(s)", model_count);
                    proxy.add_provider(Box::new(ollama::OllamaProvider::lmstudio()));
                }
            }
        }
        proxy
    }

    /// Names of all currently registered providers, in registration order.
    pub fn list_providers(&self) -> Vec<&'static str> {
        self.providers.iter().map(|provider| provider.name()).collect()
    }
}
impl Default for LlmProxy {
    /// Assemble a proxy from whatever API keys are present in the
    /// environment, always finishing with the local Candle provider as a
    /// no-credentials fallback.
    fn default() -> Self {
        // A key merely has to be set; its value is validated by the provider.
        let has_env = |key: &str| std::env::var(key).is_ok();
        let mut proxy = Self::new();

        if has_env("OPENAI_API_KEY") {
            proxy.add_provider(Box::new(openai::OpenAiProvider::default()));
        }
        if has_env("ANTHROPIC_API_KEY") {
            proxy.add_provider(Box::new(anthropic::AnthropicProvider::default()));
        }
        if has_env("GOOGLE_API_KEY") {
            proxy.add_provider(Box::new(google::GoogleProvider::default()));
        }
        if has_env("XAI_API_KEY") || has_env("GROK_API_KEY") {
            proxy.add_provider(Box::new(grok::GrokProvider::default()));
        }
        if has_env("OPENROUTER_API_KEY") {
            proxy.add_provider(Box::new(openrouter::OpenRouterProvider::default()));
        }
        if has_env("ZAI_API_KEY") || has_env("ZHIPU_API_KEY") {
            proxy.add_provider(Box::new(zai::ZaiProvider::default()));
        }

        // Candle runs locally and needs no key, so it is always registered.
        proxy.add_provider(Box::new(candle::CandleProvider::default()));
        proxy
    }
}