1use anyhow::Result;
9use async_trait::async_trait;
10use serde::{Deserialize, Serialize};
11
12pub mod claude;
13pub use claude as anthropic;
15pub mod candle;
16pub mod google;
17pub mod grok;
18pub mod memory;
19pub mod ollama;
20pub mod openai;
21pub mod openai_compat;
22pub mod oauth;
23pub mod openrouter;
24pub mod server;
25pub mod token_store;
26pub mod zai;
27
/// A pluggable backend for one LLM vendor (OpenAI, Anthropic, Ollama, …).
///
/// Implementations are registered with [`LlmProxy`] and selected at call time
/// by matching [`LlmProvider::name`] case-insensitively.
#[async_trait]
pub trait LlmProvider: Send + Sync {
    /// Send a completion request to the backend and return its response.
    async fn complete(&self, request: LlmRequest) -> Result<LlmResponse>;

    /// Stable identifier used by [`LlmProxy::complete`] for provider lookup.
    fn name(&self) -> &'static str;
}
37
/// A provider-agnostic completion request, translated by each provider into
/// its vendor-specific wire format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmRequest {
    /// Model identifier in the target provider's naming scheme.
    pub model: String,
    /// Conversation history, oldest message first.
    pub messages: Vec<LlmMessage>,
    /// Sampling temperature; `None` lets the provider use its default.
    pub temperature: Option<f32>,
    /// Upper bound on generated tokens; `None` lets the provider decide.
    pub max_tokens: Option<usize>,
    /// Whether the caller wants a streamed response.
    pub stream: bool,
}
47
/// A single chat message: who said it and what was said.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmMessage {
    /// Speaker of this message (system / user / assistant).
    pub role: LlmRole,
    /// Plain-text message body.
    pub content: String,
}
54
55#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
57#[serde(rename_all = "lowercase")]
58pub enum LlmRole {
59 System,
60 User,
61 Assistant,
62}
63
/// A provider-agnostic completion result returned by [`LlmProvider::complete`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmResponse {
    /// Generated text.
    pub content: String,
    /// Model that actually served the request (may differ from the one asked for).
    pub model: String,
    /// Token accounting, when the provider reports it.
    pub usage: Option<LlmUsage>,
}
71
/// Token counts reported by a provider for one request/response pair.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmUsage {
    /// Tokens consumed by the input messages.
    pub prompt_tokens: usize,
    /// Tokens generated in the response.
    pub completion_tokens: usize,
    /// Sum of prompt and completion tokens, as reported by the provider.
    pub total_tokens: usize,
}
79
/// Routes completion requests to one of several registered [`LlmProvider`]s,
/// selected by name.
pub struct LlmProxy {
    /// Registered backends, in registration order.
    pub providers: Vec<Box<dyn LlmProvider>>,
}
84
85impl LlmProxy {
86 pub fn new() -> Self {
87 Self {
88 providers: Vec::new(),
89 }
90 }
91
92 pub fn add_provider(&mut self, provider: Box<dyn LlmProvider>) {
93 self.providers.push(provider);
94 }
95
96 pub async fn complete(&self, provider_name: &str, request: LlmRequest) -> Result<LlmResponse> {
97 for provider in &self.providers {
98 if provider.name().to_lowercase() == provider_name.to_lowercase() {
99 return provider.complete(request).await;
100 }
101 }
102 Err(anyhow::anyhow!("Provider '{}' not found", provider_name))
103 }
104}
105
impl LlmProxy {
    /// Build a proxy from env-configured providers (via `Default`), then probe
    /// for locally running LLM servers and register any that respond.
    pub async fn with_local_detection() -> Self {
        let mut proxy = Self::default();

        // NOTE(review): detect_local_llms presumably probes well-known local
        // server ports (Ollama / LM Studio) — confirm in the ollama module.
        let local_llms = ollama::detect_local_llms().await;
        for info in local_llms {
            match info.server_type {
                ollama::LocalLlmType::Ollama => {
                    // Progress goes to stderr so stdout stays clean for output.
                    eprintln!("🦙 Detected Ollama with {} model(s)", info.models.len());
                    proxy.add_provider(Box::new(ollama::OllamaProvider::ollama()));
                }
                ollama::LocalLlmType::LmStudio => {
                    eprintln!("🖥️ Detected LM Studio with {} model(s)", info.models.len());
                    proxy.add_provider(Box::new(ollama::OllamaProvider::lmstudio()));
                }
            }
        }

        proxy
    }

    /// Names of all registered providers, in registration order.
    pub fn list_providers(&self) -> Vec<&'static str> {
        self.providers.iter().map(|p| p.name()).collect()
    }
}
135
136impl Default for LlmProxy {
137 fn default() -> Self {
138 let mut proxy = Self::new();
139
140 if std::env::var("OPENAI_API_KEY").is_ok() {
142 proxy.add_provider(Box::new(openai::OpenAiProvider::default()));
143 }
144
145 if std::env::var("ANTHROPIC_API_KEY").is_ok() {
146 proxy.add_provider(Box::new(anthropic::AnthropicProvider::default()));
147 }
148
149 if std::env::var("GOOGLE_API_KEY").is_ok() {
150 proxy.add_provider(Box::new(google::GoogleProvider::default()));
151 }
152
153 if std::env::var("XAI_API_KEY").is_ok() || std::env::var("GROK_API_KEY").is_ok() {
155 proxy.add_provider(Box::new(grok::GrokProvider::default()));
156 }
157
158 if std::env::var("OPENROUTER_API_KEY").is_ok() {
160 proxy.add_provider(Box::new(openrouter::OpenRouterProvider::default()));
161 }
162
163 if std::env::var("ZAI_API_KEY").is_ok() || std::env::var("ZHIPU_API_KEY").is_ok() {
165 proxy.add_provider(Box::new(zai::ZaiProvider::default()));
166 }
167
168 proxy.add_provider(Box::new(candle::CandleProvider::default()));
170
171 proxy
172 }
173}