vtcode_core/llm/client.rs

use super::provider::LLMError;
use super::providers::{
    AnthropicProvider, DeepSeekProvider, GeminiProvider, OpenAIProvider, OpenRouterProvider,
    XAIProvider,
};
use super::types::{BackendKind, LLMResponse};
use crate::config::models::{ModelId, Provider};
use async_trait::async_trait;

/// Common interface implemented by every supported LLM backend.
#[async_trait]
pub trait LLMClient: Send + Sync {
    /// Send a prompt to the backend and return the model's response.
    async fn generate(&mut self, prompt: &str) -> Result<LLMResponse, LLMError>;
    /// Identify which backend this client targets.
    fn backend_kind(&self) -> BackendKind;
    /// Identifier of the model this client is configured to use.
    fn model_id(&self) -> &str;
}

/// Boxed, dynamically dispatched LLM client.
pub type AnyClient = Box<dyn LLMClient>;

/// Build a boxed client for the provider that owns `model`.
pub fn make_client(api_key: String, model: ModelId) -> AnyClient {
    match model.provider() {
        Provider::Gemini => Box::new(GeminiProvider::with_model(
            api_key,
            model.as_str().to_string(),
        )),
        Provider::OpenAI => Box::new(OpenAIProvider::with_model(
            api_key,
            model.as_str().to_string(),
        )),
        // Unlike the other providers, AnthropicProvider is constructed here
        // without an explicit model string.
        Provider::Anthropic => Box::new(AnthropicProvider::new(api_key)),
        Provider::DeepSeek => Box::new(DeepSeekProvider::with_model(
            api_key,
            model.as_str().to_string(),
        )),
        Provider::OpenRouter => Box::new(OpenRouterProvider::with_model(
            api_key,
            model.as_str().to_string(),
        )),
        Provider::XAI => Box::new(XAIProvider::with_model(api_key, model.as_str().to_string())),
    }
}
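
// Minimal usage sketch (not part of the original file): it shows how a caller
// would obtain a boxed client and issue a single prompt. `ModelId::GeminiFlash`
// is a hypothetical placeholder variant; everything else uses the API defined
// above.
//
// async fn example(api_key: String) -> Result<LLMResponse, LLMError> {
//     let mut client: AnyClient = make_client(api_key, ModelId::GeminiFlash);
//     println!("using model {}", client.model_id());
//     client.generate("Hello from vtcode").await
// }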