1mod chat;
2mod models;
3mod stream;
4mod types;
5
6pub use types::{Model, Response};
7
8use crate::models::ModelsConfig;
9use crate::settings::LlmBackendSettings;
10use anyhow::Result;
11use llm_connector::LlmClient;
12
/// High-level LLM client: wraps a provider-specific `llm_connector` client
/// together with the backend settings it was built from and the loaded
/// models configuration.
pub struct Client {
    // Backend settings this client was constructed from (cloned from the
    // caller's config in `Client::new`).
    backend: LlmBackendSettings,
    // Provider-specific connection handle from the `llm_connector` crate.
    llm_client: LlmClient,
    // Model metadata loaded via `ModelsConfig::load_with_fallback()`.
    models_config: ModelsConfig,
}
19
20impl Client {
21 pub fn new(config: &LlmBackendSettings) -> Result<Self> {
23 let llm_client = match config {
24 LlmBackendSettings::OpenAI {
25 api_key, base_url, ..
26 } => {
27 if let Some(base_url) = base_url {
29 LlmClient::openai_compatible(api_key, base_url, "openai")?
30 } else {
31 LlmClient::openai(api_key)?
32 }
33 }
34 LlmBackendSettings::Anthropic { api_key, .. } => {
35 LlmClient::anthropic(api_key)?
37 }
38 LlmBackendSettings::Aliyun { api_key, .. } => LlmClient::aliyun(api_key)?,
39 LlmBackendSettings::Zhipu { api_key, .. } => {
40 LlmClient::zhipu_openai_compatible(api_key)?
42 }
43 LlmBackendSettings::Volcengine { api_key, .. } => LlmClient::volcengine(api_key)?,
44 LlmBackendSettings::Tencent { api_key, .. } => LlmClient::tencent(api_key)?,
45 LlmBackendSettings::Longcat { api_key, .. } => {
46 LlmClient::openai_compatible(api_key, "https://api.longcat.chat/v1", "longcat")?
48 }
49 LlmBackendSettings::Ollama { base_url, .. } => {
50 if base_url.is_some() {
51 LlmClient::ollama()?
54 } else {
55 LlmClient::ollama()?
56 }
57 }
58 };
59
60 let models_config = ModelsConfig::load_with_fallback();
62
63 Ok(Self {
64 backend: config.clone(),
65 llm_client,
66 models_config,
67 })
68 }
69}