use crate::llm::{Client, Model, Response};
use crate::settings::LlmBackendSettings;
use anyhow::Result;
use llm_connector::types::Tool;
use llm_connector::StreamFormat;
use tokio_stream::wrappers::UnboundedReceiverStream;

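/// LLM service that wraps a backend `Client` together with the default model name from settings.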
pub struct Service {
    client: Client,
    model: String,
}

impl Service {
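    /// Builds the backend client from the settings and records the configured model as the default.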
    pub fn new(config: &LlmBackendSettings) -> Result<Self> {
        let client = Client::new(config)?;
        let model = match config {
            LlmBackendSettings::OpenAI { model, .. } => model.clone(),
            LlmBackendSettings::Anthropic { model, .. } => model.clone(),
            LlmBackendSettings::Ollama { model, .. } => model.clone(),
            LlmBackendSettings::Aliyun { model, .. } => model.clone(),
            LlmBackendSettings::Zhipu { model, .. } => model.clone(),
            LlmBackendSettings::Volcengine { model, .. } => model.clone(),
            LlmBackendSettings::Tencent { model, .. } => model.clone(),
            LlmBackendSettings::Longcat { model, .. } => model.clone(),
        };

        Ok(Self { client, model })
    }

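    /// Sends a non-streaming chat request, falling back to the default model when `model` is `None`.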
    pub async fn chat(
        &self,
        model: Option<&str>,
        messages: Vec<llm_connector::types::Message>,
        tools: Option<Vec<Tool>>,
    ) -> Result<Response> {
        let model = model.unwrap_or(&self.model);
        self.client.chat(model, messages, tools).await
    }

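    /// Streams chat output in the given `StreamFormat` (Ollama-style), using the default model when `model` is `None`.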
    pub async fn chat_stream_ollama(
        &self,
        model: Option<&str>,
        messages: Vec<llm_connector::types::Message>,
        format: StreamFormat,
    ) -> Result<UnboundedReceiverStream<String>> {
        let model = model.unwrap_or(&self.model);
        self.client.chat_stream_with_format(model, messages, format).await
    }

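    /// Streams chat output for an OpenAI-compatible backend, with optional tool definitions, in the requested format.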
    pub async fn chat_stream_openai(
        &self,
        model: Option<&str>,
        messages: Vec<llm_connector::types::Message>,
        tools: Option<Vec<Tool>>,
        format: StreamFormat,
    ) -> Result<UnboundedReceiverStream<String>> {
        let model = model.unwrap_or(&self.model);
        self.client.chat_stream_openai(model, messages, tools, format).await
    }

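    /// Lists the models available from the configured backend.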
    pub async fn list_models(&self) -> Result<Vec<Model>> {
        self.client.list_models().await
    }

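    /// Returns `true` if `model` appears in the backend's model list.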
    pub async fn validate_model(&self, model: &str) -> Result<bool> {
        let available_models = self.client.list_models().await?;
        Ok(available_models.iter().any(|m| m.id == model))
    }
}