1use crate::system::error::Result;
2use crate::system::LLMConfig;
3use serde::{Deserialize, Serialize};
4
5#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
7#[serde(rename_all = "lowercase")]
8pub enum Role {
9 User,
10 Assistant,
11}
12
/// Payload for a single LLM generation call (see [`LLMClient::generate`]).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMRequest {
    /// System prompt sent alongside the conversation.
    pub system: String,
    /// Ordered list of conversation messages.
    pub messages: Vec<Message>,
    /// Request provider-side web search augmentation — NOTE(review): whether
    /// and how this is honored depends on each client impl; confirm there.
    pub use_websearch: bool,
}
20
21#[derive(Debug, Clone, Serialize, Deserialize)]
23pub struct Message {
24 pub role: Role,
25 pub content: String,
26}
27
28impl Message {
29 pub fn user(content: impl Into<String>) -> Self {
31 Self {
32 role: Role::User,
33 content: content.into(),
34 }
35 }
36
37 pub fn assistant(content: impl Into<String>) -> Self {
39 Self {
40 role: Role::Assistant,
41 content: content.into(),
42 }
43 }
44}
45
46#[async_trait::async_trait]
48pub trait LLMClient: Send + Sync {
49 async fn generate(&self, request: LLMRequest) -> Result<String>;
51
52 async fn verify(&self) -> Result<bool> {
54 let test_request = LLMRequest {
56 system: "This is a test, please respond shortly.".to_string(),
57 messages: vec![Message::user("Hello")],
58 use_websearch: false,
59 };
60
61 match self.generate(test_request).await {
62 Ok(_) => Ok(true),
63 Err(_) => Ok(false),
64 }
65 }
66}
67
68pub fn create_client(config: &LLMConfig) -> Result<Box<dyn LLMClient>> {
70 let provider_config = config.get_current_provider()?;
71
72 match provider_config {
73 crate::system::ProviderConfig::Openai(openai_config) => {
74 let client = crate::llm::openai::OpenAIClient::new(
75 openai_config.clone(),
76 config.timeout
77 )?;
78 Ok(Box::new(client))
79 }
80 crate::system::ProviderConfig::Anthropic(anthropic_config) => {
81 let client = crate::llm::anthropic::AnthropicClient::new(
82 anthropic_config.clone(),
83 config.timeout
84 )?;
85 Ok(Box::new(client))
86 }
87 }
88}