//! doum_cli/llm/client.rs — LLM client abstraction and provider factory.

1use crate::llm::{
2    AnthropicClient, AnthropicConfig, AnthropicSecret, OpenAIClient, OpenAIConfig, Provider,
3};
4use crate::system::SecretManager;
5use crate::{llm::OpenAISecret, system::LLMConfig};
6use anyhow::{Context, Result};
7use serde::{Deserialize, Serialize};
8
9/// LLM Message Role
10#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
11#[serde(rename_all = "lowercase")]
12pub enum Role {
13    User,
14    Assistant,
15}
16
/// A single request to an LLM provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMRequest {
    /// System prompt that frames the conversation.
    pub system: String,
    /// Ordered conversation turns (user/assistant) sent to the model.
    pub messages: Vec<Message>,
    /// Whether the provider may use its web-search capability
    /// (consumed by the concrete provider clients).
    pub use_websearch: bool,
}
24
/// One turn of an LLM conversation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    /// Who authored this turn.
    pub role: Role,
    /// The text content of the turn.
    pub content: String,
}
31
32impl Message {
33    /// create user message
34    pub fn user(content: impl Into<String>) -> Self {
35        Self {
36            role: Role::User,
37            content: content.into(),
38        }
39    }
40
41    /// create assistant message
42    pub fn assistant(content: impl Into<String>) -> Self {
43        Self {
44            role: Role::Assistant,
45            content: content.into(),
46        }
47    }
48}
49
50/// LLM Client Trait
51#[async_trait::async_trait]
52pub trait LLMClient: Send + Sync {
53    /// Generate response from LLM
54    async fn generate(&self, request: LLMRequest) -> Result<String>;
55
56    /// Verify LLM client connectivity
57    async fn verify(&self) -> Result<bool> {
58        let test_request = LLMRequest {
59            system: "This is a test, please respond shortly.".to_string(),
60            messages: vec![Message::user("Hello")],
61            use_websearch: false,
62        };
63
64        match self.generate(test_request).await {
65            Ok(_) => Ok(true),
66            Err(_) => Ok(false),
67        }
68    }
69}
70
71/// Verify LLM configuration without creating a persistent client
72pub async fn verify_config(provider: &str, model: &str) -> Result<bool> {
73    let test_config = LLMConfig {
74        provider: provider.to_string(),
75        model: model.to_string(),
76        timeout: 30,
77        max_retries: 3,
78        use_thinking: false,
79        use_web_search: false,
80    };
81
82    let client = create_client(&test_config)?;
83    client.verify().await
84}
85
86/// Create LLM client based on configuration
87pub fn create_client(config: &LLMConfig) -> Result<Box<dyn LLMClient>> {
88    let provider: Provider = config.provider.parse()?;
89
90    match provider {
91        Provider::OpenAI => {
92            let secret: OpenAISecret =
93                SecretManager::load(provider.as_str()).context("Failed to load OpenAI secret")?;
94
95            let openai_config = OpenAIConfig {
96                model: config.model.clone(),
97                api_key: secret.api_key,
98                organization: secret.organization,
99                project: secret.project,
100            };
101            let client = OpenAIClient::new(openai_config, config.timeout)?;
102            Ok(Box::new(client))
103        }
104        Provider::Anthropic => {
105            let secret: AnthropicSecret = SecretManager::load(provider.as_str())
106                .context("Failed to load Anthropic secret")?;
107
108            let anthropic_config = AnthropicConfig {
109                model: config.model.clone(),
110                api_key: secret.api_key,
111            };
112            let client = AnthropicClient::new(anthropic_config, config.timeout)?;
113            Ok(Box::new(client))
114        }
115    }
116}