doum_cli/llm/client.rs

1use crate::llm::{AnthropicClient, AnthropicConfig, AnthropicSecret, OpenAIClient, OpenAIConfig};
2use crate::system::SecretManager;
3use crate::{llm::OpenAISecret, system::LLMConfig};
4use anyhow::{Context, Result};
5use serde::{Deserialize, Serialize};
6
7/// LLM Message Role
/// LLM Message Role
///
/// Serialized in lowercase ("user" / "assistant") via `rename_all`,
/// matching the wire format providers expect.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    /// Message authored by the end user.
    User,
    /// Message authored by the model.
    Assistant,
}
14
15/// LLM Request
/// LLM Request
///
/// Provider-agnostic request payload consumed by [`LLMClient::generate`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMRequest {
    /// System prompt applied to the whole conversation.
    pub system: String,
    /// Conversation turns, in order.
    pub messages: Vec<Message>,
    /// Whether the provider may use web search for this request.
    pub use_websearch: bool,
}
22
23/// LLM Message
/// LLM Message
///
/// A single conversation turn paired with the role that produced it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    /// Who authored this message.
    pub role: Role,
    /// Plain-text message body.
    pub content: String,
}
29
30impl Message {
31    /// create user message
32    pub fn user(content: impl Into<String>) -> Self {
33        Self {
34            role: Role::User,
35            content: content.into(),
36        }
37    }
38
39    /// create assistant message
40    pub fn assistant(content: impl Into<String>) -> Self {
41        Self {
42            role: Role::Assistant,
43            content: content.into(),
44        }
45    }
46}
47
48/// LLM Client Trait
49#[async_trait::async_trait]
50pub trait LLMClient: Send + Sync {
51    /// Generate response from LLM
52    async fn generate(&self, request: LLMRequest) -> Result<String>;
53
54    /// Verify LLM client connectivity
55    async fn verify(&self) -> Result<bool> {
56        let test_request = LLMRequest {
57            system: "This is a test, please respond shortly.".to_string(),
58            messages: vec![Message::user("Hello")],
59            use_websearch: false,
60        };
61
62        match self.generate(test_request).await {
63            Ok(_) => Ok(true),
64            Err(_) => Ok(false),
65        }
66    }
67}
68
69/// Verify LLM configuration without creating a persistent client
70pub async fn verify_config(provider: &str, model: &str) -> Result<bool> {
71    let test_config = LLMConfig {
72        provider: provider.to_string(),
73        model: model.to_string(),
74        timeout: 30,
75        max_retries: 3,
76        use_thinking: false,
77        use_web_search: false,
78    };
79
80    let client = create_client(&test_config)?;
81    client.verify().await
82}
83
84/// Create LLM client based on configuration
85pub fn create_client(config: &LLMConfig) -> Result<Box<dyn LLMClient>> {
86    let provider = &config.provider;
87
88    match provider.as_str() {
89        "openai" => {
90            let secret: OpenAISecret =
91                SecretManager::load("openai").context("Failed to load OpenAI secret")?;
92
93            let openai_config = OpenAIConfig {
94                model: config.model.clone(),
95                api_key: secret.api_key,
96                organization: secret.organization,
97                project: secret.project,
98            };
99            let client = OpenAIClient::new(openai_config, config.timeout)?;
100            Ok(Box::new(client))
101        }
102        "anthropic" => {
103            let secret: AnthropicSecret =
104                SecretManager::load("anthropic").context("Failed to load Anthropic secret")?;
105
106            let anthropic_config = AnthropicConfig {
107                model: config.model.clone(),
108                api_key: secret.api_key,
109            };
110            let client = AnthropicClient::new(anthropic_config, config.timeout)?;
111            Ok(Box::new(client))
112        }
113        _ => anyhow::bail!("Unknown provider: {}", provider),
114    }
115}