use crate::llm::{
    AnthropicClient, AnthropicConfig, AnthropicSecret, OpenAIClient, OpenAIConfig, Provider,
};
use crate::system::SecretManager;
use crate::{llm::OpenAISecret, system::LLMConfig};
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};

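/// Author of a chat message; serialized in lowercase ("user"/"assistant") via serde.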
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LLMRole {
    User,
    Assistant,
}

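/// A single prompt to send to a provider: a system prompt plus the
/// conversation history.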
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMRequest {
    pub system: String,
    pub messages: Vec<LLMMessage>,
}

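/// One turn of the conversation, attributed to a role.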
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMMessage {
    pub role: LLMRole,
    pub content: String,
}

impl LLMMessage {
    pub fn user(content: impl Into<String>) -> Self {
        Self {
            role: LLMRole::User,
            content: content.into(),
        }
    }

    pub fn assistant(content: impl Into<String>) -> Self {
        Self {
            role: LLMRole::Assistant,
            content: content.into(),
        }
    }
}

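/// Common interface implemented by each provider-specific client.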
#[async_trait::async_trait]
pub trait LLMClient: Send + Sync {
    async fn generate(&self, request: LLMRequest) -> Result<String>;
}

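/// Provider-agnostic client that dispatches to the configured backend.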
pub enum Client {
    OpenAI(OpenAIClient),
    Anthropic(AnthropicClient),
}

impl Client {
    pub async fn generate(&self, request: LLMRequest) -> Result<String> {
        match self {
            Client::OpenAI(client) => client.generate(request).await,
            Client::Anthropic(client) => client.generate(request).await,
        }
    }

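    /// Generates a completion and runs `parser` over it, requesting a fresh
    /// completion on each parse failure, up to `MAX_RETRIES` attempts.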
    pub async fn generate_with_parser<T, P>(&self, request: LLMRequest, parser: P) -> Result<T>
    where
        P: Fn(&str) -> Result<T>,
    {
        const MAX_RETRIES: u32 = 3;

        for attempt in 1..=MAX_RETRIES {
            // Request a fresh completion on every attempt; re-parsing the
            // same response string would fail deterministically.
            let response = self.generate(request.clone()).await?;
            match parser(&response) {
                Ok(parsed) => return Ok(parsed),
                Err(e) => {
                    if attempt < MAX_RETRIES {
                        tracing::warn!(
                            "Parse failed (attempt {}/{}): {:#}. Retrying...",
                            attempt,
                            MAX_RETRIES,
                            e
                        );
                    } else {
                        tracing::error!("All {} parse attempts exhausted.", MAX_RETRIES);
                        return Err(e);
                    }
                }
            }
        }

        unreachable!("the loop always returns within MAX_RETRIES attempts")
    }
}

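/// Sends a minimal round-trip request to confirm the configured client can
/// reach its provider with the stored credentials.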
pub async fn verify_client(config: &LLMConfig) -> Result<bool> {
    let client = create_client(config)?;
    let request = LLMRequest {
        system: "This is a connectivity test; please respond briefly.".to_string(),
        messages: vec![LLMMessage::user("Hello")],
    };

    match client.generate(request).await {
        Ok(_) => Ok(true),
        Err(_) => Ok(false),
    }
}

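/// Builds a provider-specific client from `config`, loading the matching
/// secret via the `SecretManager`.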
pub fn create_client(config: &LLMConfig) -> Result<Client> {
    match config.provider {
        Provider::OpenAI => {
            let secret: OpenAISecret =
                SecretManager::load(&config.provider).context("Failed to load OpenAI secret")?;

            let openai_config = OpenAIConfig {
                model: config.model.clone(),
                api_key: secret.api_key,
                organization: secret.organization,
                project: secret.project,
            };
            let client = OpenAIClient::new(openai_config, config.timeout)?;
            Ok(Client::OpenAI(client))
        }
        Provider::Anthropic => {
            let secret: AnthropicSecret =
                SecretManager::load(&config.provider).context("Failed to load Anthropic secret")?;

            let anthropic_config = AnthropicConfig {
                model: config.model.clone(),
                api_key: secret.api_key,
            };
            let client = AnthropicClient::new(anthropic_config, config.timeout)?;
            Ok(Client::Anthropic(client))
        }
    }
}
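
// Hypothetical usage sketch (not part of the API above): shows how
// `generate_with_parser` pairs a prompt with a fallible parser. The function
// name and prompt text are illustrative assumptions; only `create_client`,
// `LLMRequest`, `LLMMessage`, and `generate_with_parser` come from this module.
#[allow(dead_code)]
async fn example_parse_integer(config: &LLMConfig) -> Result<u32> {
    let client = create_client(config)?;
    let request = LLMRequest {
        system: "Answer with a single integer and nothing else.".to_string(),
        messages: vec![LLMMessage::user("How many days are in a week?")],
    };
    // On a parse failure the client requests a fresh completion, so a
    // transient formatting mistake by the model is retried rather than fatal.
    client
        .generate_with_parser(request, |raw: &str| {
            raw.trim()
                .parse::<u32>()
                .context("Expected a bare integer in the model response")
        })
        .await
}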