// oxify_connect_llm/errors.rs

use crate::LlmError;
use std::fmt;
/// Structured context attached to an `LlmError` to aid debugging: which
/// operation failed, against which provider/model, plus optional free-form
/// details and a creation timestamp.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ErrorContext {
    /// Human-readable name of the failed operation (e.g. "LLM Completion").
    pub operation: String,
    /// Provider the operation targeted, if known (e.g. "openai").
    pub provider: Option<String>,
    /// Model the operation targeted, if known (e.g. "gpt-4").
    pub model: Option<String>,
    /// Optional free-form diagnostic details.
    pub details: Option<String>,
    /// When the context was created; set automatically by `new`.
    pub timestamp: Option<std::time::SystemTime>,
}

impl ErrorContext {
    /// Creates a context for `operation`, stamped with the current time.
    pub fn new(operation: impl Into<String>) -> Self {
        Self {
            operation: operation.into(),
            provider: None,
            model: None,
            details: None,
            timestamp: Some(std::time::SystemTime::now()),
        }
    }

    /// Sets the provider name (builder-style).
    pub fn with_provider(mut self, provider: impl Into<String>) -> Self {
        self.provider = Some(provider.into());
        self
    }

    /// Sets the model name (builder-style).
    pub fn with_model(mut self, model: impl Into<String>) -> Self {
        self.model = Some(model.into());
        self
    }

    /// Sets free-form details (builder-style).
    pub fn with_details(mut self, details: impl Into<String>) -> Self {
        self.details = Some(details.into());
        self
    }

    /// Renders the context as a single comma-separated line, omitting
    /// fields that are `None`. The timestamp is intentionally not shown.
    pub fn format(&self) -> String {
        use std::fmt::Write as _;

        let mut msg = format!("Operation: {}", self.operation);

        // write! appends in place, avoiding an intermediate String per
        // field; writing into a String is infallible, so the Result is
        // safe to discard.
        if let Some(provider) = &self.provider {
            let _ = write!(msg, ", Provider: {}", provider);
        }
        if let Some(model) = &self.model {
            let _ = write!(msg, ", Model: {}", model);
        }
        if let Some(details) = &self.details {
            let _ = write!(msg, ", Details: {}", details);
        }

        msg
    }
}

impl fmt::Display for ErrorContext {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.format())
    }
}
78
79#[derive(Debug)]
81pub struct ContextualError {
82 pub error: LlmError,
84 pub context: ErrorContext,
86}
87
88impl ContextualError {
89 pub fn new(error: LlmError, context: ErrorContext) -> Self {
91 Self { error, context }
92 }
93
94 pub fn detailed_message(&self) -> String {
96 format!("{} | Context: {}", self.error, self.context.format())
97 }
98}
99
100impl fmt::Display for ContextualError {
101 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
102 write!(f, "{}", self.detailed_message())
103 }
104}
105
106impl std::error::Error for ContextualError {}
107
108pub trait ErrorContextExt {
110 fn with_context(self, context: ErrorContext) -> ContextualError;
112}
113
114impl ErrorContextExt for LlmError {
115 fn with_context(self, context: ErrorContext) -> ContextualError {
116 ContextualError::new(self, context)
117 }
118}
119
120pub struct ErrorContextBuilder;
122
123impl ErrorContextBuilder {
124 pub fn completion(provider: impl Into<String>, model: impl Into<String>) -> ErrorContext {
126 ErrorContext::new("LLM Completion")
127 .with_provider(provider)
128 .with_model(model)
129 }
130
131 pub fn embedding(provider: impl Into<String>, model: impl Into<String>) -> ErrorContext {
133 ErrorContext::new("Embedding Generation")
134 .with_provider(provider)
135 .with_model(model)
136 }
137
138 pub fn streaming(provider: impl Into<String>, model: impl Into<String>) -> ErrorContext {
140 ErrorContext::new("Streaming Completion")
141 .with_provider(provider)
142 .with_model(model)
143 }
144
145 pub fn initialization(provider: impl Into<String>) -> ErrorContext {
147 ErrorContext::new("Provider Initialization").with_provider(provider)
148 }
149
150 pub fn rate_limit(provider: impl Into<String>) -> ErrorContext {
152 ErrorContext::new("Rate Limit Check").with_provider(provider)
153 }
154
155 pub fn cache(operation: impl Into<String>) -> ErrorContext {
157 ErrorContext::new(format!("Cache {}", operation.into()))
158 }
159}
160
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_error_context_creation() {
        let ctx = ErrorContext::new("test operation")
            .with_provider("openai")
            .with_model("gpt-4")
            .with_details("test details");

        assert_eq!(ctx.operation, "test operation");
        assert_eq!(ctx.provider.as_deref(), Some("openai"));
        assert_eq!(ctx.model.as_deref(), Some("gpt-4"));
        assert_eq!(ctx.details.as_deref(), Some("test details"));
    }

    #[test]
    fn test_error_context_format() {
        let rendered = ErrorContext::new("test")
            .with_provider("anthropic")
            .with_model("claude-3")
            .format();

        for needle in ["test", "anthropic", "claude-3"] {
            assert!(rendered.contains(needle));
        }
    }

    #[test]
    fn test_contextual_error() {
        let ctx = ErrorContext::new("test").with_provider("openai");
        let wrapped = LlmError::ApiError("test error".to_string()).with_context(ctx);

        let msg = wrapped.detailed_message();
        assert!(msg.contains("test error"));
        assert!(msg.contains("openai"));
    }

    #[test]
    fn test_error_context_builder_completion() {
        let ctx = ErrorContextBuilder::completion("openai", "gpt-4");
        assert_eq!(ctx.operation, "LLM Completion");
        assert_eq!(ctx.provider.as_deref(), Some("openai"));
        assert_eq!(ctx.model.as_deref(), Some("gpt-4"));
    }

    #[test]
    fn test_error_context_builder_embedding() {
        let ctx = ErrorContextBuilder::embedding("openai", "text-embedding-ada-002");
        assert_eq!(ctx.operation, "Embedding Generation");
    }

    #[test]
    fn test_error_context_builder_streaming() {
        let ctx = ErrorContextBuilder::streaming("anthropic", "claude-3");
        assert_eq!(ctx.operation, "Streaming Completion");
    }
}