// oxify_connect_llm/errors.rs

//! Enhanced error types with rich context
//!
//! This module provides error utilities with detailed context to help with debugging.

use crate::LlmError;
use std::fmt;
7
/// Error context for better debugging.
///
/// Carries metadata about a failed operation — what was attempted, which
/// provider/model was involved, optional free-form details, and when it
/// happened — so errors can be reported with actionable information.
/// Build one with `ErrorContext::new` plus the `with_*` methods, or via
/// `ErrorContextBuilder` for the common operations.
#[derive(Debug, Clone)]
pub struct ErrorContext {
    /// The operation that failed (e.g. "LLM Completion")
    pub operation: String,
    /// The provider that was being used, if known
    pub provider: Option<String>,
    /// The model that was being used, if known
    pub model: Option<String>,
    /// Additional free-form context
    pub details: Option<String>,
    /// Timestamp when the error occurred (set automatically by `new`)
    pub timestamp: Option<std::time::SystemTime>,
}
22
23impl ErrorContext {
24    /// Create a new error context
25    pub fn new(operation: impl Into<String>) -> Self {
26        Self {
27            operation: operation.into(),
28            provider: None,
29            model: None,
30            details: None,
31            timestamp: Some(std::time::SystemTime::now()),
32        }
33    }
34
35    /// Set the provider
36    pub fn with_provider(mut self, provider: impl Into<String>) -> Self {
37        self.provider = Some(provider.into());
38        self
39    }
40
41    /// Set the model
42    pub fn with_model(mut self, model: impl Into<String>) -> Self {
43        self.model = Some(model.into());
44        self
45    }
46
47    /// Set additional details
48    pub fn with_details(mut self, details: impl Into<String>) -> Self {
49        self.details = Some(details.into());
50        self
51    }
52
53    /// Format the context as a string
54    pub fn format(&self) -> String {
55        let mut msg = format!("Operation: {}", self.operation);
56
57        if let Some(provider) = &self.provider {
58            msg.push_str(&format!(", Provider: {}", provider));
59        }
60
61        if let Some(model) = &self.model {
62            msg.push_str(&format!(", Model: {}", model));
63        }
64
65        if let Some(details) = &self.details {
66            msg.push_str(&format!(", Details: {}", details));
67        }
68
69        msg
70    }
71}
72
73impl fmt::Display for ErrorContext {
74    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
75        write!(f, "{}", self.format())
76    }
77}
78
/// Enhanced error with context.
///
/// Pairs an [`LlmError`] with the [`ErrorContext`] describing what was
/// being attempted when it occurred. Both fields are public so callers
/// can inspect either part directly; `Display` renders them together.
#[derive(Debug)]
pub struct ContextualError {
    /// The underlying error
    pub error: LlmError,
    /// The context describing the failed operation
    pub context: ErrorContext,
}
87
88impl ContextualError {
89    /// Create a new contextual error
90    pub fn new(error: LlmError, context: ErrorContext) -> Self {
91        Self { error, context }
92    }
93
94    /// Get a detailed error message
95    pub fn detailed_message(&self) -> String {
96        format!("{} | Context: {}", self.error, self.context.format())
97    }
98}
99
100impl fmt::Display for ContextualError {
101    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
102        write!(f, "{}", self.detailed_message())
103    }
104}
105
106impl std::error::Error for ContextualError {}
107
/// Extension trait for adding context to errors.
///
/// Lets call sites write `err.with_context(ctx)` instead of constructing
/// a [`ContextualError`] by hand.
pub trait ErrorContextExt {
    /// Add context to an error, consuming it and returning a [`ContextualError`].
    fn with_context(self, context: ErrorContext) -> ContextualError;
}
113
114impl ErrorContextExt for LlmError {
115    fn with_context(self, context: ErrorContext) -> ContextualError {
116        ContextualError::new(self, context)
117    }
118}
119
/// Helper to create common error contexts.
///
/// A stateless namespace of constructors producing the [`ErrorContext`]
/// values for the most common operations (completion, embedding,
/// streaming, initialization, rate limiting, caching).
pub struct ErrorContextBuilder;
122
123impl ErrorContextBuilder {
124    /// Context for completion requests
125    pub fn completion(provider: impl Into<String>, model: impl Into<String>) -> ErrorContext {
126        ErrorContext::new("LLM Completion")
127            .with_provider(provider)
128            .with_model(model)
129    }
130
131    /// Context for embedding requests
132    pub fn embedding(provider: impl Into<String>, model: impl Into<String>) -> ErrorContext {
133        ErrorContext::new("Embedding Generation")
134            .with_provider(provider)
135            .with_model(model)
136    }
137
138    /// Context for streaming requests
139    pub fn streaming(provider: impl Into<String>, model: impl Into<String>) -> ErrorContext {
140        ErrorContext::new("Streaming Completion")
141            .with_provider(provider)
142            .with_model(model)
143    }
144
145    /// Context for provider initialization
146    pub fn initialization(provider: impl Into<String>) -> ErrorContext {
147        ErrorContext::new("Provider Initialization").with_provider(provider)
148    }
149
150    /// Context for rate limiting
151    pub fn rate_limit(provider: impl Into<String>) -> ErrorContext {
152        ErrorContext::new("Rate Limit Check").with_provider(provider)
153    }
154
155    /// Context for caching
156    pub fn cache(operation: impl Into<String>) -> ErrorContext {
157        ErrorContext::new(format!("Cache {}", operation.into()))
158    }
159}
160
#[cfg(test)]
mod tests {
    use super::*;

    /// Builder methods populate every field they are given.
    #[test]
    fn test_error_context_creation() {
        let ctx = ErrorContext::new("test operation")
            .with_provider("openai")
            .with_model("gpt-4")
            .with_details("test details");

        assert_eq!(ctx.operation, "test operation");
        assert_eq!(ctx.provider.as_deref(), Some("openai"));
        assert_eq!(ctx.model.as_deref(), Some("gpt-4"));
        assert_eq!(ctx.details.as_deref(), Some("test details"));
    }

    /// `format()` mentions the operation and every set field.
    #[test]
    fn test_error_context_format() {
        let ctx = ErrorContext::new("test")
            .with_provider("anthropic")
            .with_model("claude-3");

        let rendered = ctx.format();
        for needle in ["test", "anthropic", "claude-3"] {
            assert!(rendered.contains(needle));
        }
    }

    /// `with_context` wraps the error; the detailed message carries both parts.
    #[test]
    fn test_contextual_error() {
        let err = LlmError::ApiError("test error".to_string());
        let wrapped = err.with_context(ErrorContext::new("test").with_provider("openai"));

        let msg = wrapped.detailed_message();
        assert!(msg.contains("test error"));
        assert!(msg.contains("openai"));
    }

    /// The completion builder fills operation, provider, and model.
    #[test]
    fn test_error_context_builder_completion() {
        let ctx = ErrorContextBuilder::completion("openai", "gpt-4");
        assert_eq!(ctx.operation, "LLM Completion");
        assert_eq!(ctx.provider.as_deref(), Some("openai"));
        assert_eq!(ctx.model.as_deref(), Some("gpt-4"));
    }

    /// The embedding builder sets the expected operation name.
    #[test]
    fn test_error_context_builder_embedding() {
        let ctx = ErrorContextBuilder::embedding("openai", "text-embedding-ada-002");
        assert_eq!(ctx.operation, "Embedding Generation");
    }

    /// The streaming builder sets the expected operation name.
    #[test]
    fn test_error_context_builder_streaming() {
        let ctx = ErrorContextBuilder::streaming("anthropic", "claude-3");
        assert_eq!(ctx.operation, "Streaming Completion");
    }
}