1use serde::{Deserialize, Serialize};
7use std::fmt;
8
/// Error presentation suitable for showing to an end user: a short summary,
/// a longer message, and an actionable suggestion. Produced from a
/// `ModelError` via `ModelError::to_user_facing`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserFacingError {
    // One-line headline, e.g. "Rate limited" or "Model not found".
    pub summary: String,
    // Full human-readable description of what went wrong.
    pub message: String,
    // Concrete next step the user can take to resolve the problem.
    pub suggestion: String,
    // Coarse classification of the failure.
    pub category: ErrorCategory,
    // Whether retrying the operation may succeed.
    pub recoverable: bool,
}
23
/// Coarse classification of a `UserFacingError`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ErrorCategory {
    // Could not reach or use a backend service.
    Connection,
    // Missing or rejected credentials.
    Auth,
    // Invalid or missing user configuration.
    Config,
    // A requested resource (e.g. a model) does not exist.
    NotFound,
    // Transient condition (timeout, rate limit) — retrying may help.
    Temporary,
    // Unexpected internal or backend failure.
    Internal,
}
40
/// Top-level error type for model operations. Each variant maps to a
/// `UserFacingError` via `to_user_facing`.
#[derive(Debug)]
pub enum ModelError {
    /// Failure reported by a model backend (connection, HTTP, provider).
    Backend(BackendError),

    /// Problem with user-supplied configuration.
    Config(ConfigError),

    /// The requested model was not found; `searched` lists the locations tried.
    ModelNotFound { model: String, searched: Vec<String> },

    /// `operation` exceeded `duration_secs` seconds.
    Timeout { operation: String, duration_secs: u64 },

    /// Rate limit hit; `retry_after` is the suggested wait in seconds, if known.
    RateLimit { retry_after: Option<u64> },

    /// The request itself was malformed or rejected.
    InvalidRequest(String),

    /// A response could not be parsed; `raw` optionally carries the payload.
    ParseError { message: String, raw: Option<String> },

    /// The streaming connection failed mid-response.
    StreamError(String),

    /// Credentials were missing or rejected.
    Authentication(String),
}
71
72impl fmt::Display for ModelError {
73 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
74 match self {
75 ModelError::Backend(e) => write!(f, "Backend error: {}", e),
76 ModelError::Config(e) => write!(f, "Configuration error: {}", e),
77 ModelError::ModelNotFound { model, searched } => {
78 write!(f, "Model '{}' not found. Searched: {}", model, searched.join(", "))
79 }
80 ModelError::Timeout { operation, duration_secs } => {
81 write!(f, "Operation '{}' timed out after {} seconds", operation, duration_secs)
82 }
83 ModelError::RateLimit { retry_after } => {
84 if let Some(secs) = retry_after {
85 write!(f, "Rate limit exceeded. Retry after {} seconds", secs)
86 } else {
87 write!(f, "Rate limit exceeded")
88 }
89 }
90 ModelError::InvalidRequest(msg) => write!(f, "Invalid request: {}", msg),
91 ModelError::ParseError { message, raw } => {
92 if let Some(r) = raw {
93 write!(f, "Parse error: {} (raw: {})", message, r)
94 } else {
95 write!(f, "Parse error: {}", message)
96 }
97 }
98 ModelError::StreamError(msg) => write!(f, "Stream error: {}", msg),
99 ModelError::Authentication(msg) => write!(f, "Authentication error: {}", msg),
100 }
101 }
102}
103
// Marker impl: the default `source()` (None) is sufficient because each
// variant embeds its context directly in its `Display` output.
impl std::error::Error for ModelError {}
105
106impl ModelError {
107 pub fn to_user_facing(&self) -> UserFacingError {
109 match self {
110 ModelError::Backend(BackendError::ConnectionFailed { backend, url, .. }) => {
111 UserFacingError {
112 summary: format!("{} connection failed", backend),
113 message: format!("Could not connect to {} at {}", backend, url),
114 suggestion: if backend == "ollama" {
115 "Run 'ollama serve' to start Ollama, or check if it's running on the correct port".to_string()
116 } else {
117 format!("Check if {} is running and accessible", backend)
118 },
119 category: ErrorCategory::Connection,
120 recoverable: true,
121 }
122 }
123 ModelError::Backend(BackendError::NotAvailable { backend, reason }) => {
124 UserFacingError {
125 summary: format!("{} unavailable", backend),
126 message: format!("{} is not available: {}", backend, reason),
127 suggestion: if backend == "ollama" {
128 "Start Ollama with 'ollama serve' or pull the model with 'ollama pull <model>'".to_string()
129 } else {
130 format!("Ensure {} service is running and healthy", backend)
131 },
132 category: ErrorCategory::Connection,
133 recoverable: true,
134 }
135 }
136 ModelError::Backend(BackendError::HttpError { status, message }) => {
137 let (summary, suggestion) = match status {
138 401 | 403 => ("Authentication failed", "Check your API key in ~/.config/mermaid/config.toml"),
139 404 => ("Resource not found", "The requested model or endpoint does not exist"),
140 429 => ("Rate limited", "Wait a moment before retrying, or switch to a local model"),
141 500..=599 => ("Server error", "The backend service is experiencing issues - try again later"),
142 _ => ("Request failed", "Check your network connection and backend configuration"),
143 };
144 UserFacingError {
145 summary: summary.to_string(),
146 message: format!("HTTP {}: {}", status, message),
147 suggestion: suggestion.to_string(),
148 category: if *status == 401 || *status == 403 {
149 ErrorCategory::Auth
150 } else if *status == 429 {
151 ErrorCategory::Temporary
152 } else {
153 ErrorCategory::Internal
154 },
155 recoverable: *status == 429 || *status >= 500,
156 }
157 }
158 ModelError::Backend(BackendError::UnexpectedResponse { backend, message }) => {
159 UserFacingError {
160 summary: "Unexpected response".to_string(),
161 message: format!("Received unexpected response from {}: {}", backend, message),
162 suggestion: "This might be a version mismatch - try updating the backend".to_string(),
163 category: ErrorCategory::Internal,
164 recoverable: false,
165 }
166 }
167 ModelError::Backend(BackendError::ProviderError { provider, code, message }) => {
168 let code_str = code.as_deref().unwrap_or("unknown");
169 UserFacingError {
170 summary: format!("{} error", provider),
171 message: format!("{} returned error {}: {}", provider, code_str, message),
172 suggestion: format!("Check {} documentation for error code {}", provider, code_str),
173 category: ErrorCategory::Internal,
174 recoverable: false,
175 }
176 }
177 ModelError::Config(ConfigError::MissingRequired(field)) => {
178 UserFacingError {
179 summary: "Missing configuration".to_string(),
180 message: format!("Required configuration '{}' is missing", field),
181 suggestion: format!("Add '{}' to ~/.config/mermaid/config.toml", field),
182 category: ErrorCategory::Config,
183 recoverable: false,
184 }
185 }
186 ModelError::Config(ConfigError::InvalidValue { field, value, reason }) => {
187 UserFacingError {
188 summary: "Invalid configuration".to_string(),
189 message: format!("Invalid value '{}' for '{}': {}", value, field, reason),
190 suggestion: format!("Fix '{}' in ~/.config/mermaid/config.toml", field),
191 category: ErrorCategory::Config,
192 recoverable: false,
193 }
194 }
195 ModelError::Config(ConfigError::FileError { path, reason }) => {
196 UserFacingError {
197 summary: "Config file error".to_string(),
198 message: format!("Cannot read config file '{}': {}", path, reason),
199 suggestion: "Check file permissions and syntax".to_string(),
200 category: ErrorCategory::Config,
201 recoverable: false,
202 }
203 }
204 ModelError::ModelNotFound { model, searched } => {
205 UserFacingError {
206 summary: "Model not found".to_string(),
207 message: format!("Model '{}' not found in: {}", model, searched.join(", ")),
208 suggestion: format!(
209 "Pull the model with 'ollama pull {}' or check if the model name is correct",
210 model
211 ),
212 category: ErrorCategory::NotFound,
213 recoverable: false,
214 }
215 }
216 ModelError::Timeout { operation, duration_secs } => {
217 UserFacingError {
218 summary: "Request timed out".to_string(),
219 message: format!("'{}' timed out after {} seconds", operation, duration_secs),
220 suggestion: "The model might be overloaded - try a smaller model or wait and retry".to_string(),
221 category: ErrorCategory::Temporary,
222 recoverable: true,
223 }
224 }
225 ModelError::RateLimit { retry_after } => {
226 let wait_msg = retry_after
227 .map(|s| format!("Wait {} seconds", s))
228 .unwrap_or_else(|| "Wait a moment".to_string());
229 UserFacingError {
230 summary: "Rate limited".to_string(),
231 message: "Too many requests - rate limit exceeded".to_string(),
232 suggestion: format!("{}. Consider using a local Ollama model to avoid rate limits", wait_msg),
233 category: ErrorCategory::Temporary,
234 recoverable: true,
235 }
236 }
237 ModelError::InvalidRequest(msg) => {
238 UserFacingError {
239 summary: "Invalid request".to_string(),
240 message: format!("The request was invalid: {}", msg),
241 suggestion: "Check your message format or try rephrasing".to_string(),
242 category: ErrorCategory::Internal,
243 recoverable: false,
244 }
245 }
246 ModelError::ParseError { message, .. } => {
247 UserFacingError {
248 summary: "Parse error".to_string(),
249 message: format!("Failed to parse response: {}", message),
250 suggestion: "The model returned an unexpected format - try sending the message again".to_string(),
251 category: ErrorCategory::Internal,
252 recoverable: true,
253 }
254 }
255 ModelError::StreamError(msg) => {
256 UserFacingError {
257 summary: "Stream interrupted".to_string(),
258 message: format!("Connection lost during streaming: {}", msg),
259 suggestion: "Check your network connection and try again".to_string(),
260 category: ErrorCategory::Connection,
261 recoverable: true,
262 }
263 }
264 ModelError::Authentication(msg) => {
265 UserFacingError {
266 summary: "Authentication failed".to_string(),
267 message: format!("Authentication error: {}", msg),
268 suggestion: "Check your API key in ~/.config/mermaid/config.toml or environment variables".to_string(),
269 category: ErrorCategory::Auth,
270 recoverable: false,
271 }
272 }
273 }
274 }
275
276 pub fn to_channel_message(&self) -> String {
278 let user_facing = self.to_user_facing();
279 serde_json::to_string(&user_facing).unwrap_or_else(|_| {
281 format!("{}|{}|{}", user_facing.summary, user_facing.message, user_facing.suggestion)
283 })
284 }
285}
286
/// Errors originating from a model backend service.
#[derive(Debug)]
pub enum BackendError {
    /// Connection to `backend` at `url` could not be established.
    ConnectionFailed { backend: String, url: String, reason: String },

    /// The backend is known but currently unusable for the stated reason.
    NotAvailable { backend: String, reason: String },

    /// The backend returned a non-success HTTP status.
    HttpError { status: u16, message: String },

    /// The response did not match the expected shape or protocol.
    UnexpectedResponse { backend: String, message: String },

    /// Provider-reported error, with an optional provider error code.
    ProviderError { provider: String, code: Option<String>, message: String },
}
305
306impl fmt::Display for BackendError {
307 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
308 match self {
309 BackendError::ConnectionFailed { backend, url, reason } => {
310 write!(f, "Failed to connect to {} at {}: {}", backend, url, reason)
311 }
312 BackendError::NotAvailable { backend, reason } => {
313 write!(f, "Backend '{}' not available: {}", backend, reason)
314 }
315 BackendError::HttpError { status, message } => {
316 write!(f, "HTTP error {}: {}", status, message)
317 }
318 BackendError::UnexpectedResponse { backend, message } => {
319 write!(f, "Unexpected response from {}: {}", backend, message)
320 }
321 BackendError::ProviderError { provider, code, message } => {
322 if let Some(c) = code {
323 write!(f, "{} error {}: {}", provider, c, message)
324 } else {
325 write!(f, "{} error: {}", provider, message)
326 }
327 }
328 }
329 }
330}
331
// Marker impl: context lives in the Display output; no `source()` chaining.
impl std::error::Error for BackendError {}
333
/// Errors in user-supplied configuration.
#[derive(Debug)]
pub enum ConfigError {
    /// A required configuration field (named by the payload) is absent.
    MissingRequired(String),

    /// `field` holds `value`, which is invalid for the stated `reason`.
    InvalidValue { field: String, value: String, reason: String },

    /// The config file at `path` could not be read.
    FileError { path: String, reason: String },
}
346
347impl fmt::Display for ConfigError {
348 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
349 match self {
350 ConfigError::MissingRequired(field) => {
351 write!(f, "Missing required configuration: {}", field)
352 }
353 ConfigError::InvalidValue { field, value, reason } => {
354 write!(f, "Invalid value for '{}': '{}' ({})", field, value, reason)
355 }
356 ConfigError::FileError { path, reason } => {
357 write!(f, "Error reading config file '{}': {}", path, reason)
358 }
359 }
360 }
361}
362
// Marker impl: context lives in the Display output; no `source()` chaining.
impl std::error::Error for ConfigError {}
364
/// Module-wide result alias that defaults the error type to `ModelError`.
pub type Result<T> = std::result::Result<T, ModelError>;
367
368impl From<anyhow::Error> for ModelError {
370 fn from(err: anyhow::Error) -> Self {
371 ModelError::InvalidRequest(err.to_string())
372 }
373}
374
375impl From<reqwest::Error> for ModelError {
377 fn from(err: reqwest::Error) -> Self {
378 if err.is_timeout() {
379 ModelError::Timeout {
380 operation: "HTTP request".to_string(),
381 duration_secs: 120,
382 }
383 } else if err.is_connect() {
384 ModelError::Backend(BackendError::ConnectionFailed {
385 backend: "unknown".to_string(),
386 url: err.url().map(|u| u.to_string()).unwrap_or_else(|| "unknown".to_string()),
387 reason: err.to_string(),
388 })
389 } else if err.is_status() {
390 let status = err.status().map(|s| s.as_u16()).unwrap_or(500);
391 ModelError::Backend(BackendError::HttpError {
392 status,
393 message: err.to_string(),
394 })
395 } else {
396 ModelError::Backend(BackendError::UnexpectedResponse {
397 backend: "unknown".to_string(),
398 message: err.to_string(),
399 })
400 }
401 }
402}
403
404impl From<serde_json::Error> for ModelError {
406 fn from(err: serde_json::Error) -> Self {
407 ModelError::ParseError {
408 message: err.to_string(),
409 raw: None,
410 }
411 }
412}