Skip to main content

llm/
error.rs

1use std::fmt;
2
3use thiserror::Error;
4
5#[derive(Debug, Clone, PartialEq, Eq)]
6pub struct ContextOverflowError {
7    pub provider: String,
8    pub model: Option<String>,
9    pub requested_tokens: Option<u32>,
10    pub max_tokens: Option<u32>,
11    pub message: String,
12}
13
14impl ContextOverflowError {
15    pub fn new(
16        provider: impl Into<String>,
17        model: Option<String>,
18        requested_tokens: Option<u32>,
19        max_tokens: Option<u32>,
20        message: impl Into<String>,
21    ) -> Self {
22        Self { provider: provider.into(), model, requested_tokens, max_tokens, message: message.into() }
23    }
24}
25
26impl fmt::Display for ContextOverflowError {
27    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
28        let model = self.model.as_deref().unwrap_or("unknown-model");
29        match (self.requested_tokens, self.max_tokens) {
30            (Some(requested), Some(max)) => write!(
31                f,
32                "{} (provider={}, model={}, requested={}, max={})",
33                self.message, self.provider, model, requested, max
34            ),
35            _ => write!(f, "{} (provider={}, model={})", self.message, self.provider, model),
36        }
37    }
38}
39
// Unified error type for all LLM provider integrations in this module.
// Extended docs are pulled in from `docs/llm_error.md` at compile time.
#[doc = include_str!("docs/llm_error.md")]
#[derive(Debug, Error, Clone)]
pub enum LlmError {
    /// Environment variable not set or invalid; payload is the variable name.
    #[error("{0} environment variable not set")]
    MissingApiKey(String),
    /// Invalid API key format (also produced from invalid header values — see `From` impls below).
    #[error("Invalid API key: {0}")]
    InvalidApiKey(String),
    /// HTTP client creation failed before any request was made.
    #[error("Failed to create HTTP client: {0}")]
    HttpClientCreation(String),
    /// API request failed at the transport level (network, timeout, etc.).
    #[error("API request failed: {0}")]
    ApiRequest(String),
    /// API returned an error response after the request was delivered.
    #[error("API error: {0}")]
    ApiError(String),
    /// API rejected the request because the prompt exceeded the model's context window.
    /// Carries structured provider/model/token details (see `ContextOverflowError`).
    #[error("Context overflow: {0}")]
    ContextOverflow(ContextOverflowError),
    /// IO error while reading stream
    #[error("IO error reading stream: {0}")]
    IoError(String),
    /// JSON parsing/serialization error
    #[error("JSON parsing error: {0}")]
    JsonParsing(String),
    /// Tool parameter parsing error; keeps the tool name for diagnostics.
    #[error("Failed to parse tool parameters for {tool_name}: {error}")]
    ToolParameterParsing { tool_name: String, error: String },
    /// OAuth authentication error (only constructed via `From` when the `oauth` feature is on).
    #[error("OAuth error: {0}")]
    OAuthError(String),
    /// The message contained only content types this provider doesn't support
    #[error("Unsupported content: {0}")]
    UnsupportedContent(String),
    /// Generic error for other cases; displays the inner message verbatim.
    #[error("{0}")]
    Other(String),
}
80
81impl From<reqwest::Error> for LlmError {
82    fn from(error: reqwest::Error) -> Self {
83        LlmError::ApiRequest(error.to_string())
84    }
85}
86
87impl From<serde_json::Error> for LlmError {
88    fn from(error: serde_json::Error) -> Self {
89        LlmError::JsonParsing(error.to_string())
90    }
91}
92
93impl From<std::io::Error> for LlmError {
94    fn from(error: std::io::Error) -> Self {
95        LlmError::IoError(error.to_string())
96    }
97}
98
99impl From<reqwest::header::InvalidHeaderValue> for LlmError {
100    fn from(error: reqwest::header::InvalidHeaderValue) -> Self {
101        LlmError::InvalidApiKey(error.to_string())
102    }
103}
104
105impl From<async_openai::error::OpenAIError> for LlmError {
106    fn from(error: async_openai::error::OpenAIError) -> Self {
107        LlmError::ApiError(error.to_string())
108    }
109}
110
#[cfg(feature = "oauth")]
impl From<crate::oauth::OAuthError> for LlmError {
    /// Stringify OAuth failures into `OAuthError`; compiled only when the
    /// `oauth` feature is enabled (matching the `crate::oauth` module).
    fn from(err: crate::oauth::OAuthError) -> Self {
        Self::OAuthError(err.to_string())
    }
}
117
/// Convenience alias: the crate-wide `Result` with [`LlmError`] as the error type.
pub type Result<T> = std::result::Result<T, LlmError>;