// autoagents_llamacpp/error.rs

use autoagents_llm::error::LLMError;
use std::fmt;

/// Error type for the llama.cpp provider, covering the lifecycle of a
/// local model: loading, context creation, tokenization, inference,
/// chat templating, and embedding extraction.
///
/// Each variant carries a human-readable detail message.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LlamaCppProviderError {
    /// The model file could not be loaded.
    ModelLoad(String),
    /// The inference context could not be created.
    ContextLoad(String),
    /// Input text could not be tokenized.
    Tokenization(String),
    /// Token generation failed.
    Inference(String),
    /// The provider configuration was invalid.
    Config(String),
    /// Applying the chat template failed.
    Template(String),
    /// Embedding extraction failed.
    Embedding(String),
    /// The requested feature is not supported by this backend.
    Unsupported(String),
    /// Any other llama.cpp error.
    Other(String),
}

29impl fmt::Display for LlamaCppProviderError {
30 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
31 match self {
32 LlamaCppProviderError::ModelLoad(e) => write!(f, "Model Load Error: {}", e),
33 LlamaCppProviderError::ContextLoad(e) => write!(f, "Context Load Error: {}", e),
34 LlamaCppProviderError::Tokenization(e) => write!(f, "Tokenization Error: {}", e),
35 LlamaCppProviderError::Inference(e) => write!(f, "Inference Error: {}", e),
36 LlamaCppProviderError::Config(e) => write!(f, "Configuration Error: {}", e),
37 LlamaCppProviderError::Template(e) => write!(f, "Template Error: {}", e),
38 LlamaCppProviderError::Embedding(e) => write!(f, "Embedding Error: {}", e),
39 LlamaCppProviderError::Unsupported(e) => write!(f, "Unsupported: {}", e),
40 LlamaCppProviderError::Other(e) => write!(f, "llama.cpp Error: {}", e),
41 }
42 }
43}
44
45impl std::error::Error for LlamaCppProviderError {}
46
47impl From<LlamaCppProviderError> for LLMError {
48 fn from(err: LlamaCppProviderError) -> Self {
49 match err {
50 LlamaCppProviderError::ModelLoad(e) => {
51 LLMError::ProviderError(format!("Failed to load model: {}", e))
52 }
53 LlamaCppProviderError::ContextLoad(e) => {
54 LLMError::ProviderError(format!("Failed to create context: {}", e))
55 }
56 LlamaCppProviderError::Tokenization(e) => {
57 LLMError::ProviderError(format!("Tokenization failed: {}", e))
58 }
59 LlamaCppProviderError::Inference(e) => {
60 LLMError::ProviderError(format!("Inference failed: {}", e))
61 }
62 LlamaCppProviderError::Config(e) => {
63 LLMError::InvalidRequest(format!("Invalid configuration: {}", e))
64 }
65 LlamaCppProviderError::Template(e) => {
66 LLMError::InvalidRequest(format!("Template error: {}", e))
67 }
68 LlamaCppProviderError::Embedding(e) => {
69 LLMError::ProviderError(format!("Embedding failed: {}", e))
70 }
71 LlamaCppProviderError::Unsupported(e) => LLMError::NoToolSupport(e),
72 LlamaCppProviderError::Other(e) => {
73 LLMError::ProviderError(format!("llama.cpp error: {}", e))
74 }
75 }
76 }
77}
78
#[cfg(test)]
mod tests {
    use super::*;

    /// `Display` output carries the category label and the detail message.
    #[test]
    fn test_error_display() {
        let err = LlamaCppProviderError::ModelLoad("missing file".to_string());
        assert_eq!(err.to_string(), "Model Load Error: missing file");
    }

    /// Conversion into `LLMError` preserves the contextual message.
    #[test]
    fn test_error_to_llm_error() {
        let err = LlamaCppProviderError::Config("bad config".to_string());
        let llm_err: LLMError = err.into();
        assert!(llm_err.to_string().contains("Invalid configuration"));
    }
}