use std::sync::PoisonError;

/// Errors that can be produced by this crate. Most variants wrap errors from
/// optional dependencies and are only available when the corresponding feature
/// is enabled.
#[derive(thiserror::Error, Debug)]
#[allow(missing_docs)]
#[non_exhaustive]
pub enum Error
{
9 #[cfg(feature = "simple-api")]
10 #[error("Serialization error: {0}")]
11 JsonSerializationError(#[from] serde_json::Error),
12 #[error("SendError in streams communication, most likely end of stream: {0}.")]
13 SendError(String),
14 #[error("IO Error: {0}.")]
15 IOError(#[from] std::io::Error),
16 #[error("Deserialization error: {0}")]
17 DeserializationError(String),
18 #[cfg(feature = "ollama")]
19 #[error("Ollama error: {0}")]
20 OllamaError(#[from] ollama_rs::error::OllamaError),
21 #[error("Poison (Mutex/RwLock) error: {0}")]
22 PoisonError(String),
23 #[error("{0}")]
24 FuturesMpscSendError(#[from] futures::channel::mpsc::SendError),
25 #[cfg(feature = "image")]
26 #[error("{0}")]
27 ValueError(#[from] kproc_values::Error),
28 #[cfg(feature = "llama.cpp")]
29 #[error("{0}")]
30 HfApiError(#[from] hf_hub::api::sync::ApiError),
31 #[error("Invalid HugginFace uri.")]
32 HfInvalidUri,
33 #[cfg(feature = "llama.cpp")]
34 #[error("{0}")]
35 LlamaCpp(#[from] llama_cpp_2::LLamaCppError),
36 #[cfg(feature = "llama.cpp")]
37 #[error("{0}")]
38 LlamaModelLoad(#[from] llama_cpp_2::LlamaModelLoadError),
39 #[cfg(feature = "llama.cpp")]
40 #[error("{0}")]
41 LlamaContextLoad(#[from] llama_cpp_2::LlamaContextLoadError),
42 #[cfg(feature = "llama.cpp")]
43 #[error("{0}")]
44 LlamaStringToToken(#[from] llama_cpp_2::StringToTokenError),
45 #[cfg(feature = "llama.cpp")]
46 #[error("{0}")]
47 LlamaBatchAddError(#[from] llama_cpp_2::llama_batch::BatchAddError),
48 #[cfg(feature = "llama.cpp")]
49 #[error("{0}")]
50 LlamaDecodeError(#[from] llama_cpp_2::DecodeError),
51 #[cfg(feature = "llama.cpp")]
52 #[error("{0}")]
53 LlamaTokenToString(#[from] llama_cpp_2::TokenToStringError),
54 #[cfg(feature = "template")]
55 #[error("{0}")]
56 MinijinjaError(#[from] minijinja::Error),
57 #[cfg(feature = "llama.cpp")]
58 #[error("{0}")]
59 CCUtilsServerError(#[from] ccutils::servers::ServerError),
60 #[cfg(feature = "llama.cpp")]
61 #[error("Invalid JSON Grammar")]
62 InvalidJsonGrammar,
63 #[cfg(feature = "simple-api")]
64 #[error("HTTP Request error: {0}")]
65 HttpError(String),
66 #[cfg(feature = "simple-api")]
67 #[error("Error in API usage: code: {code} message: {message} type: {error_type}")]
68 SimpleApiError
69 {
70 code: u32,
71 message: String,
72 error_type: String,
73 },
74}

impl<T> From<PoisonError<T>> for Error
{
  fn from(value: PoisonError<T>) -> Self
  {
    Error::PoisonError(value.to_string())
  }
}
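
// --- A minimal usage sketch (not part of the original source): hypothetical
// tests illustrating how the `#[from]` derives and the manual
// `From<PoisonError<T>>` impl above let callers propagate errors with `?`,
// and how the `#[error(...)]` templates render. Paths, values, and test
// names are illustrative assumptions.
#[cfg(test)]
mod conversion_sketch
{
  use super::Error;

  // `std::io::Error` converts into `Error::IOError` through the derived `From`.
  #[test]
  fn io_error_converts_through_question_mark()
  {
    fn read_missing_file() -> Result<String, Error>
    {
      // `?` uses the `From<std::io::Error>` impl generated by `#[from]`.
      Ok(std::fs::read_to_string("/this/path/should/not/exist")?)
    }
    assert!(matches!(read_missing_file(), Err(Error::IOError(_))));
  }

  // A poisoned `Mutex` converts into `Error::PoisonError` through the manual impl.
  #[test]
  fn poison_error_converts_through_question_mark()
  {
    use std::sync::{Arc, Mutex};

    fn read_value(m: &Mutex<u32>) -> Result<u32, Error>
    {
      // `?` converts `PoisonError<MutexGuard<u32>>` via `From<PoisonError<T>>`.
      Ok(*m.lock()?)
    }

    let shared = Arc::new(Mutex::new(0u32));
    // Poison the mutex by panicking while the lock is held (the panic output
    // in the test log is expected).
    let cloned = Arc::clone(&shared);
    let _ = std::thread::spawn(move || {
      let _guard = cloned.lock().unwrap();
      panic!("poison the lock");
    })
    .join();

    assert!(matches!(read_value(&shared), Err(Error::PoisonError(_))));
  }

  // The struct-style `SimpleApiError` variant renders its fields through the
  // `#[error(...)]` template.
  #[cfg(feature = "simple-api")]
  #[test]
  fn simple_api_error_display()
  {
    let error = Error::SimpleApiError {
      code: 429,
      message: "rate limited".to_string(),
      error_type: "requests".to_string(),
    };
    assert_eq!(
      error.to_string(),
      "Error in API usage: code: 429 message: rate limited type: requests"
    );
  }
}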