// burn_lm_inference/errors.rs
/// Shorthand for fallible inference operations that yield a value of type `T`.
pub type InferenceResult<T> = Result<T, InferenceError>;
/// Shorthand for fallible inference operations whose success value may be
/// absent (`Ok(None)` is a valid, non-error outcome).
pub type InferenceOptionalResult<T> = Result<Option<T>, InferenceError>;
3
/// Errors produced while managing (downloading, loading, deleting) and
/// running inference models.
///
/// `Display` messages are generated by `thiserror` from the `#[error]`
/// attributes; tuple fields are interpolated positionally (`{0}`, `{1}`).
#[derive(thiserror::Error, Debug)]
pub enum InferenceError {
    /// Deleting a model failed. Fields: (model, reason) — per the message
    /// format, `{0}` names the model and `{1}` explains the failure.
    #[error("Error deleting model: {0} (reason: {1})")]
    DeleteError(String, String),
    /// Downloading a model failed. Fields: (model, reason).
    #[error("Error downloading model: {0} (reason: {1})")]
    DownloadError(String, String),
    /// Loading a model failed. Field: a human-readable description
    /// (no separate reason field, unlike the delete/download variants).
    #[error("Error loading model: {0}")]
    LoadError(String),
    /// An operation requiring a loaded model was attempted before any
    /// model was loaded. Carries no payload.
    #[error("Model has not been loaded.")]
    ModelNotLoaded,
    /// The named plugin does not implement the download capability.
    /// Field: the plugin's name.
    #[error("The plugin '{0}' does not support downloading.")]
    PluginDownloadUnsupportedError(String),
    /// Unloading a model failed. Fields: (model, reason).
    #[error("Error unloading model: {0} (reason: {1})")]
    UnloadError(String, String),
    /// The tokenized input is longer than the model's context window.
    /// Fields: (input length in tokens, maximum context window in tokens).
    #[error("Input sequence length ({0} tokens) exceeds maximum context window ({1} tokens). Please shorten your input or increase the maximum context window.")]
    ContextLengthExceeded(usize, usize),
}
20}