use thiserror::Error;
/// Crate-wide result alias: every fallible VMM operation returns `Result<T, VmmError>`.
pub type Result<T> = std::result::Result<T, VmmError>;
/// Errors produced by the CUDA Virtual Memory Management (VMM) layer.
///
/// Display strings are generated by `thiserror` from the `#[error(...)]`
/// attributes on each variant.
#[derive(Debug, Error)]
pub enum VmmError {
/// Generic CUDA driver failure, carrying a pre-formatted message.
#[error("CUDA error: {0}")]
CudaError(String),
/// A reservation asked for more virtual address space than remains.
#[error(
"Out of virtual address space: requested {requested} bytes, available {available} bytes"
)]
OutOfVirtualMemory { requested: usize, available: usize },
/// An allocation asked for more physical memory than remains.
#[error("Out of physical memory: requested {requested} bytes, available {available} bytes")]
OutOfPhysicalMemory { requested: usize, available: usize },
/// An offset/size pair fell outside the region's capacity.
#[error("Invalid offset: {offset} (size: {size}, capacity: {capacity})")]
InvalidOffset {
offset: usize,
size: usize,
capacity: usize,
},
/// Mapping physical memory into a virtual range failed; message has details.
#[error("Mapping failed: {0}")]
MappingFailed(String),
/// Removing an existing mapping failed; message has details.
#[error("Unmapping failed: {0}")]
UnmappingFailed(String),
/// A value did not meet the required alignment.
#[error("Invalid alignment: {actual}, required: {required}")]
InvalidAlignment { actual: usize, required: usize },
/// Attempted to map a range that is already mapped.
#[error("Range already mapped: offset {offset}, size {size}")]
AlreadyMapped { offset: usize, size: usize },
/// Attempted to operate on a range that is not mapped.
#[error("Range not mapped: offset {offset}, size {size}")]
NotMapped { offset: usize, size: usize },
/// Page size was not a power of two of at least 64 KiB (per the error text).
#[error("Invalid page size: {0} (must be power of 2 and >= 64KB)")]
InvalidPageSize(usize),
/// The CUDA device lacks Virtual Memory Management support.
#[error("Device does not support CUDA Virtual Memory Management")]
UnsupportedDevice,
/// Error bubbled up from `candle_core`; converted automatically via `#[from]`.
#[error("Candle error: {0}")]
CandleError(#[from] candle_core::Error),
/// Lookup for a model by name/id found nothing.
#[error("Model not found: {0}")]
ModelNotFound(String),
/// Registration rejected because the model name/id is already taken.
#[error("Model already registered: {0}")]
ModelAlreadyExists(String),
/// Catch-all for errors that fit no other variant.
#[error("{0}")]
Other(String),
}
impl VmmError {
    /// Converts a CUDA driver error into [`VmmError::CudaError`],
    /// capturing the driver error's `Debug` representation in the message.
    pub fn from_cuda_result(result: cudarc::driver::result::DriverError) -> Self {
        VmmError::CudaError(format!("{:?}", result))
    }

    /// Builds a [`VmmError::CudaError`] from any string-like message.
    pub fn cuda<S: Into<String>>(msg: S) -> Self {
        VmmError::CudaError(msg.into())
    }

    /// Builds a catch-all [`VmmError::Other`] from any string-like message.
    pub fn other<S: Into<String>>(msg: S) -> Self {
        VmmError::Other(msg.into())
    }
}

/// Lets `?` convert CUDA driver errors directly, mirroring the
/// `#[from] candle_core::Error` conversion the enum already provides.
/// Delegates to [`VmmError::from_cuda_result`] so both paths produce
/// identical messages.
impl From<cudarc::driver::result::DriverError> for VmmError {
    fn from(err: cudarc::driver::result::DriverError) -> Self {
        VmmError::from_cuda_result(err)
    }
}