syntaxdot_tokenizers/error.rs

use std::io;

use sentencepiece::SentencePieceError;
use thiserror::Error;
use wordpieces::WordPiecesError;

/// Errors that can arise while loading or applying a tokenizer.
#[derive(Debug, Error)]
pub enum TokenizerError {
    /// The tokenizer model file could not be opened.
    #[error("Cannot open tokenizer model `{model_path:?}`: {inner:?}")]
    OpenError {
        model_path: String,
        inner: io::Error,
    },

    /// Error from the sentencepiece tokenizer.
    #[error(transparent)]
    SentencePiece(#[from] SentencePieceError),

    /// Error from the word pieces tokenizer.
    #[error("Cannot process word pieces: {0}")]
    WordPieces(#[from] WordPiecesError),
}

impl TokenizerError {
    /// Construct an `OpenError` from a model path and the underlying I/O error.
    pub fn open_error(model_path: impl Into<String>, inner: io::Error) -> Self {
        TokenizerError::OpenError {
            model_path: model_path.into(),
            inner,
        }
    }
}
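
// Usage sketch (illustrative; not part of the original module). The two
// helper functions below are hypothetical and only demonstrate the two
// conversion paths into `TokenizerError`.

// `std::fs::read` returns a bare `io::Error` that does not carry the
// offending path, so `open_error` is used to attach the model path.
fn read_model(model_path: &str) -> Result<Vec<u8>, TokenizerError> {
    std::fs::read(model_path).map_err(|inner| TokenizerError::open_error(model_path, inner))
}

// The `#[from]` attributes make thiserror derive `From` impls, so `?`
// lifts library errors such as `WordPiecesError` into `TokenizerError`
// without an explicit `map_err`.
fn lift_pieces_error<T>(result: Result<T, WordPiecesError>) -> Result<T, TokenizerError> {
    Ok(result?)
}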