use conllu::graph::Sentence;
use ndarray::Array1;

mod albert;
pub use albert::AlbertTokenizer;

mod bert;
pub use bert::BertTokenizer;

mod error;
pub use error::TokenizerError;

mod xlm_roberta;
pub use xlm_roberta::XlmRobertaTokenizer;

/// Trait for wordpiece tokenizers.
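///
/// An implementor converts the tokens of a [`Sentence`] into the word piece
/// identifiers expected by a particular model. The sketch below shows a
/// minimal implementor; the `FixedMarkerTokenizer` type and the piece
/// identifier `0` are illustrative assumptions, not part of this crate:
///
/// ```ignore
/// use conllu::graph::Sentence;
/// use ndarray::Array1;
///
/// // Hypothetical tokenizer that emits a single marker piece per sentence.
/// struct FixedMarkerTokenizer;
///
/// impl Tokenize for FixedMarkerTokenizer {
///     fn tokenize(&self, sentence: Sentence) -> SentenceWithPieces {
///         SentenceWithPieces {
///             // A single marker piece with an assumed identifier of 0.
///             pieces: Array1::from(vec![0i64]),
///             // The original sentence is carried along unchanged.
///             sentence,
///             // No token pieces were produced, so there are no offsets.
///             token_offsets: Vec::new(),
///         }
///     }
/// }
/// ```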
pub trait Tokenize: Send + Sync {
    /// Tokenize the tokens in a sentence into word pieces.
    fn tokenize(&self, sentence: Sentence) -> SentenceWithPieces;
}

/// A sentence and its word pieces.
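///
/// The `token_offsets` field ties the tokens of `sentence` to their word
/// pieces: each entry gives the offset in `pieces` at which a token starts.
/// A sketch, in which the piece identifiers are illustrative assumptions:
///
/// ```ignore
/// // Suppose `sentence` is a two-token Sentence whose first token was split
/// // into three word pieces.
/// let with_pieces = SentenceWithPieces {
///     pieces: Array1::from(vec![5012i64, 903, 7721, 88]),
///     sentence,
///     // The first token starts at index 0 in `pieces`, the second at index 3.
///     token_offsets: vec![0, 3],
/// };
/// ```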
pub struct SentenceWithPieces {
    /// Word pieces in a sentence.
    pub pieces: Array1<i64>,

    /// Sentence graph.
    pub sentence: Sentence,

    /// The offsets of tokens in `pieces`.
    pub token_offsets: Vec<usize>,
}