use std::ops::Range;

use tokenizers::Tokenizer;

use crate::{ChunkCapacity, ChunkSize, ChunkSizer};

impl ChunkSizer for Tokenizer {
    /// Returns the number of tokens in a given text after tokenization.
    ///
    /// # Panics
    ///
    /// Will panic if you don't have a byte-level tokenizer and the splitter
    /// encounters text it can't tokenize.
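    ///
    /// # Example
    ///
    /// A minimal sketch of sizing chunks by token count, assuming the
    /// crate's top-level `TextSplitter` API. Marked `no_run` because it
    /// downloads a model from the Hugging Face Hub.
    ///
    /// ```no_run
    /// use text_splitter::TextSplitter;
    /// use tokenizers::Tokenizer;
    ///
    /// let tokenizer = Tokenizer::from_pretrained("bert-base-cased", None).unwrap();
    /// // Chunks are sized by token count instead of character count.
    /// let splitter = TextSplitter::new(tokenizer);
    /// let chunks = splitter
    ///     .chunks("An apple a day keeps the doctor away.", 10)
    ///     .collect::<Vec<_>>();
    /// ```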
    fn chunk_size(&self, chunk: &str, capacity: &impl ChunkCapacity) -> ChunkSize {
        ChunkSize::from_offsets(encoded_offsets(self, chunk), capacity)
    }
}

impl ChunkSizer for &Tokenizer {
    /// Returns the number of tokens in a given text after tokenization.
    ///
    /// # Panics
    ///
    /// Will panic if you don't have a byte-level tokenizer and the splitter
    /// encounters text it can't tokenize.
    fn chunk_size(&self, chunk: &str, capacity: &impl ChunkCapacity) -> ChunkSize {
        ChunkSize::from_offsets(encoded_offsets(self, chunk), capacity)
    }
}

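/// Returns the byte offset range of each token in the given chunk,
/// correcting for the off-by-one that whitespace prefixing can introduce.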
fn encoded_offsets<'text>(
    tokenizer: &Tokenizer,
    chunk: &'text str,
) -> impl Iterator<Item = Range<usize>> + 'text {
    let encoding = tokenizer
        .encode(chunk, false)
        .unwrap_or_else(|_| panic!("Unable to tokenize the following string: {chunk}"));
    let mut offsets = encoding
        .get_offsets()
        .iter()
        .map(|(start, end)| {
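            // Extend each range by one byte to account for whitespace the
            // tokenizer may have skipped; corrected below if this overshoots.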
            let end = *end + 1;
            *start..end
        })
        .collect::<Vec<_>>();
    // Sometimes the offsets are off by one because of whitespace prefixing.
    // If the last range now extends past the end of the chunk, the extension
    // above overshot: shift every range back by one byte so the offsets line
    // up with the original text again.
    let prefixed = offsets
        .last()
        .map(|r| r.end != chunk.len())
        .unwrap_or_default();

    if prefixed {
        for range in &mut offsets {
            if range.start != 0 {
                range.start -= 1;
            }
            range.end -= 1;
        }
    }

    offsets.into_iter()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn returns_offsets() {
        let tokenizer = Tokenizer::from_pretrained("bert-base-cased", None).unwrap();
        let offsets = encoded_offsets(&tokenizer, " An apple a").collect::<Vec<_>>();
        assert_eq!(offsets, vec![0..3, 3..9, 9..11]);
    }

    #[test]
    fn returns_offsets_handles_prefix() {
        let tokenizer = Tokenizer::from_pretrained("bert-base-cased", None).unwrap();
        let offsets = encoded_offsets(&tokenizer, "An apple a").collect::<Vec<_>>();
        assert_eq!(offsets, vec![0..2, 2..8, 8..10]);
    }
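
    // Sketch of an edge case: an empty chunk should yield no offsets.
    // Assumes encoding an empty string succeeds with an empty encoding.
    #[test]
    fn returns_no_offsets_for_empty_chunk() {
        let tokenizer = Tokenizer::from_pretrained("bert-base-cased", None).unwrap();
        let offsets = encoded_offsets(&tokenizer, "").collect::<Vec<_>>();
        assert!(offsets.is_empty());
    }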
}