use std::str::from_utf8_unchecked;

use memchr::memchr;

use crate::TokenSource;

/// Returns a [`TokenSource`] that uses the lines in `data` as Tokens. The newline
/// separator (`\r\n` or `\n`) is included in the emitted tokens. This means that changing
/// the newline separator from `\r\n` to `\n` (or omitting it fully on the last line) is
/// detected by [`Diff`](crate::Diff).
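///
/// Illustrative sketch of the resulting tokens (the path below assumes this module is
/// exposed as `imara_diff::sources`):
///
/// ```
/// use imara_diff::sources::lines; // assumed public path of this module
///
/// let mut tokens = lines("foo\nbar\r\nbaz");
/// assert_eq!(tokens.next(), Some("foo\n"));   // `\n` separator is kept
/// assert_eq!(tokens.next(), Some("bar\r\n")); // `\r\n` separator is kept
/// assert_eq!(tokens.next(), Some("baz"));     // last line without a separator
/// assert_eq!(tokens.next(), None);
/// ```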
pub fn lines(data: &str) -> Lines<'_> {
    Lines(ByteLines(data.as_bytes()))
}

/// Returns a [`TokenSource`] that uses the lines in `data` as Tokens. The newline
/// separator (`\r\n` or `\n`) is included in the emitted tokens. This means that changing
/// the newline separator from `\r\n` to `\n` (or omitting it fully on the last line) is
/// detected when computing a [`Diff`](crate::Diff).
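///
/// Illustrative sketch, mirroring [`lines`] but for raw bytes (again assuming the
/// `imara_diff::sources` path):
///
/// ```
/// use imara_diff::sources::byte_lines; // assumed public path of this module
///
/// let mut tokens = byte_lines(b"foo\nbar");
/// assert_eq!(tokens.next(), Some(&b"foo\n"[..])); // separator included
/// assert_eq!(tokens.next(), Some(&b"bar"[..]));   // last line has none
/// assert_eq!(tokens.next(), None);
/// ```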
pub fn byte_lines(data: &[u8]) -> ByteLines<'_> {
    ByteLines(data)
}

/// By default, a line diff is produced for a string
impl<'a> TokenSource for &'a str {
    type Token = &'a str;

    type Tokenizer = Lines<'a>;

    fn tokenize(&self) -> Self::Tokenizer {
        lines(self)
    }

    fn estimate_tokens(&self) -> u32 {
        lines(self).estimate_tokens()
    }
}

/// By default, a line diff is produced for a byte slice
impl<'a> TokenSource for &'a [u8] {
    type Token = Self;
    type Tokenizer = ByteLines<'a>;

    fn tokenize(&self) -> Self::Tokenizer {
        byte_lines(self)
    }

    fn estimate_tokens(&self) -> u32 {
        byte_lines(self).estimate_tokens()
    }
}

/// A [`TokenSource`] that returns the lines of a `str` as tokens. See [`lines`] for
/// details.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Lines<'a>(ByteLines<'a>);

impl<'a> Iterator for Lines<'a> {
    type Item = &'a str;

    fn next(&mut self) -> Option<Self::Item> {
        // safety invariant: this struct may only contain valid utf8
        // splitting valid utf8 at an ascii byte (`\n`) always produces valid utf8
        self.0.next().map(|it| unsafe { from_utf8_unchecked(it) })
    }
}

/// By default, a line diff is produced for a string
impl<'a> TokenSource for Lines<'a> {
    type Token = &'a str;

    type Tokenizer = Self;

    fn tokenize(&self) -> Self::Tokenizer {
        *self
    }

    fn estimate_tokens(&self) -> u32 {
        self.0.estimate_tokens()
    }
}

/// A [`TokenSource`] that returns the lines of a byte slice as tokens. See [`byte_lines`]
/// for details.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct ByteLines<'a>(&'a [u8]);

impl<'a> Iterator for ByteLines<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        if self.0.is_empty() {
            return None;
        }
        // find the end of the current line; the `\n` (if any) stays part of the token
        let line_len = memchr(b'\n', self.0).map_or(self.0.len(), |len| len + 1);
        let (line, rem) = self.0.split_at(line_len);
        self.0 = rem;
        Some(line)
    }
}

/// By default, a line diff is produced for a byte slice
impl<'a> TokenSource for ByteLines<'a> {
    type Token = &'a [u8];

    type Tokenizer = Self;

    fn tokenize(&self) -> Self::Tokenizer {
        *self
    }

    fn estimate_tokens(&self) -> u32 {
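        // Heuristic: sample the first 20 lines to get an average line length, then
        // extrapolate over the whole input. `take(20)` runs on a copy of the iterator
        // (`ByteLines` is `Copy`), so `self.0` below still holds the full input.
        // E.g. if the sampled lines span 800 bytes and the input is 4000 bytes long,
        // the estimate is 4000 * 20 / 800 = 100 tokens. An empty input falls back to
        // a guess of 100.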
        let len: usize = self.take(20).map(|line| line.len()).sum();
        if len == 0 {
            100
        } else {
            (self.0.len() * 20 / len) as u32
        }
    }
}
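
// A few sanity tests sketching the expected behavior of the tokenizers above: the
// `TokenSource` impls delegate to `lines`/`byte_lines`, separators stay part of the
// tokens, and `estimate_tokens` extrapolates from the first 20 lines.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::TokenSource;

    #[test]
    fn str_tokenizes_into_lines_with_separators() {
        // `&str` tokenizes via `lines`, keeping the newline separators.
        let tokens: Vec<_> = "foo\nbar\r\nbaz".tokenize().collect();
        assert_eq!(tokens, vec!["foo\n", "bar\r\n", "baz"]);
    }

    #[test]
    fn byte_lines_handles_empty_and_unterminated_input() {
        assert_eq!(byte_lines(b"").next(), None);
        let tokens: Vec<_> = byte_lines(b"a\nb").collect();
        assert_eq!(tokens, vec![&b"a\n"[..], &b"b"[..]]);
    }

    #[test]
    fn estimate_tokens_is_exact_for_short_uniform_input() {
        // 3 lines of 2 bytes each: the 20-line sample covers the whole input,
        // so the estimate is 6 * 20 / 6 = 20.
        assert_eq!(byte_lines(b"a\nb\nc\n").estimate_tokens(), 20);
    }
}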