1use core::cmp::Ordering;
4use core::hash;
5use core::ops::{Bound, RangeBounds};
6
7use crate::StrQueue;
8#[allow(unused_imports)]
11use crate::BoundExt;
12
/// A borrowed range of bytes inside a [`StrQueue`], exposed as two slices.
///
/// The logical content is the concatenation `former ++ latter`; the backing
/// storage (presumably a `VecDeque`-like ring buffer — it is accessed via
/// `as_slices()`) can wrap around, so a contiguous view is not always possible.
///
/// `Eq` is derived, but `PartialEq`, `Ord`, and `Hash` are implemented
/// manually elsewhere in this file so that ranges compare by concatenated
/// content regardless of where the `former`/`latter` split falls.
#[derive(Debug, Clone, Copy, Eq)]
pub struct BytesRange<'a> {
    // First (earlier) chunk of the range.
    pub(super) former: &'a [u8],
    // Second (later) chunk; logically follows `former`. Must not overlap it.
    pub(super) latter: &'a [u8],
}
29
impl<'a> BytesRange<'a> {
    /// Creates a `BytesRange` covering `range` (byte indices into the queue's
    /// content) of `queue`.
    #[must_use]
    pub(crate) fn new<R>(queue: &'a StrQueue, range: R) -> Self
    where
        R: RangeBounds<usize>,
    {
        let (former, latter) = queue.inner.as_slices();
        // `Bound::cloned` collides with the `BoundExt` polyfill imported at
        // the top of the file on older toolchains, hence the lint allow.
        #[allow(unstable_name_collisions)]
        Self::from_slices_and_bounds(
            former,
            latter,
            range.start_bound().cloned(),
            range.end_bound().cloned(),
        )
    }

    /// Creates a `BytesRange` directly from the two chunks.
    ///
    /// # Panics
    ///
    /// Panics if `former` and `latter` overlap. Overlap is detected by mutual
    /// pointer-range containment; empty slices never count as overlapping.
    #[must_use]
    fn from_slices(former: &'a [u8], latter: &'a [u8]) -> Self {
        let former_range = former.as_ptr_range();
        let latter_range = latter.as_ptr_range();
        if !(former.is_empty() || latter.is_empty())
            && (former_range.contains(&latter_range.start)
                || latter_range.contains(&former_range.start))
        {
            panic!("[precondition] `former` and `latter` should not overlap");
        }

        Self { former, latter }
    }

    /// Creates a `BytesRange` from two chunks restricted to the given bounds,
    /// interpreted as indices into the concatenation `former ++ latter`.
    ///
    /// Empty results are built as `&former[former_len..]` / `&latter[..0]`,
    /// keeping the original base pointers instead of producing dangling
    /// slices. An end bound past the content panics via slice indexing (and
    /// trips the debug assertion below in debug builds).
    #[must_use]
    pub(super) fn from_slices_and_bounds(
        former: &'a [u8],
        latter: &'a [u8],
        start: Bound<usize>,
        end: Bound<usize>,
    ) -> Self {
        // Fast path: the full, unrestricted range.
        if matches!(start, Bound::Unbounded) && matches!(end, Bound::Unbounded) {
            return Self::from_slices(former, latter);
        }

        let former_len = former.len();
        let latter_len = latter.len();
        let len = former_len + latter_len;

        if len == 0 {
            return Self::from_slices(&former[former_len..], &latter[..0]);
        }

        // Normalize the start bound to an inclusive index.
        let start = match start {
            Bound::Included(v) => v,
            // `Excluded(usize::MAX)` would overflow `v + 1`; the resulting
            // range is necessarily empty.
            Bound::Excluded(usize::MAX) => {
                return Self::from_slices(&former[former_len..], &latter[..0])
            }
            Bound::Excluded(v) => v + 1,
            Bound::Unbounded => 0,
        };
        // Normalize the end bound to an inclusive index.
        let end_included = match end {
            Bound::Included(v) => v,
            // `Excluded(0)` would underflow `v - 1`; the range is empty.
            Bound::Excluded(0) => return Self::from_slices(&former[former_len..], &latter[..0]),
            Bound::Excluded(v) => v - 1,
            // `len == 0` was handled above, so `len - 1` cannot underflow.
            Bound::Unbounded => len - 1,
        };
        debug_assert!(end_included < len);
        if start > end_included {
            // Empty (or inverted) range.
            return Self::from_slices(&former[former_len..], &latter[..0]);
        };

        if end_included < former_len {
            // The whole range lies inside `former`.
            debug_assert!(start < former_len, "`start <= end_included` holds");
            Self::from_slices(&former[start..=end_included], &latter[..0])
        } else {
            // The range ends inside `latter`; it may start in either chunk.
            // `start.min(former_len)` maps a start inside `latter` to an
            // empty tail of `former`; indices past `former` are handled by
            // the `latter` slice below.
            Self::from_slices(
                &former[start.min(former_len)..],
                &latter[..=(end_included - former_len)],
            )
        }
    }
}
125
126impl<'a> BytesRange<'a> {
128 #[must_use]
134 pub fn range<R>(&self, range: R) -> Self
135 where
136 R: RangeBounds<usize>,
137 {
138 #[allow(unstable_name_collisions)] Self::from_slices_and_bounds(
140 self.former,
141 self.latter,
142 range.start_bound().cloned(),
143 range.end_bound().cloned(),
144 )
145 }
146}
147
148impl<'a> BytesRange<'a> {
150 #[inline]
152 #[must_use]
153 pub fn len(&self) -> usize {
154 self.former.len() + self.latter.len()
156 }
157
158 #[inline]
160 #[must_use]
161 pub fn is_empty(&self) -> bool {
162 self.former.is_empty() && self.latter.is_empty()
163 }
164}
165
166impl<'a> BytesRange<'a> {
168 pub fn clear(&mut self) {
185 self.former = &self.former[self.former.len()..];
186 self.latter = &self.latter[..0];
187 }
188
189 pub(super) fn trim_start(&mut self, trim_len: usize) {
195 let former_len = self.former.len();
196 let latter_len = self.latter.len();
197 if trim_len > former_len + latter_len {
198 panic!("[precondition] length to trim should not be larger than the range");
199 }
200
201 if let Some(latter_trim_len) = trim_len.checked_sub(former_len) {
202 self.former = &self.former[former_len..];
203 self.latter = &self.latter[latter_trim_len..];
204 } else {
205 self.former = &self.former[trim_len..];
206 }
207 }
208
209 pub fn pop(&mut self) -> Option<u8> {
228 if let Some(&b) = self.former.get(0) {
229 self.former = &self.former[1..];
230 Some(b)
231 } else if let Some(&b) = self.latter.get(0) {
232 self.latter = &self.latter[1..];
233 Some(b)
234 } else {
235 None
236 }
237 }
238}
239
240impl<'a> BytesRange<'a> {
242 pub(super) fn first(&self) -> Option<u8> {
244 self.former.get(0).or_else(|| self.latter.get(0)).copied()
245 }
246
247 #[must_use]
249 pub(super) fn get_byte(&self, i: usize) -> Option<u8> {
250 self.former
251 .get(i)
252 .or_else(|| self.latter.get(i - self.former.len()))
253 .copied()
254 }
255
256 #[cfg(not(feature = "memchr"))]
258 pub(super) fn position2(&self, needle1: u8, needle2: u8) -> Option<usize> {
259 self.bytes().position(|b| (b == needle1) || (b == needle2))
260 }
261
262 #[cfg(feature = "memchr")]
264 pub(super) fn position2(&self, needle1: u8, needle2: u8) -> Option<usize> {
265 memchr::memchr2(needle1, needle2, self.former).or_else(|| {
266 memchr::memchr2(needle1, needle2, self.latter).map(|pos| pos + self.former.len())
267 })
268 }
269}
270
271impl<'a> BytesRange<'a> {
273 pub(super) fn bytes(&self) -> impl Iterator<Item = u8> + '_ {
275 self.former.iter().chain(self.latter).copied()
276 }
277}
278
impl<'a> BytesRange<'a> {
    /// Compares the concatenated content of two equal-length ranges.
    ///
    /// The two ranges may split their content at different points, so the
    /// chunks are aligned by splitting whichever side's `latter` starts
    /// earlier at the other side's `former`/`latter` boundary, yielding three
    /// pairwise slice comparisons chained with `then_with`.
    ///
    /// # Panics
    ///
    /// Panics if `self.len() != rhs.len()`.
    #[must_use]
    fn cmp_self_eqsize(&self, rhs: &Self) -> Ordering {
        assert_eq!(
            self.len(),
            rhs.len(),
            "[precondition] length of `self` and `rhs` should be the same"
        );

        let self_former_len = self.former.len();
        let rhs_former_len = rhs.former.len();

        if self_former_len > rhs_former_len {
            // `self.former` extends past `rhs.former`'s end; its tail lines
            // up with the first `rhs_latter_split` bytes of `rhs.latter`.
            let rhs_latter_split = self_former_len - rhs_former_len;
            self.former[..rhs_former_len]
                .cmp(rhs.former)
                .then_with(|| self.former[rhs_former_len..].cmp(&rhs.latter[..rhs_latter_split]))
                .then_with(|| self.latter.cmp(&rhs.latter[rhs_latter_split..]))
        } else {
            // Symmetric case: `rhs.former` is at least as long as
            // `self.former`, so `self.latter` is split instead.
            let self_latter_split = rhs_former_len - self_former_len;
            self.former
                .cmp(&rhs.former[..self_former_len])
                .then_with(|| self.latter[..self_latter_split].cmp(&rhs.former[self_former_len..]))
                .then_with(|| self.latter[self_latter_split..].cmp(rhs.latter))
        }
    }

    /// Lexicographically compares the concatenated content of two ranges.
    ///
    /// The common-length prefixes are compared first; if they tie, the
    /// shorter range orders first — standard lexicographic order.
    #[must_use]
    pub(super) fn cmp_self(&self, other: &Self) -> Ordering {
        let self_len = self.len();
        let other_len = other.len();
        let len_cmp = self_len.cmp(&other_len);

        // Truncate the longer side to the common length so that
        // `cmp_self_eqsize`'s equal-length precondition holds.
        let prefix_cmp = match len_cmp {
            Ordering::Greater => self.range(..other_len).cmp_self_eqsize(other),
            Ordering::Equal => self.cmp_self_eqsize(other),
            Ordering::Less => self.cmp_self_eqsize(&other.range(..self_len)),
        };

        prefix_cmp.then(len_cmp)
    }

    /// Lexicographically compares the concatenated content against a single
    /// contiguous slice.
    #[must_use]
    fn cmp_slice(&self, rhs: &[u8]) -> Ordering {
        let former_len = self.former.len();
        let rhs_len = rhs.len();

        if former_len > rhs_len {
            // `rhs` ends within `former`. Slice `cmp` already orders a strict
            // prefix before its extension, so comparing `former` alone is
            // sufficient; `self.latter` can only make `self` longer still.
            self.former.cmp(rhs)
        } else {
            self.former
                .cmp(&rhs[..former_len])
                .then_with(|| self.latter.cmp(&rhs[former_len..]))
        }
    }
}
343
344impl hash::Hash for BytesRange<'_> {
345 fn hash<H: hash::Hasher>(&self, state: &mut H) {
346 u8::hash_slice(self.former, state);
347 u8::hash_slice(self.latter, state);
348 }
349}
350
351impl PartialEq for BytesRange<'_> {
352 #[inline]
353 fn eq(&self, other: &Self) -> bool {
354 (self.len() == other.len()) && self.cmp_self_eqsize(other).is_eq()
356 }
357}
358
359impl PartialOrd for BytesRange<'_> {
360 #[inline]
361 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
362 Some(self.cmp(other))
363 }
364}
365
366impl Ord for BytesRange<'_> {
367 #[inline]
368 fn cmp(&self, other: &Self) -> Ordering {
369 self.cmp_self(other)
370 }
371}
372
373impl PartialEq<[u8]> for BytesRange<'_> {
374 #[inline]
375 fn eq(&self, other: &[u8]) -> bool {
376 (self.len() == other.len()) && self.cmp_slice(other).is_eq()
378 }
379}
380
381impl PartialOrd<[u8]> for BytesRange<'_> {
382 #[inline]
383 fn partial_cmp(&self, other: &[u8]) -> Option<Ordering> {
384 Some(self.cmp_slice(other))
385 }
386}
387
388impl PartialEq<BytesRange<'_>> for [u8] {
389 #[inline]
390 fn eq(&self, other: &BytesRange<'_>) -> bool {
391 other.eq(self)
392 }
393}
394
395impl PartialOrd<BytesRange<'_>> for [u8] {
396 #[inline]
397 fn partial_cmp(&self, other: &BytesRange<'_>) -> Option<Ordering> {
398 other.partial_cmp(self).map(Ordering::reverse)
399 }
400}
401
402impl PartialEq<&[u8]> for BytesRange<'_> {
403 #[inline]
404 fn eq(&self, other: &&[u8]) -> bool {
405 self.eq(*other)
406 }
407}
408
409impl PartialOrd<&[u8]> for BytesRange<'_> {
410 #[inline]
411 fn partial_cmp(&self, other: &&[u8]) -> Option<Ordering> {
412 self.partial_cmp(*other)
413 }
414}
415
416impl PartialEq<BytesRange<'_>> for &[u8] {
417 #[inline]
418 fn eq(&self, other: &BytesRange<'_>) -> bool {
419 other.eq(*self)
420 }
421}
422
423impl PartialOrd<BytesRange<'_>> for &[u8] {
424 #[inline]
425 fn partial_cmp(&self, other: &BytesRange<'_>) -> Option<Ordering> {
426 other.partial_cmp(*self).map(Ordering::reverse)
427 }
428}