use core::cmp::Ordering;
use core::hash;
use core::ops::{Bound, RangeBounds};
use crate::StrQueue;
#[allow(unused_imports)]
use crate::BoundExt;
/// A read-only byte view into a [`StrQueue`], possibly split across the
/// two segments returned by the queue's internal buffer (`as_slices`).
///
/// The logical byte sequence is `former` followed by `latter`; the two
/// slices must not overlap in memory (enforced by `from_slices`).
#[derive(Debug, Clone, Copy, Eq)]
pub struct BytesRange<'a> {
    // First (front) part of the logical byte sequence.
    pub(super) former: &'a [u8],
    // Second (back) part; logically follows `former`.
    pub(super) latter: &'a [u8],
}
impl<'a> BytesRange<'a> {
    /// Creates a `BytesRange` covering `range` (logical byte indices)
    /// of the given queue's buffered content.
    #[must_use]
    pub(crate) fn new<R>(queue: &'a StrQueue, range: R) -> Self
    where
        R: RangeBounds<usize>,
    {
        // The queue's storage may be split in two; `former` logically
        // precedes `latter`.
        let (former, latter) = queue.inner.as_slices();
        // NOTE(review): `cloned()` on `Bound` is presumably supplied by the
        // imported `BoundExt` on older toolchains, colliding with the std
        // method once stabilized — hence the lint allow. Confirm against
        // `BoundExt`'s definition.
        #[allow(unstable_name_collisions)] Self::from_slices_and_bounds(
            former,
            latter,
            range.start_bound().cloned(),
            range.end_bound().cloned(),
        )
    }
    /// Builds a range directly from two slices.
    ///
    /// # Panics
    ///
    /// Panics if the two non-empty slices overlap in memory.
    #[must_use]
    fn from_slices(former: &'a [u8], latter: &'a [u8]) -> Self {
        let former_range = former.as_ptr_range();
        let latter_range = latter.as_ptr_range();
        // Overlap test: one slice's start pointer lies inside the other.
        // Empty slices are exempt (they occupy no bytes).
        if !(former.is_empty() || latter.is_empty())
            && (former_range.contains(&latter_range.start)
                || latter_range.contains(&former_range.start))
        {
            panic!("[precondition] `former` and `latter` should not overlap");
        }
        Self { former, latter }
    }
    /// Builds the sub-range of `former ++ latter` selected by the given
    /// bounds, normalizing every bound form to an inclusive
    /// `start..=end_included` pair before slicing.
    #[must_use]
    pub(super) fn from_slices_and_bounds(
        former: &'a [u8],
        latter: &'a [u8],
        start: Bound<usize>,
        end: Bound<usize>,
    ) -> Self {
        // Full range: no index arithmetic needed.
        if matches!(start, Bound::Unbounded) && matches!(end, Bound::Unbounded) {
            return Self::from_slices(former, latter);
        }
        let former_len = former.len();
        let latter_len = latter.len();
        let len = former_len + latter_len;
        if len == 0 {
            // Empty input: any bounds select the empty range. The
            // `&former[former_len..]` / `&latter[..0]` slicing keeps the
            // stored pointers inside the original allocations.
            return Self::from_slices(&former[former_len..], &latter[..0]);
        }
        let start = match start {
            Bound::Included(v) => v,
            // `usize::MAX + 1` would overflow; an excluded-MAX start can
            // only ever select the empty range.
            Bound::Excluded(usize::MAX) => {
                return Self::from_slices(&former[former_len..], &latter[..0])
            }
            Bound::Excluded(v) => v + 1,
            Bound::Unbounded => 0,
        };
        // Normalize the end bound to an *inclusive* index; `Excluded(0)`
        // is special-cased so `v - 1` below cannot underflow.
        let end_included = match end {
            Bound::Included(v) => v,
            Bound::Excluded(0) => return Self::from_slices(&former[former_len..], &latter[..0]),
            Bound::Excluded(v) => v - 1,
            Bound::Unbounded => len - 1, // `len > 0` was checked above
        };
        debug_assert!(end_included < len);
        if start > end_included {
            // Inverted bounds select the empty range.
            return Self::from_slices(&former[former_len..], &latter[..0]);
        };
        if end_included < former_len {
            // The whole selection lies within `former`.
            debug_assert!(start < former_len, "`start <= end_included` holds");
            Self::from_slices(&former[start..=end_included], &latter[..0])
        } else {
            // The selection ends inside `latter` but may start in either
            // slice; `start.min(former_len)` clamps the `former` cut.
            Self::from_slices(
                &former[start.min(former_len)..],
                &latter[..=(end_included - former_len)],
            )
        }
    }
}
impl<'a> BytesRange<'a> {
    /// Returns the sub-range of this range selected by `range`, with
    /// indices relative to this range's logical byte sequence.
    #[must_use]
    pub fn range<R>(&self, range: R) -> Self
    where
        R: RangeBounds<usize>,
    {
        // `cloned()` may collide with a stabilized std method; keep the
        // lint allow on each extraction.
        #[allow(unstable_name_collisions)]
        let start = range.start_bound().cloned();
        #[allow(unstable_name_collisions)]
        let end = range.end_bound().cloned();
        Self::from_slices_and_bounds(self.former, self.latter, start, end)
    }
}
impl<'a> BytesRange<'a> {
    /// Total number of bytes across both underlying slices.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        let Self { former, latter } = self;
        former.len() + latter.len()
    }
    /// Returns `true` when the range holds no bytes at all.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
impl<'a> BytesRange<'a> {
    /// Empties the range in place; both parts become zero-length slices
    /// that still point into their original allocations.
    pub fn clear(&mut self) {
        let former_len = self.former.len();
        self.former = &self.former[former_len..];
        self.latter = &self.latter[..0];
    }
    /// Drops the first `trim_len` bytes from the front of the range.
    ///
    /// # Panics
    ///
    /// Panics if `trim_len` exceeds the range's length.
    pub(super) fn trim_start(&mut self, trim_len: usize) {
        let former_len = self.former.len();
        if trim_len > former_len + self.latter.len() {
            panic!("[precondition] length to trim should not be larger than the range");
        }
        match trim_len.checked_sub(former_len) {
            // Trim reaches into `latter`: drop all of `former` and the
            // remainder from the front of `latter`.
            Some(latter_trim_len) => {
                self.former = &self.former[former_len..];
                self.latter = &self.latter[latter_trim_len..];
            }
            // `trim_len < former_len`: only `former` shrinks.
            None => self.former = &self.former[trim_len..],
        }
    }
    /// Removes and returns the first byte of the range, if any.
    pub fn pop(&mut self) -> Option<u8> {
        if let Some((&b, rest)) = self.former.split_first() {
            self.former = rest;
            Some(b)
        } else if let Some((&b, rest)) = self.latter.split_first() {
            self.latter = rest;
            Some(b)
        } else {
            None
        }
    }
}
impl<'a> BytesRange<'a> {
    /// Returns the first byte of the range without consuming it.
    pub(super) fn first(&self) -> Option<u8> {
        self.bytes().next()
    }
    /// Returns the byte at logical index `i`, or `None` when out of bounds.
    #[must_use]
    pub(super) fn get_byte(&self, i: usize) -> Option<u8> {
        match self.former.get(i) {
            Some(&b) => Some(b),
            // `i >= self.former.len()` here, so the subtraction is safe.
            None => self.latter.get(i - self.former.len()).copied(),
        }
    }
    /// Index of the first occurrence of either needle (fallback scan).
    #[cfg(not(feature = "memchr"))]
    pub(super) fn position2(&self, needle1: u8, needle2: u8) -> Option<usize> {
        self.bytes().position(|b| [needle1, needle2].contains(&b))
    }
    /// Index of the first occurrence of either needle (memchr-accelerated).
    #[cfg(feature = "memchr")]
    pub(super) fn position2(&self, needle1: u8, needle2: u8) -> Option<usize> {
        if let Some(pos) = memchr::memchr2(needle1, needle2, self.former) {
            return Some(pos);
        }
        // A hit in `latter` is offset by the length of `former`.
        memchr::memchr2(needle1, needle2, self.latter).map(|pos| self.former.len() + pos)
    }
}
impl<'a> BytesRange<'a> {
    /// Iterates over the range's bytes: all of `former`, then all of
    /// `latter`.
    pub(super) fn bytes(&self) -> impl Iterator<Item = u8> + '_ {
        let front = self.former.iter().copied();
        let back = self.latter.iter().copied();
        front.chain(back)
    }
}
impl<'a> BytesRange<'a> {
    /// Lexicographically compares two ranges of *equal* length.
    ///
    /// The two ranges may be split at different points, so the bytes are
    /// compared piecewise across the three regions induced by the two
    /// split positions.
    ///
    /// # Panics
    ///
    /// Panics if `self.len() != rhs.len()`.
    #[must_use]
    fn cmp_self_eqsize(&self, rhs: &Self) -> Ordering {
        assert_eq!(
            self.len(),
            rhs.len(),
            "[precondition] length of `self` and `rhs` should be the same"
        );
        let self_former_len = self.former.len();
        let rhs_former_len = rhs.former.len();
        if self_former_len > rhs_former_len {
            // `self.former` extends past `rhs.former` by this many bytes,
            // which align with the front of `rhs.latter`.
            let rhs_latter_split = self_former_len - rhs_former_len;
            self.former[..rhs_former_len]
                .cmp(rhs.former)
                .then_with(|| self.former[rhs_former_len..].cmp(&rhs.latter[..rhs_latter_split]))
                .then_with(|| self.latter.cmp(&rhs.latter[rhs_latter_split..]))
        } else {
            // Mirror case: `rhs.former` is at least as long as `self.former`.
            let self_latter_split = rhs_former_len - self_former_len;
            self.former
                .cmp(&rhs.former[..self_former_len])
                .then_with(|| self.latter[..self_latter_split].cmp(&rhs.former[self_former_len..]))
                .then_with(|| self.latter[self_latter_split..].cmp(rhs.latter))
        }
    }
    /// Lexicographic comparison for ranges of possibly different lengths:
    /// compare common-length prefixes, then break ties by length (the
    /// shorter range sorts first), matching slice ordering semantics.
    #[must_use]
    pub(super) fn cmp_self(&self, other: &Self) -> Ordering {
        let self_len = self.len();
        let other_len = other.len();
        let len_cmp = self_len.cmp(&other_len);
        let prefix_cmp = match len_cmp {
            Ordering::Greater => self.range(..other_len).cmp_self_eqsize(other),
            Ordering::Equal => self.cmp_self_eqsize(other),
            Ordering::Less => self.cmp_self_eqsize(&other.range(..self_len)),
        };
        prefix_cmp.then(len_cmp)
    }
    /// Lexicographically compares this (possibly split) range against a
    /// contiguous byte slice.
    #[must_use]
    fn cmp_slice(&self, rhs: &[u8]) -> Ordering {
        let former_len = self.former.len();
        let rhs_len = rhs.len();
        if former_len > rhs_len {
            // `rhs` is exhausted within `former`; slice `cmp` already
            // orders a longer sequence after an equal shorter prefix.
            self.former.cmp(rhs)
        } else {
            self.former
                .cmp(&rhs[..former_len])
                .then_with(|| self.latter.cmp(&rhs[former_len..]))
        }
    }
}
impl hash::Hash for BytesRange<'_> {
    /// Hashes the logical byte sequence one byte at a time.
    ///
    /// `PartialEq` treats two ranges with the same bytes but different
    /// `former`/`latter` split points as equal, so the hash must not
    /// depend on where the split falls. `Hasher::write` explicitly does
    /// NOT guarantee that `write(a); write(b)` hashes like `write(ab)`,
    /// so feeding the two slices as separate writes (the previous
    /// `u8::hash_slice` per slice) could violate the
    /// `k1 == k2 => hash(k1) == hash(k2)` contract for some hashers.
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        for b in self.bytes() {
            state.write_u8(b);
        }
    }
}
impl PartialEq for BytesRange<'_> {
    /// Equality of the logical byte sequences (split-agnostic).
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // Equal-size comparison requires the length guard up front.
        if self.len() != other.len() {
            return false;
        }
        self.cmp_self_eqsize(other) == Ordering::Equal
    }
}
impl PartialOrd for BytesRange<'_> {
    /// Total order; delegates to [`Ord`].
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
impl Ord for BytesRange<'_> {
    /// Lexicographic byte order with length as the tie-breaker.
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        Self::cmp_self(self, other)
    }
}
impl PartialEq<[u8]> for BytesRange<'_> {
#[inline]
fn eq(&self, other: &[u8]) -> bool {
(self.len() == other.len()) && self.cmp_slice(other).is_eq()
}
}
impl PartialOrd<[u8]> for BytesRange<'_> {
#[inline]
fn partial_cmp(&self, other: &[u8]) -> Option<Ordering> {
Some(self.cmp_slice(other))
}
}
impl PartialEq<BytesRange<'_>> for [u8] {
#[inline]
fn eq(&self, other: &BytesRange<'_>) -> bool {
other.eq(self)
}
}
impl PartialOrd<BytesRange<'_>> for [u8] {
#[inline]
fn partial_cmp(&self, other: &BytesRange<'_>) -> Option<Ordering> {
other.partial_cmp(self).map(Ordering::reverse)
}
}
impl PartialEq<&[u8]> for BytesRange<'_> {
#[inline]
fn eq(&self, other: &&[u8]) -> bool {
self.eq(*other)
}
}
impl PartialOrd<&[u8]> for BytesRange<'_> {
#[inline]
fn partial_cmp(&self, other: &&[u8]) -> Option<Ordering> {
self.partial_cmp(*other)
}
}
impl PartialEq<BytesRange<'_>> for &[u8] {
#[inline]
fn eq(&self, other: &BytesRange<'_>) -> bool {
other.eq(*self)
}
}
impl PartialOrd<BytesRange<'_>> for &[u8] {
#[inline]
fn partial_cmp(&self, other: &BytesRange<'_>) -> Option<Ordering> {
other.partial_cmp(*self).map(Ordering::reverse)
}
}