use crate::buffer::ScalarBuffer;
use crate::ArrowNativeType;
/// A buffer of strictly increasing, positive integers that stores the
/// run-ends of a run-end encoded array, together with a logical
/// `offset`/`len` window for zero-copy slicing.
///
/// Each run-end is the exclusive logical upper bound of its run: physical
/// run `i` covers the logical interval `[run_ends[i-1], run_ends[i])`
/// (with an implicit lower bound of `0` for the first run).
#[derive(Debug, Clone)]
pub struct RunEndBuffer<E: ArrowNativeType> {
    // Cumulative run-ends; strictly increasing and > 0 when built via `new`.
    run_ends: ScalarBuffer<E>,
    // Logical length of this (possibly sliced) view.
    len: usize,
    // Logical offset of this view; slicing adjusts this, never `run_ends`.
    offset: usize,
}
impl<E> RunEndBuffer<E>
where
    E: ArrowNativeType,
{
    /// Create a new [`RunEndBuffer`] from a buffer of `run_ends`, validating
    /// that the logical window `[offset, offset + len)` is covered by them.
    ///
    /// # Panics
    ///
    /// Panics if:
    /// - `run_ends` is not strictly increasing
    /// - `len != 0` and `run_ends` is empty
    /// - `len != 0` and the first run-end is not greater than zero
    /// - `offset + len` exceeds the last run-end
    /// - `offset + len` cannot be represented as an `E`
    pub fn new(run_ends: ScalarBuffer<E>, offset: usize, len: usize) -> Self {
        assert!(
            run_ends.windows(2).all(|w| w[0] < w[1]),
            "run-ends not strictly increasing"
        );
        if len != 0 {
            assert!(!run_ends.is_empty(), "non-empty slice but empty run-ends");
            // saturating_add avoids usize overflow before the checked
            // conversion to E, which then reports an out-of-range end.
            let end = E::from_usize(offset.saturating_add(len))
                .expect("offset + len exceeds the range of the run-end type");
            assert!(
                *run_ends.first().unwrap() > E::usize_as(0),
                "run-ends not greater than 0"
            );
            assert!(
                *run_ends.last().unwrap() >= end,
                "slice beyond bounds of run-ends"
            );
        }
        Self {
            run_ends,
            offset,
            len,
        }
    }

    /// Create a new [`RunEndBuffer`] without validation.
    ///
    /// # Safety
    ///
    /// The caller must uphold the invariants checked by [`Self::new`]:
    /// `run_ends` is strictly increasing, its first element is greater than
    /// zero, and `offset + len` does not exceed the last run-end. Violating
    /// them makes the index methods return meaningless results.
    pub unsafe fn new_unchecked(run_ends: ScalarBuffer<E>, offset: usize, len: usize) -> Self {
        Self {
            run_ends,
            offset,
            len,
        }
    }

    /// Returns the logical offset into the run-ends stored by this buffer.
    #[inline]
    pub fn offset(&self) -> usize {
        self.offset
    }

    /// Returns the logical length of this view.
    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if this view is logically empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Returns the underlying run-ends as a slice, ignoring `offset`/`len`.
    #[inline]
    pub fn values(&self) -> &[E] {
        &self.run_ends
    }

    /// Returns the maximum run-end (the total un-sliced logical length),
    /// or `0` if there are no run-ends.
    #[inline]
    pub fn max_value(&self) -> usize {
        self.values().last().copied().unwrap_or_default().as_usize()
    }

    /// Returns the physical index of the run containing `logical_index`.
    ///
    /// `logical_index` is relative to this view's offset and should be
    /// `< self.len()`; larger values may yield an out-of-bounds physical
    /// index.
    pub fn get_physical_index(&self, logical_index: usize) -> usize {
        let logical_index = E::usize_as(self.offset + logical_index);
        // Run `i` covers logical positions `[run_ends[i-1], run_ends[i])`,
        // so the containing run is the first run-end strictly greater than
        // the logical index. `partition_point` finds exactly that boundary
        // (run-ends are strictly increasing per `new`), without the
        // panicking `partial_cmp(..).unwrap()` of a manual binary search.
        self.run_ends.partition_point(|&end| end <= logical_index)
    }

    /// Returns the physical index of the first run in this view,
    /// i.e. the run containing logical index `0`.
    pub fn get_start_physical_index(&self) -> usize {
        if self.offset == 0 || self.len == 0 {
            return 0;
        }
        self.get_physical_index(0)
    }

    /// Returns the physical index of the last run in this view,
    /// i.e. the run containing logical index `len - 1`.
    pub fn get_end_physical_index(&self) -> usize {
        if self.len == 0 {
            return 0;
        }
        // Fast path: a view that extends to the final run-end must end in
        // the last physical run — no search needed.
        if self.max_value() == self.offset + self.len {
            return self.values().len() - 1;
        }
        self.get_physical_index(self.len - 1)
    }

    /// Returns a zero-copy slice of this buffer, adjusting only the logical
    /// `offset` and `len`; the run-ends themselves are shared.
    ///
    /// # Panics
    ///
    /// Panics if `offset + len` exceeds `self.len()`.
    pub fn slice(&self, offset: usize, len: usize) -> Self {
        assert!(
            offset.saturating_add(len) <= self.len,
            "the length + offset of the sliced RunEndBuffer cannot exceed the existing length"
        );
        Self {
            run_ends: self.run_ends.clone(),
            offset: self.offset + offset,
            len,
        }
    }

    /// Returns a reference to the inner [`ScalarBuffer`] of run-ends.
    pub fn inner(&self) -> &ScalarBuffer<E> {
        &self.run_ends
    }

    /// Consumes `self`, returning the inner [`ScalarBuffer`] of run-ends.
    pub fn into_inner(self) -> ScalarBuffer<E> {
        self.run_ends
    }
}
#[cfg(test)]
mod tests {
    use crate::buffer::RunEndBuffer;

    #[test]
    fn test_zero_length_slice() {
        // Two runs: logical [0, 1) -> physical 0, logical [1, 4) -> physical 1.
        let run_ends = RunEndBuffer::new(vec![1_i32, 4_i32].into(), 0, 4);
        assert_eq!(run_ends.get_start_physical_index(), 0);
        assert_eq!(run_ends.get_end_physical_index(), 1);
        assert_eq!(run_ends.get_physical_index(3), 1);

        // Zero-length slices at any offset report the empty physical range.
        for start in 0..4 {
            let zero = run_ends.slice(start, 0);
            assert_eq!(zero.get_start_physical_index(), 0);
            assert_eq!(zero.get_end_physical_index(), 0);
        }

        // A buffer with no run-ends at all behaves the same way.
        let empty = RunEndBuffer::new(Vec::<i32>::new().into(), 0, 0);
        assert_eq!(empty.get_start_physical_index(), 0);
        assert_eq!(empty.get_end_physical_index(), 0);
    }
}