use crate::array::print_long_array;
use crate::builder::{ArrayBuilder, GenericByteViewBuilder};
use crate::iterator::ArrayIter;
use crate::types::bytes::ByteArrayNativeType;
use crate::types::{BinaryViewType, ByteViewType, StringViewType};
use crate::{Array, ArrayAccessor, ArrayRef, GenericByteArray, OffsetSizeTrait, Scalar};
use arrow_buffer::{ArrowNativeType, Buffer, NullBuffer, ScalarBuffer};
use arrow_data::{ArrayData, ArrayDataBuilder, ByteView, MAX_INLINE_VIEW_LEN};
use arrow_schema::{ArrowError, DataType};
use core::str;
use num_traits::ToPrimitive;
use std::any::Any;
use std::cmp::Ordering;
use std::fmt::Debug;
use std::marker::PhantomData;
use std::sync::Arc;
use super::ByteArrayType;
/// An array of variable-length byte sequences stored using the "view" layout
/// (e.g. [`StringViewArray`] / [`BinaryViewArray`]).
///
/// Each element is described by a 16-byte `u128` view whose low 4 bytes hold the
/// value's length. Values no longer than [`MAX_INLINE_VIEW_LEN`] bytes are stored
/// inline in the remaining 12 bytes of the view; longer values store a 4-byte
/// prefix plus a buffer index and offset into `buffers` (see [`ByteView`]).
pub struct GenericByteViewArray<T: ByteViewType + ?Sized> {
    // Always `T::DATA_TYPE` (Utf8View or BinaryView)
    data_type: DataType,
    // One 16-byte view per element
    views: ScalarBuffer<u128>,
    // Variadic data buffers referenced by non-inline views
    buffers: Vec<Buffer>,
    // Marker tying this array to the element type `T`
    phantom: PhantomData<T>,
    // Optional validity bitmap; when present its length matches `views.len()`
    nulls: Option<NullBuffer>,
}
impl<T: ByteViewType + ?Sized> Clone for GenericByteViewArray<T> {
fn clone(&self) -> Self {
Self {
data_type: T::DATA_TYPE,
views: self.views.clone(),
buffers: self.buffers.clone(),
nulls: self.nulls.clone(),
phantom: Default::default(),
}
}
}
impl<T: ByteViewType + ?Sized> GenericByteViewArray<T> {
    /// Create a new [`GenericByteViewArray`] from the provided parts, panicking on failure
    ///
    /// # Panics
    ///
    /// Panics if [`GenericByteViewArray::try_new`] returns an error
    pub fn new(views: ScalarBuffer<u128>, buffers: Vec<Buffer>, nulls: Option<NullBuffer>) -> Self {
        Self::try_new(views, buffers, nulls).unwrap()
    }

    /// Create a new [`GenericByteViewArray`] from the provided parts, returning an error on failure
    ///
    /// # Errors
    ///
    /// Returns an error if `T::validate` rejects the views/buffers (e.g. out of
    /// bounds views or invalid UTF-8 for strings), or if the null buffer length
    /// does not match the number of views.
    pub fn try_new(
        views: ScalarBuffer<u128>,
        buffers: Vec<Buffer>,
        nulls: Option<NullBuffer>,
    ) -> Result<Self, ArrowError> {
        T::validate(&views, &buffers)?;

        if let Some(n) = nulls.as_ref() {
            if n.len() != views.len() {
                return Err(ArrowError::InvalidArgumentError(format!(
                    "Incorrect length of null buffer for {}ViewArray, expected {} got {}",
                    T::PREFIX,
                    views.len(),
                    n.len(),
                )));
            }
        }

        Ok(Self {
            data_type: T::DATA_TYPE,
            views,
            buffers,
            nulls,
            phantom: Default::default(),
        })
    }

    /// Create a new [`GenericByteViewArray`] from the provided parts, without validation
    ///
    /// # Safety
    ///
    /// Safe if [`Self::try_new`] would not error with the same inputs
    pub unsafe fn new_unchecked(
        views: ScalarBuffer<u128>,
        buffers: Vec<Buffer>,
        nulls: Option<NullBuffer>,
    ) -> Self {
        // Still perform the full validation when the `force_validate` feature
        // is enabled (debugging aid)
        if cfg!(feature = "force_validate") {
            return Self::new(views, buffers, nulls);
        }

        Self {
            data_type: T::DATA_TYPE,
            phantom: Default::default(),
            views,
            buffers,
            nulls,
        }
    }

    /// Create a new [`GenericByteViewArray`] of length `len` where all values are null
    pub fn new_null(len: usize) -> Self {
        Self {
            data_type: T::DATA_TYPE,
            // An all-zero view is a valid empty inline value
            views: vec![0; len].into(),
            buffers: vec![],
            nulls: Some(NullBuffer::new_null(len)),
            phantom: Default::default(),
        }
    }

    /// Create a new [`Scalar`] wrapping a one-element array containing `value`
    pub fn new_scalar(value: impl AsRef<T::Native>) -> Scalar<Self> {
        Scalar::new(Self::from_iter_values(std::iter::once(value)))
    }

    /// Creates a [`GenericByteViewArray`] from an iterator of values without nulls
    pub fn from_iter_values<Ptr, I>(iter: I) -> Self
    where
        Ptr: AsRef<T::Native>,
        I: IntoIterator<Item = Ptr>,
    {
        let iter = iter.into_iter();
        let mut builder = GenericByteViewBuilder::<T>::with_capacity(iter.size_hint().0);
        for v in iter {
            builder.append_value(v);
        }
        builder.finish()
    }

    /// Deconstruct this array into its constituent parts: `(views, buffers, nulls)`
    pub fn into_parts(self) -> (ScalarBuffer<u128>, Vec<Buffer>, Option<NullBuffer>) {
        (self.views, self.buffers, self.nulls)
    }

    /// Returns the views buffer (one `u128` per element)
    #[inline]
    pub fn views(&self) -> &ScalarBuffer<u128> {
        &self.views
    }

    /// Returns the buffers storing the data of non-inline views
    #[inline]
    pub fn data_buffers(&self) -> &[Buffer] {
        &self.buffers
    }

    /// Returns the element at index `i`
    ///
    /// # Panics
    ///
    /// Panics if `i` is out of bounds
    pub fn value(&self, i: usize) -> &T::Native {
        assert!(
            i < self.len(),
            "Trying to access an element at index {} from a {}ViewArray of length {}",
            i,
            T::PREFIX,
            self.len()
        );
        // SAFETY: the assert above guarantees `i < self.len()`
        unsafe { self.value_unchecked(i) }
    }

    /// Returns the element at index `idx` without bounds checking
    ///
    /// # Safety
    ///
    /// Caller is responsible for ensuring that the index is within the bounds
    /// of the array
    pub unsafe fn value_unchecked(&self, idx: usize) -> &T::Native {
        let v = unsafe { self.views.get_unchecked(idx) };
        // The low 32 bits of a view hold the value length in bytes
        let len = *v as u32;
        let b = if len <= MAX_INLINE_VIEW_LEN {
            // Short values are stored inline in the view itself
            unsafe { Self::inline_value(v, len as usize) }
        } else {
            // Longer values live in a data buffer at (buffer_index, offset)
            let view = ByteView::from(*v);
            let data = unsafe { self.buffers.get_unchecked(view.buffer_index as usize) };
            let offset = view.offset as usize;
            unsafe { data.get_unchecked(offset..offset + len as usize) }
        };
        unsafe { T::Native::from_bytes_unchecked(b) }
    }

    /// Returns the first `len` bytes of the inline portion of `view`
    ///
    /// # Safety
    ///
    /// - `view` must be a valid element of [`Self::views`]
    /// - `len` must not exceed [`MAX_INLINE_VIEW_LEN`]
    #[inline(always)]
    pub unsafe fn inline_value(view: &u128, len: usize) -> &[u8] {
        debug_assert!(len <= MAX_INLINE_VIEW_LEN as usize);
        // Inline data begins 4 bytes into the view, after the little-endian length
        unsafe {
            std::slice::from_raw_parts((view as *const u128 as *const u8).wrapping_add(4), len)
        }
    }

    /// Constructs a new iterator over the optional values of this array
    pub fn iter(&self) -> ArrayIter<&Self> {
        ArrayIter::new(self)
    }

    /// Returns an iterator over the raw bytes of every element, including null slots
    pub fn bytes_iter(&self) -> impl Iterator<Item = &[u8]> {
        self.views.iter().map(move |v| {
            let len = *v as u32;
            if len <= MAX_INLINE_VIEW_LEN {
                // SAFETY: `len` is the view's inline length
                unsafe { Self::inline_value(v, len as usize) }
            } else {
                let view = ByteView::from(*v);
                let data = &self.buffers[view.buffer_index as usize];
                let offset = view.offset as usize;
                // SAFETY: validated views guarantee the range is in bounds
                unsafe { data.get_unchecked(offset..offset + len as usize) }
            }
        })
    }

    /// Returns an iterator over the first `prefix_len` bytes of each value,
    /// including null slots; values shorter than `prefix_len` yield `&[]`
    pub fn prefix_bytes_iter(&self, prefix_len: usize) -> impl Iterator<Item = &[u8]> {
        self.views().into_iter().map(move |v| {
            let len = (*v as u32) as usize;

            if len < prefix_len {
                return &[] as &[u8];
            }

            // Every view stores the first 4 bytes of its value inline (as the
            // prefix), even when the full value lives in a buffer, so prefixes
            // of up to 4 bytes never need to touch the data buffers
            if prefix_len <= 4 || len as u32 <= MAX_INLINE_VIEW_LEN {
                unsafe { StringViewArray::inline_value(v, prefix_len) }
            } else {
                let view = ByteView::from(*v);
                let data = unsafe {
                    self.data_buffers()
                        .get_unchecked(view.buffer_index as usize)
                };
                let offset = view.offset as usize;
                unsafe { data.get_unchecked(offset..offset + prefix_len) }
            }
        })
    }

    /// Returns an iterator over the last `suffix_len` bytes of each value,
    /// including null slots; values shorter than `suffix_len` yield `&[]`
    pub fn suffix_bytes_iter(&self, suffix_len: usize) -> impl Iterator<Item = &[u8]> {
        self.views().into_iter().map(move |v| {
            let len = (*v as u32) as usize;

            if len < suffix_len {
                return &[] as &[u8];
            }

            if len as u32 <= MAX_INLINE_VIEW_LEN {
                unsafe { &StringViewArray::inline_value(v, len)[len - suffix_len..] }
            } else {
                let view = ByteView::from(*v);
                let data = unsafe {
                    self.data_buffers()
                        .get_unchecked(view.buffer_index as usize)
                };
                let offset = view.offset as usize;
                unsafe { data.get_unchecked(offset + len - suffix_len..offset + len) }
            }
        })
    }

    /// Returns a zero-copy slice of this array with the indicated offset and length
    pub fn slice(&self, offset: usize, length: usize) -> Self {
        Self {
            data_type: T::DATA_TYPE,
            // Slicing is applied eagerly to views and nulls; data buffers are shared
            views: self.views.slice(offset, length),
            buffers: self.buffers.clone(),
            nulls: self.nulls.as_ref().map(|n| n.slice(offset, length)),
            phantom: Default::default(),
        }
    }

    /// Returns a "compacted" version of this array
    ///
    /// After slicing or filtering, the views may reference only a small
    /// fraction of the underlying buffers. This copies all buffer bytes that
    /// are actually referenced by non-inline views into freshly allocated,
    /// minimal buffers:
    ///
    /// * if the referenced bytes fit in `i32::MAX`, a single buffer is produced
    /// * otherwise the views are split into contiguous groups, each referencing
    ///   at most `i32::MAX` bytes, with one new buffer per group
    pub fn gc(&self) -> Self {
        let len = self.len();
        let nulls = self.nulls().cloned();

        // No data buffers: every value is inline (or null), nothing to compact
        if self.data_buffers().is_empty() {
            return unsafe {
                GenericByteViewArray::new_unchecked(self.views().clone(), vec![], nulls)
            };
        }

        let total_large = self.total_buffer_bytes_used();
        // No view references buffer data: drop the (unreferenced) buffers
        if total_large == 0 {
            return unsafe {
                GenericByteViewArray::new_unchecked(self.views().clone(), vec![], nulls)
            };
        }

        let (views_buf, data_blocks) = if total_large < i32::MAX as usize {
            // Fast path: all non-inline data fits into a single buffer
            let mut data_buf = Vec::with_capacity(total_large);
            let views_buf: Vec<u128> = (0..len)
                .map(|i| unsafe { self.copy_view_to_buffer(i, 0, &mut data_buf) })
                .collect();
            let data_block = Buffer::from_vec(data_buf);
            let data_blocks = vec![data_block];
            (views_buf, data_blocks)
        } else {
            // Slow path: the referenced bytes exceed what a single buffer can
            // address with i32 offsets, so partition the views into contiguous
            // groups, each referencing at most `i32::MAX` buffer bytes

            // One contiguous run of views compacted into a single buffer
            struct GcCopyGroup {
                // Bytes of non-inline data referenced by the group's views
                total_buffer_bytes: usize,
                // Number of views (inline and non-inline) in the group
                total_len: usize,
            }

            impl GcCopyGroup {
                fn new(total_buffer_bytes: u32, total_len: usize) -> Self {
                    Self {
                        total_buffer_bytes: total_buffer_bytes as usize,
                        total_len,
                    }
                }
            }

            let mut groups = Vec::new();
            let mut current_length = 0;
            let mut current_elements = 0;

            for view in self.views() {
                let len = *view as u32;
                // Only non-inline values occupy buffer space
                if len > MAX_INLINE_VIEW_LEN {
                    if current_length + len > i32::MAX as u32 {
                        // Adding this view would overflow the buffer: close the
                        // current group and start a new one
                        groups.push(GcCopyGroup::new(current_length, current_elements));
                        current_length = 0;
                        current_elements = 0;
                    }
                    current_length += len;
                }
                // Every view — inline or not — belongs to a group, so the copy
                // loop below visits all `len` views exactly once. (Counting only
                // non-inline views here would drop inline views from the output.)
                current_elements += 1;
            }
            if current_elements != 0 {
                groups.push(GcCopyGroup::new(current_length, current_elements));
            }
            debug_assert!(groups.len() <= i32::MAX as usize);

            let mut views_buf = Vec::with_capacity(len);
            let mut data_blocks = Vec::with_capacity(groups.len());
            let mut current_view_idx = 0;

            for (group_idx, gc_copy_group) in groups.iter().enumerate() {
                let mut data_buf = Vec::with_capacity(gc_copy_group.total_buffer_bytes);
                let new_views = (current_view_idx..current_view_idx + gc_copy_group.total_len).map(
                    |view_idx| {
                        // SAFETY: `view_idx < len` by construction of the groups
                        unsafe {
                            self.copy_view_to_buffer(view_idx, group_idx as i32, &mut data_buf)
                        }
                    },
                );
                views_buf.extend(new_views);
                data_blocks.push(Buffer::from_vec(data_buf));
                current_view_idx += gc_copy_group.total_len;
            }
            (views_buf, data_blocks)
        };

        let views_scalar = ScalarBuffer::from(views_buf);
        // SAFETY: the views were derived from this (valid) array and every
        // rewritten offset points into the freshly built buffers
        unsafe { GenericByteViewArray::new_unchecked(views_scalar, data_blocks, nulls) }
    }

    /// Copies the non-inline value of view `i` into `data_buf` and returns the
    /// rewritten view pointing at `buffer_idx` and the new offset; inline views
    /// are returned unchanged.
    ///
    /// # Safety
    ///
    /// `i` must be a valid view index for this array
    #[inline(always)]
    unsafe fn copy_view_to_buffer(
        &self,
        i: usize,
        buffer_idx: i32,
        data_buf: &mut Vec<u8>,
    ) -> u128 {
        let raw_view = unsafe { *self.views().get_unchecked(i) };
        let mut bv = ByteView::from(raw_view);
        if bv.length <= MAX_INLINE_VIEW_LEN {
            // Inline values carry their data in the view itself
            raw_view
        } else {
            let buffer = unsafe { self.buffers.get_unchecked(bv.buffer_index as usize) };
            let start = bv.offset as usize;
            let end = start + bv.length as usize;
            let slice = unsafe { buffer.get_unchecked(start..end) };
            let new_offset = data_buf.len() as u32;
            data_buf.extend_from_slice(slice);
            bv.buffer_index = buffer_idx as u32;
            bv.offset = new_offset;
            bv.into()
        }
    }

    /// Returns the total number of buffer bytes referenced by non-inline views
    ///
    /// Note: bytes referenced by more than one view are counted once per view,
    /// and unreferenced buffer bytes are not counted at all
    pub fn total_buffer_bytes_used(&self) -> usize {
        self.views()
            .iter()
            .map(|v| {
                let len = *v as u32;
                if len > MAX_INLINE_VIEW_LEN {
                    len as usize
                } else {
                    0
                }
            })
            .sum()
    }

    /// Compares the element of `left` at `left_idx` with the element of
    /// `right` at `right_idx` without bounds checking
    ///
    /// Comparison is tiered to avoid touching the data buffers when possible:
    /// 1. both values inline: compare the views directly via
    ///    [`Self::inline_key_fast`]
    /// 2. otherwise compare the 4-byte prefixes stored in the views, falling
    ///    back to the full byte data only when the prefixes are equal
    ///
    /// # Safety
    ///
    /// Caller is responsible for ensuring that `left_idx` and `right_idx` are
    /// within the bounds of `left` and `right` respectively
    pub unsafe fn compare_unchecked(
        left: &GenericByteViewArray<T>,
        left_idx: usize,
        right: &GenericByteViewArray<T>,
        right_idx: usize,
    ) -> Ordering {
        let l_view = unsafe { left.views().get_unchecked(left_idx) };
        let l_byte_view = ByteView::from(*l_view);

        let r_view = unsafe { right.views().get_unchecked(right_idx) };
        let r_byte_view = ByteView::from(*r_view);

        let l_len = l_byte_view.length;
        let r_len = r_byte_view.length;

        if l_len <= MAX_INLINE_VIEW_LEN && r_len <= MAX_INLINE_VIEW_LEN {
            return Self::inline_key_fast(*l_view).cmp(&Self::inline_key_fast(*r_view));
        }

        // The prefix is loaded little-endian; swapping bytes yields the
        // big-endian value whose integer order matches lexicographic byte order
        let l_inlined_be = l_byte_view.prefix.swap_bytes();
        let r_inlined_be = r_byte_view.prefix.swap_bytes();
        if l_inlined_be != r_inlined_be {
            return l_inlined_be.cmp(&r_inlined_be);
        }

        // Prefixes tie: compare the full values
        let l_full_data: &[u8] = unsafe { left.value_unchecked(left_idx).as_ref() };
        let r_full_data: &[u8] = unsafe { right.value_unchecked(right_idx).as_ref() };
        l_full_data.cmp(r_full_data)
    }

    /// Builds a 128-bit comparison key for an inline view
    ///
    /// The high 96 bits are the 12 inline value bytes in big-endian comparison
    /// order (so integer order matches lexicographic byte order); the low
    /// 32 bits are the big-endian length, acting as a tie-breaker so shorter
    /// values order before their zero-padded extensions (e.g. "bar" < "bar\0")
    #[inline(always)]
    pub fn inline_key_fast(raw: u128) -> u128 {
        let raw_bytes = raw.to_le_bytes();
        // Low 4 bytes of the view are the length
        let length = raw as u32;
        let mut buf = [0u8; 16];
        // Value bytes become the most significant part of the key
        buf[0..12].copy_from_slice(&raw_bytes[4..16]);
        // Length becomes the least significant part
        buf[12..16].copy_from_slice(&length.to_be_bytes());
        u128::from_be_bytes(buf)
    }
}
impl<T: ByteViewType + ?Sized> Debug for GenericByteViewArray<T> {
    /// Formats the array as `{Prefix}ViewArray\n[\n<elements>\n]`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        writeln!(f, "{}ViewArray\n[", T::PREFIX)?;
        print_long_array(self, f, |array, index, f| {
            Debug::fmt(&array.value(index), f)
        })?;
        write!(f, "]")
    }
}
impl<T: ByteViewType + ?Sized> Array for GenericByteViewArray<T> {
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn to_data(&self) -> ArrayData {
        // Cheap: the clone only bumps buffer reference counts
        self.clone().into()
    }

    fn into_data(self) -> ArrayData {
        self.into()
    }

    fn data_type(&self) -> &DataType {
        &self.data_type
    }

    fn slice(&self, offset: usize, length: usize) -> ArrayRef {
        Arc::new(self.slice(offset, length))
    }

    fn len(&self) -> usize {
        // One view per element
        self.views.len()
    }

    fn is_empty(&self) -> bool {
        self.views.is_empty()
    }

    fn shrink_to_fit(&mut self) {
        self.views.shrink_to_fit();
        // Shrink each data buffer, then the Vec holding them
        self.buffers.iter_mut().for_each(|b| b.shrink_to_fit());
        self.buffers.shrink_to_fit();
        if let Some(nulls) = &mut self.nulls {
            nulls.shrink_to_fit();
        }
    }

    fn offset(&self) -> usize {
        // Slicing is applied eagerly to `views` and `nulls` (see `Self::slice`),
        // so the logical offset is always zero
        0
    }

    fn nulls(&self) -> Option<&NullBuffer> {
        self.nulls.as_ref()
    }

    fn logical_null_count(&self) -> usize {
        // Delegates to the physical null count derived from `self.nulls`
        self.null_count()
    }

    fn get_buffer_memory_size(&self) -> usize {
        // Data buffers + views buffer + optional null bitmap
        let mut sum = self.buffers.iter().map(|b| b.capacity()).sum::<usize>();
        sum += self.views.inner().capacity();
        if let Some(x) = &self.nulls {
            sum += x.buffer().capacity()
        }
        sum
    }

    fn get_array_memory_size(&self) -> usize {
        std::mem::size_of::<Self>() + self.get_buffer_memory_size()
    }
}
impl<'a, T: ByteViewType + ?Sized> ArrayAccessor for &'a GenericByteViewArray<T> {
    type Item = &'a T::Native;

    // Delegates to the inherent (bounds-checked) accessor; UFCS avoids any
    // ambiguity with this trait method of the same name
    fn value(&self, index: usize) -> Self::Item {
        GenericByteViewArray::value(self, index)
    }

    // Delegates to the inherent unchecked accessor; caller must ensure
    // `index` is in bounds
    unsafe fn value_unchecked(&self, index: usize) -> Self::Item {
        unsafe { GenericByteViewArray::value_unchecked(self, index) }
    }
}
impl<'a, T: ByteViewType + ?Sized> IntoIterator for &'a GenericByteViewArray<T> {
    type Item = Option<&'a T::Native>;
    type IntoIter = ArrayIter<Self>;

    /// Returns an iterator over the optional values of the array.
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<T: ByteViewType + ?Sized> From<ArrayData> for GenericByteViewArray<T> {
    fn from(value: ArrayData) -> Self {
        // Buffer 0 holds the u128 views; all remaining buffers are the
        // variadic data blocks referenced by non-inline views
        let views = value.buffers()[0].clone();
        let views = ScalarBuffer::new(views, value.offset(), value.len());
        let buffers = value.buffers()[1..].to_vec();

        // NOTE(review): no re-validation here — assumes `value` was validated
        // as a {Binary,String}View layout when the ArrayData was constructed
        Self {
            data_type: T::DATA_TYPE,
            views,
            buffers,
            nulls: value.nulls().cloned(),
            phantom: Default::default(),
        }
    }
}
impl<FROM, V> From<&GenericByteArray<FROM>> for GenericByteViewArray<V>
where
    FROM: ByteArrayType,
    FROM::Offset: OffsetSizeTrait + ToPrimitive,
    V: ByteViewType<Native = FROM::Native>,
{
    /// Converts an offset-based byte array into a view array.
    ///
    /// When the source values buffer is addressable with `u32` offsets it is
    /// reused wholesale as a single view block (the value bytes are not
    /// copied); otherwise the values are copied element by element.
    fn from(byte_array: &GenericByteArray<FROM>) -> Self {
        let offsets = byte_array.offsets();

        // Views address their buffer with a u32 offset, so the source values
        // buffer can only be reused if its final offset fits in u32
        let can_reuse_buffer = match offsets.last() {
            Some(offset) => offset.as_usize() < u32::MAX as usize,
            None => true,
        };

        if can_reuse_buffer {
            let len = byte_array.len();
            let mut views_builder = GenericByteViewBuilder::<V>::with_capacity(len);
            let str_values_buf = byte_array.values().clone();
            let block = views_builder.append_block(str_values_buf);
            // Each consecutive pair of offsets delimits one value
            for (i, w) in offsets.windows(2).enumerate() {
                let offset = w[0].as_usize();
                let end = w[1].as_usize();
                let length = end - offset;

                if byte_array.is_null(i) {
                    views_builder.append_null();
                } else {
                    // SAFETY: the offsets are valid for the appended block
                    unsafe {
                        views_builder.append_view_unchecked(block, offset as u32, length as u32)
                    }
                }
            }
            assert_eq!(views_builder.len(), len);
            views_builder.finish()
        } else {
            // Fallback: copy values through the iterator-based constructor
            GenericByteViewArray::<V>::from_iter(byte_array.iter())
        }
    }
}
impl<T: ByteViewType + ?Sized> From<GenericByteViewArray<T>> for ArrayData {
    fn from(mut array: GenericByteViewArray<T>) -> Self {
        let len = array.len();
        // ArrayData layout: buffer 0 is the views, data blocks follow
        array.buffers.insert(0, array.views.into_inner());
        let builder = ArrayDataBuilder::new(T::DATA_TYPE)
            .len(len)
            .buffers(array.buffers)
            .nulls(array.nulls);

        // SAFETY: the array's invariants were checked when it was constructed
        unsafe { builder.build_unchecked() }
    }
}
impl<'a, Ptr, T> FromIterator<&'a Option<Ptr>> for GenericByteViewArray<T>
where
    Ptr: AsRef<T::Native> + 'a,
    T: ByteViewType + ?Sized,
{
    /// Builds the array by borrowing each element and delegating to the
    /// owned-`Option` `FromIterator` implementation.
    fn from_iter<I: IntoIterator<Item = &'a Option<Ptr>>>(iter: I) -> Self {
        let borrowed = iter
            .into_iter()
            .map(|item| item.as_ref().map(|value| value.as_ref()));
        borrowed.collect()
    }
}
impl<Ptr, T: ByteViewType + ?Sized> FromIterator<Option<Ptr>> for GenericByteViewArray<T>
where
    Ptr: AsRef<T::Native>,
{
    /// Builds the array by appending each optional value to a builder sized
    /// from the iterator's lower size hint.
    fn from_iter<I: IntoIterator<Item = Option<Ptr>>>(iter: I) -> Self {
        let values = iter.into_iter();
        let (lower_bound, _) = values.size_hint();
        let mut builder = GenericByteViewBuilder::<T>::with_capacity(lower_bound);
        for value in values {
            builder.append_option(value);
        }
        builder.finish()
    }
}
/// A [`GenericByteViewArray`] of `[u8]`
pub type BinaryViewArray = GenericByteViewArray<BinaryViewType>;

impl BinaryViewArray {
    /// Convert this [`BinaryViewArray`] to a [`StringViewArray`]
    ///
    /// # Errors
    ///
    /// Returns an error if the values are not valid UTF-8
    pub fn to_string_view(self) -> Result<StringViewArray, ArrowError> {
        StringViewType::validate(self.views(), self.data_buffers())?;
        // SAFETY: the UTF-8 validation above succeeded
        unsafe { Ok(self.to_string_view_unchecked()) }
    }

    /// Convert this [`BinaryViewArray`] to a [`StringViewArray`] without
    /// validating UTF-8
    ///
    /// # Safety
    ///
    /// Caller must ensure that every value is valid UTF-8
    pub unsafe fn to_string_view_unchecked(self) -> StringViewArray {
        unsafe { StringViewArray::new_unchecked(self.views, self.buffers, self.nulls) }
    }
}
impl From<Vec<&[u8]>> for BinaryViewArray {
    /// Builds a non-nullable array from the given byte slices.
    fn from(v: Vec<&[u8]>) -> Self {
        let values = v.into_iter();
        Self::from_iter_values(values)
    }
}
impl From<Vec<Option<&[u8]>>> for BinaryViewArray {
    /// Builds a nullable array from the given optional byte slices.
    fn from(v: Vec<Option<&[u8]>>) -> Self {
        Self::from_iter(v)
    }
}
/// A [`GenericByteViewArray`] of `str`
pub type StringViewArray = GenericByteViewArray<StringViewType>;

impl StringViewArray {
    /// Convert this [`StringViewArray`] to a [`BinaryViewArray`].
    pub fn to_binary_view(self) -> BinaryViewArray {
        // SAFETY: any valid UTF-8 data is also valid binary data
        unsafe { BinaryViewArray::new_unchecked(self.views, self.buffers, self.nulls) }
    }

    /// Returns `true` when every non-null value is ASCII (nulls are ignored).
    pub fn is_ascii(&self) -> bool {
        self.iter().flatten().all(|v| v.is_ascii())
    }
}
impl From<Vec<&str>> for StringViewArray {
    /// Builds a non-nullable array from the given string slices.
    fn from(v: Vec<&str>) -> Self {
        let values = v.into_iter();
        Self::from_iter_values(values)
    }
}
impl From<Vec<Option<&str>>> for StringViewArray {
    /// Builds a nullable array from the given optional string slices.
    fn from(v: Vec<Option<&str>>) -> Self {
        Self::from_iter(v)
    }
}
impl From<Vec<String>> for StringViewArray {
    /// Builds a non-nullable array from the given owned strings.
    fn from(v: Vec<String>) -> Self {
        let owned = v.into_iter();
        Self::from_iter_values(owned)
    }
}
impl From<Vec<Option<String>>> for StringViewArray {
    /// Builds a nullable array from the given optional owned strings.
    fn from(v: Vec<Option<String>>) -> Self {
        Self::from_iter(v)
    }
}
#[cfg(test)]
mod tests {
    use crate::builder::{BinaryViewBuilder, StringViewBuilder};
    use crate::types::BinaryViewType;
    use crate::{
        Array, BinaryViewArray, GenericBinaryArray, GenericByteViewArray, StringViewArray,
    };
    use arrow_buffer::{Buffer, ScalarBuffer};
    use arrow_data::{ByteView, MAX_INLINE_VIEW_LEN};
    use rand::prelude::StdRng;
    use rand::{Rng, SeedableRng};

    // Small fixed block size used by the gc tests so values spill into
    // multiple small data buffers
    const BLOCK_SIZE: u32 = 8;

    // Round-trips a mix of inline (<= 12 byte) and buffer-backed string values
    #[test]
    fn try_new_string() {
        let array = StringViewArray::from_iter_values(vec![
            "hello",
            "world",
            "lulu",
            "large payload over 12 bytes",
        ]);
        assert_eq!(array.value(0), "hello");
        assert_eq!(array.value(3), "large payload over 12 bytes");
    }

    // Same as above but for binary values
    #[test]
    fn try_new_binary() {
        let array = BinaryViewArray::from_iter_values(vec![
            b"hello".as_slice(),
            b"world".as_slice(),
            b"lulu".as_slice(),
            b"large payload over 12 bytes".as_slice(),
        ]);
        assert_eq!(array.value(0), b"hello");
        assert_eq!(array.value(3), b"large payload over 12 bytes");
    }

    // An unused builder finishes into an empty array
    #[test]
    fn try_new_empty_string() {
        let array = {
            let mut builder = StringViewBuilder::new();
            builder.finish()
        };
        assert!(array.is_empty());
    }

    #[test]
    fn try_new_empty_binary() {
        let array = {
            let mut builder = BinaryViewBuilder::new();
            builder.finish()
        };
        assert!(array.is_empty());
    }

    // append_value / append_null / append_option round-trip for strings
    #[test]
    fn test_append_string() {
        let array = {
            let mut builder = StringViewBuilder::new();
            builder.append_value("hello");
            builder.append_null();
            builder.append_option(Some("large payload over 12 bytes"));
            builder.finish()
        };
        assert_eq!(array.value(0), "hello");
        assert!(array.is_null(1));
        assert_eq!(array.value(2), "large payload over 12 bytes");
    }

    #[test]
    fn test_append_binary() {
        let array = {
            let mut builder = BinaryViewBuilder::new();
            builder.append_value(b"hello");
            builder.append_null();
            builder.append_option(Some(b"large payload over 12 bytes"));
            builder.finish()
        };
        assert_eq!(array.value(0), b"hello");
        assert!(array.is_null(1));
        assert_eq!(array.value(2), b"large payload over 12 bytes");
    }

    // A value larger than the fixed block size forces the builder to start a
    // new in-progress block, producing a second data buffer
    #[test]
    fn test_in_progress_recreation() {
        let array = {
            let mut builder = StringViewBuilder::new().with_fixed_block_size(14);
            builder.append_value("large payload over 12 bytes");
            builder.append_option(Some("another large payload over 12 bytes that double than the first one, so that we can trigger the in_progress in builder re-created"));
            builder.finish()
        };
        assert_eq!(array.value(0), "large payload over 12 bytes");
        assert_eq!(
            array.value(1),
            "another large payload over 12 bytes that double than the first one, so that we can trigger the in_progress in builder re-created"
        );
        assert_eq!(2, array.buffers.len());
    }

    // Validation must reject a view whose buffer index is out of range
    #[test]
    #[should_panic(expected = "Invalid buffer index at 0: got index 3 but only has 1 buffers")]
    fn new_with_invalid_view_data() {
        let v = "large payload over 12 bytes";
        let view = ByteView::new(13, &v.as_bytes()[0..4])
            .with_buffer_index(3)
            .with_offset(1);
        let views = ScalarBuffer::from(vec![view.into()]);
        let buffers = vec![Buffer::from_slice_ref(v)];
        StringViewArray::new(views, buffers, None);
    }

    // Validation must reject non-UTF-8 buffer data for StringViewArray
    #[test]
    #[should_panic(
        expected = "Encountered non-UTF-8 data at index 0: invalid utf-8 sequence of 1 bytes from index 0"
    )]
    fn new_with_invalid_utf8_data() {
        let v: Vec<u8> = vec![
            // invalid UTF-8 leading byte sequence
            0xf0, 0x80, 0x80, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00,
        ];
        let view = ByteView::new(v.len() as u32, &v[0..4]);
        let views = ScalarBuffer::from(vec![view.into()]);
        let buffers = vec![Buffer::from_slice_ref(v)];
        StringViewArray::new(views, buffers, None);
    }

    // Validation must reject inline views whose unused bytes are not zero
    #[test]
    #[should_panic(expected = "View at index 0 contained non-zero padding for string of length 1")]
    fn new_with_invalid_zero_padding() {
        let mut data = [0; 12];
        data[0] = b'H';
        data[11] = 1; // non-zero padding byte past the 1-byte value

        let mut view_buffer = [0; 16];
        view_buffer[0..4].copy_from_slice(&1u32.to_le_bytes());
        view_buffer[4..].copy_from_slice(&data);

        let view = ByteView::from(u128::from_le_bytes(view_buffer));
        let views = ScalarBuffer::from(vec![view.into()]);
        let buffers = vec![];
        StringViewArray::new(views, buffers, None);
    }

    // Validation must reject a view whose 4-byte prefix disagrees with the
    // bytes stored in the data buffer
    #[test]
    #[should_panic(expected = "Mismatch between embedded prefix and data")]
    fn test_mismatch_between_embedded_prefix_and_data() {
        let input_str_1 = "Hello, Rustaceans!";
        let input_str_2 = "Hallo, Rustaceans!";
        let length = input_str_1.len() as u32;
        assert!(input_str_1.len() > 12);

        let mut view_buffer = [0; 16];
        view_buffer[0..4].copy_from_slice(&length.to_le_bytes());
        view_buffer[4..8].copy_from_slice(&input_str_1.as_bytes()[0..4]);
        view_buffer[8..12].copy_from_slice(&0u32.to_le_bytes());
        view_buffer[12..].copy_from_slice(&0u32.to_le_bytes());
        let view = ByteView::from(u128::from_le_bytes(view_buffer));
        let views = ScalarBuffer::from(vec![view.into()]);
        let buffers = vec![Buffer::from_slice_ref(input_str_2.as_bytes())];

        StringViewArray::new(views, buffers, None);
    }

    // gc on a multi-buffer array (and on slices of it) must preserve values
    // while consolidating the data buffers
    #[test]
    fn test_gc() {
        let test_data = [
            Some("longer than 12 bytes"),
            Some("short"),
            Some("t"),
            Some("longer than 12 bytes"),
            None,
            Some("short"),
        ];

        let array = {
            let mut builder = StringViewBuilder::new().with_fixed_block_size(8);
            test_data.into_iter().for_each(|v| builder.append_option(v));
            builder.finish()
        };
        assert!(array.buffers.len() > 1);

        fn check_gc(to_test: &StringViewArray) {
            let gc = to_test.gc();
            assert_ne!(to_test.data_buffers().len(), gc.data_buffers().len());

            to_test.iter().zip(gc.iter()).for_each(|(a, b)| {
                assert_eq!(a, b);
            });
            assert_eq!(to_test.len(), gc.len());
        }

        check_gc(&array);
        check_gc(&array.slice(1, 3));
        check_gc(&array.slice(2, 1));
        check_gc(&array.slice(2, 2));
        check_gc(&array.slice(3, 1));
    }

    // gc of an empty array yields an empty array with no buffers
    #[test]
    fn test_gc_empty_array() {
        let array = StringViewBuilder::new()
            .with_fixed_block_size(BLOCK_SIZE)
            .finish();
        let gced = array.gc();

        assert_eq!(gced.len(), 0);
        assert_eq!(gced.null_count(), 0);
        assert!(
            gced.data_buffers().is_empty(),
            "Expected no data buffers for empty array"
        );
    }

    // gc of an all-inline array drops the (unreferenced) data buffers
    #[test]
    fn test_gc_all_inline() {
        let mut builder = StringViewBuilder::new().with_fixed_block_size(BLOCK_SIZE);
        for _ in 0..100 {
            let s = "A".repeat(MAX_INLINE_VIEW_LEN as usize);
            builder.append_option(Some(&s));
        }
        let array = builder.finish();
        let gced = array.gc();

        assert_eq!(
            gced.data_buffers().len(),
            0,
            "Should have no data buffers for inline values"
        );
        assert_eq!(gced.len(), 100);
        array.iter().zip(gced.iter()).for_each(|(orig, got)| {
            assert_eq!(orig, got, "Inline value mismatch after gc");
        });
    }

    // gc of an array of exclusively buffer-backed values keeps the data
    #[test]
    fn test_gc_all_large() {
        let mut builder = StringViewBuilder::new().with_fixed_block_size(BLOCK_SIZE);
        let large_str = "X".repeat(MAX_INLINE_VIEW_LEN as usize + 5);
        for _ in 0..50 {
            builder.append_option(Some(&large_str));
        }
        let array = builder.finish();
        let gced = array.gc();

        assert!(
            !gced.data_buffers().is_empty(),
            "Expected data buffers for large values"
        );
        assert_eq!(gced.len(), 50);
        array.iter().zip(gced.iter()).for_each(|(orig, got)| {
            assert_eq!(orig, got, "Large view mismatch after gc");
        });
    }

    // gc of an all-null array keeps length/null-count and stores no data
    #[test]
    fn test_gc_all_nulls() {
        let mut builder = StringViewBuilder::new().with_fixed_block_size(BLOCK_SIZE);
        for _ in 0..20 {
            builder.append_null();
        }
        let array = builder.finish();
        let gced = array.gc();

        assert_eq!(gced.len(), 20);
        assert_eq!(gced.null_count(), 20);
        assert!(
            gced.data_buffers().is_empty(),
            "No data should be stored for nulls"
        );
    }

    // Randomized mix of nulls, inline and buffer-backed values, gc'd through
    // several different slices
    #[test]
    fn test_gc_random_mixed_and_slices() {
        let mut rng = StdRng::seed_from_u64(42);
        let mut builder = StringViewBuilder::new().with_fixed_block_size(BLOCK_SIZE);
        let mut original: Vec<Option<String>> = Vec::new();

        for _ in 0..200 {
            if rng.random_bool(0.1) {
                builder.append_null();
                original.push(None);
            } else {
                // Lengths straddle the inline/non-inline boundary
                let len = rng.random_range(0..(MAX_INLINE_VIEW_LEN * 2));
                let s: String = "A".repeat(len as usize);
                builder.append_option(Some(&s));
                original.push(Some(s));
            }
        }
        let array = builder.finish();

        for (offset, slice_len) in &[(0, 50), (10, 100), (150, 30)] {
            let sliced = array.slice(*offset, *slice_len);
            let gced = sliced.gc();

            let expected: Vec<Option<&str>> = original[*offset..(*offset + *slice_len)]
                .iter()
                .map(|opt| opt.as_deref())
                .collect();
            assert_eq!(gced.len(), *slice_len, "Slice length mismatch");
            gced.iter().zip(expected.iter()).for_each(|(got, expect)| {
                assert_eq!(got, *expect, "Value mismatch in mixed slice after gc");
            });
        }
    }

    // gc must split the output into multiple buffers when the referenced data
    // exceeds i32::MAX bytes; copies > 4 GiB, hence skipped under Miri
    #[test]
    #[cfg_attr(miri, ignore)]
    fn test_gc_huge_array() {
        let block_len: usize = 128 * 1024 * 1024; // 128 MiB per source block
        let num_views: usize = 36; // 36 * 128 MiB > u32::MAX bytes in total
        let buffer = Buffer::from_vec(vec![0xAB; block_len]);
        let buffer2 = Buffer::from_vec(vec![0xFF; block_len]);
        let mut builder = BinaryViewBuilder::new();
        let block_id = builder.append_block(buffer);
        for _ in 0..num_views / 2 {
            builder
                .try_append_view(block_id, 0, block_len as u32)
                .expect("append view into 128MiB block");
        }
        let block_id2 = builder.append_block(buffer2);
        for _ in 0..num_views / 2 {
            builder
                .try_append_view(block_id2, 0, block_len as u32)
                .expect("append view into 128MiB block");
        }
        let array = builder.finish();

        let total = array.total_buffer_bytes_used();
        assert!(
            total > u32::MAX as usize,
            "Expected total non-inline bytes to exceed 4 GiB, got {}",
            total
        );

        let gced = array.gc();

        assert_eq!(gced.len(), num_views, "Length mismatch after gc");
        assert_eq!(gced.null_count(), 0, "Null count mismatch after gc");
        assert_ne!(
            gced.data_buffers().len(),
            1,
            "gc with huge buffer should not consolidate data into a single buffer"
        );

        array.iter().zip(gced.iter()).for_each(|(orig, got)| {
            assert_eq!(orig, got, "Value mismatch after gc on huge array");
        });
    }

    // Arrays with identical logical values compare equal regardless of how
    // their data is distributed across blocks
    #[test]
    fn test_eq() {
        let test_data = [
            Some("longer than 12 bytes"),
            None,
            Some("short"),
            Some("again, this is longer than 12 bytes"),
        ];

        let array1 = {
            let mut builder = StringViewBuilder::new().with_fixed_block_size(8);
            test_data.into_iter().for_each(|v| builder.append_option(v));
            builder.finish()
        };
        let array2 = {
            let mut builder = StringViewBuilder::new().with_fixed_block_size(100);
            test_data.into_iter().for_each(|v| builder.append_option(v));
            builder.finish()
        };
        assert_eq!(array1, array1.clone());
        assert_eq!(array2, array2.clone());
        assert_eq!(array1, array2);
    }

    // inline_key_fast must order inline views identically to lexicographic
    // byte comparison, with length as tie-breaker (e.g. "bar" < "bar\0")
    #[test]
    fn test_inline_key_fast_various_lengths_and_lexical() {
        // Builds a raw u128 view for an inline value: little-endian length in
        // the low 4 bytes, value bytes immediately after
        fn make_raw_inline(length: u32, data: &[u8]) -> u128 {
            assert!(length as usize <= 12, "Inline length must be ≤ 12");
            assert!(
                data.len() == length as usize,
                "Data length must match `length`"
            );

            let mut raw_bytes = [0u8; 16];
            raw_bytes[0..4].copy_from_slice(&length.to_le_bytes());
            raw_bytes[4..(4 + data.len())].copy_from_slice(data);
            u128::from_le_bytes(raw_bytes)
        }

        // Strictly increasing (lexicographically) inputs of varying lengths
        let test_inputs: Vec<&[u8]> = vec![
            b"a",
            b"aa",
            b"aaa",
            b"aab",
            b"abcd",
            b"abcde",
            b"abcdef",
            b"abcdefg",
            b"abcdefgh",
            b"abcdefghi",
            b"abcdefghij",
            b"abcdefghijk",
            b"abcdefghijkl",
            b"backend one",
            b"backend two",
            b"bar",
            b"bar\0",
            b"than12Byt",
            b"than12Bytes",
            b"than12Bytes\0",
            b"than12Bytesx",
            b"than12Bytex",
            b"than12Bytez",
            b"xyy",
            b"xyz",
            b"xza",
        ];

        let array: GenericBinaryArray<i32> =
            GenericBinaryArray::from(test_inputs.iter().map(|s| Some(*s)).collect::<Vec<_>>());

        for i in 0..array.len() - 1 {
            let v1 = array.value(i);
            let v2 = array.value(i + 1);
            // Sanity check: the fixture itself is strictly increasing
            assert!(v1 < v2, "Array compare failed: {v1:?} !< {v2:?}");

            let key1 = GenericByteViewArray::<BinaryViewType>::inline_key_fast(make_raw_inline(
                v1.len() as u32,
                v1,
            ));
            let key2 = GenericByteViewArray::<BinaryViewType>::inline_key_fast(make_raw_inline(
                v2.len() as u32,
                v2,
            ));

            assert!(
                key1 < key2,
                "Key compare failed: key({v1:?})=0x{key1:032x} !< key({v2:?})=0x{key2:032x}",
            );
        }
    }
}