use super::Buffer;
use crate::alloc::Deallocation;
use crate::{
alloc,
bytes::Bytes,
native::{ArrowNativeType, ToByteSlice},
util::bit_util,
};
use std::mem;
use std::ptr::NonNull;
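/// A [`MutableBuffer`] is Arrow's interface for building a [`Buffer`] from items
/// or slices of items. [`Buffer`]s created this way are guaranteed to be aligned
/// along cache lines, in multiples of 64 bytes.
///
/// Use [`MutableBuffer::push`] to insert an item, [`MutableBuffer::extend_from_slice`]
/// to insert many items, and `into` to convert it to [`Buffer`].
///
/// A usage sketch; the `arrow::buffer` import path is an assumption, adjust it to
/// wherever this crate re-exports these types:
///
/// ```
/// use arrow::buffer::{Buffer, MutableBuffer};
/// let mut buffer = MutableBuffer::new(0);
/// buffer.push(256u32);
/// buffer.extend_from_slice(&[1u32]);
/// let buffer: Buffer = buffer.into();
/// assert_eq!(buffer.as_slice(), &[0u8, 1, 0, 0, 1, 0, 0, 0]); // little-endian
/// ```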
#[derive(Debug)]
pub struct MutableBuffer {
data: NonNull<u8>,
len: usize,
capacity: usize,
}
impl MutableBuffer {
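    /// Allocates a new [`MutableBuffer`] with at least `capacity` bytes; an alias
    /// of [`MutableBuffer::with_capacity`].
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let buffer = MutableBuffer::new(63);
    /// assert_eq!(buffer.capacity(), 64); // rounded up to a multiple of 64
    /// assert_eq!(buffer.len(), 0);
    /// ```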
#[inline]
pub fn new(capacity: usize) -> Self {
Self::with_capacity(capacity)
}
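    /// Allocates a new [`MutableBuffer`] with at least `capacity` bytes. The
    /// request is rounded up to the next multiple of 64 so the allocation is
    /// aligned along cache lines.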
#[inline]
pub fn with_capacity(capacity: usize) -> Self {
let capacity = bit_util::round_upto_multiple_of_64(capacity);
let ptr = alloc::allocate_aligned(capacity);
Self {
data: ptr,
len: 0,
capacity,
}
}
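    /// Allocates a new [`MutableBuffer`] of `len` zeroed bytes.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let buffer = MutableBuffer::from_len_zeroed(127);
    /// assert_eq!(buffer.len(), 127);
    /// assert!(buffer.capacity() >= 127);
    /// assert!(buffer.as_slice().iter().all(|&b| b == 0));
    /// ```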
pub fn from_len_zeroed(len: usize) -> Self {
let new_capacity = bit_util::round_upto_multiple_of_64(len);
let ptr = alloc::allocate_aligned_zeroed(new_capacity);
Self {
data: ptr,
len,
capacity: new_capacity,
}
}
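    /// Creates a [`MutableBuffer`] from `bytes` without copying, provided the
    /// allocation is owned by Arrow ([`Deallocation::Arrow`]); otherwise the
    /// original `bytes` are handed back as the error.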
pub(crate) fn from_bytes(bytes: Bytes) -> Result<Self, Bytes> {
if !matches!(bytes.deallocation(), Deallocation::Arrow(_)) {
return Err(bytes);
}
let len = bytes.len();
let capacity = bytes.capacity();
let ptr = bytes.ptr();
mem::forget(bytes);
Ok(Self {
data: ptr,
len,
capacity,
})
}
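    /// Creates a [`MutableBuffer`] sized to hold a null bitmap for `len` items,
    /// i.e. `ceil(len / 8)` bytes, with every bit initialized to 0 (null).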
pub fn new_null(len: usize) -> Self {
let num_bytes = bit_util::ceil(len, 8);
MutableBuffer::from_len_zeroed(num_bytes)
}
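    /// Sets the first `end` **bytes** (not bits, despite the name) to `0xFF` if
    /// `val` is `true`, or to `0x00` otherwise, and sets the length to `end`.
    ///
    /// # Panics
    ///
    /// Panics if `end` exceeds this buffer's capacity.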
pub fn with_bitset(mut self, end: usize, val: bool) -> Self {
assert!(end <= self.capacity);
let v = if val { 255 } else { 0 };
unsafe {
std::ptr::write_bytes(self.data.as_ptr(), v, end);
self.len = end;
}
self
}
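    /// Zeroes `count` bytes starting at byte offset `start`, clearing the
    /// corresponding validity bits.
    ///
    /// # Panics
    ///
    /// Panics if `start + count` exceeds this buffer's capacity.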
pub fn set_null_bits(&mut self, start: usize, count: usize) {
assert!(start + count <= self.capacity);
unsafe {
std::ptr::write_bytes(self.data.as_ptr().add(start), 0, count);
}
}
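    /// Ensures capacity for at least `additional` more bytes, reallocating if
    /// necessary. Growth at least doubles the capacity to amortize repeated calls.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.reserve(253);
    /// assert_eq!(buffer.capacity(), 256); // rounded up to a multiple of 64
    /// ```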
#[inline(always)]
pub fn reserve(&mut self, additional: usize) {
let required_cap = self.len + additional;
if required_cap > self.capacity {
let (ptr, new_capacity) =
unsafe { reallocate(self.data, self.capacity, required_cap) };
self.data = ptr;
self.capacity = new_capacity;
}
}
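    /// Truncates this buffer to `len` bytes; a no-op if `len` is greater than the
    /// current length. The capacity is left unchanged.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.extend_from_slice(&[5u8, 4, 3, 2, 1]);
    /// buffer.truncate(2);
    /// assert_eq!(buffer.as_slice(), &[5u8, 4]);
    /// ```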
#[inline(always)]
pub fn truncate(&mut self, len: usize) {
if len > self.len {
return;
}
self.len = len;
}
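    /// Resizes this buffer to `new_len` bytes, filling any newly exposed bytes
    /// with `value`. Shrinking never reallocates; see
    /// [`MutableBuffer::shrink_to_fit`] to reclaim memory.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.resize(253, 2);
    /// assert_eq!(buffer.len(), 253);
    /// assert_eq!(buffer.as_slice()[252], 2);
    /// ```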
#[inline(always)]
pub fn resize(&mut self, new_len: usize, value: u8) {
if new_len > self.len {
let diff = new_len - self.len;
self.reserve(diff);
unsafe { self.data.as_ptr().add(self.len).write_bytes(value, diff) };
}
self.len = new_len;
}
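    /// Shrinks the capacity as close to `len` as the 64-byte rounding allows,
    /// reallocating only if that actually reduces the capacity.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let mut buffer = MutableBuffer::new(128);
    /// assert_eq!(buffer.capacity(), 128);
    /// buffer.push(1u64);
    /// buffer.shrink_to_fit();
    /// assert!(buffer.capacity() >= 8 && buffer.capacity() < 128);
    /// ```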
pub fn shrink_to_fit(&mut self) {
let new_capacity = bit_util::round_upto_multiple_of_64(self.len);
if new_capacity < self.capacity {
let ptr =
unsafe { alloc::reallocate(self.data, self.capacity, new_capacity) };
self.data = ptr;
self.capacity = new_capacity;
}
}
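    /// Returns whether this buffer is empty.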
#[inline]
pub const fn is_empty(&self) -> bool {
self.len == 0
}
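    /// Returns the length (number of bytes written) of this buffer.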
#[inline]
pub const fn len(&self) -> usize {
self.len
}
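    /// Returns the capacity of this buffer, in bytes.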
#[inline]
pub const fn capacity(&self) -> usize {
self.capacity
}
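    /// Clears all existing data from this buffer, setting the length to 0.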
pub fn clear(&mut self) {
self.len = 0
}
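    /// Returns the written data of this buffer as a byte slice.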
pub fn as_slice(&self) -> &[u8] {
self
}
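    /// Returns the written data of this buffer as a mutable byte slice.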
pub fn as_slice_mut(&mut self) -> &mut [u8] {
self
}
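    /// Returns a raw pointer to this buffer's memory; aligned to 64 bytes.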
#[inline]
pub const fn as_ptr(&self) -> *const u8 {
self.data.as_ptr()
}
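    /// Returns a mutable raw pointer to this buffer's memory; aligned to 64 bytes.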
#[inline]
pub fn as_mut_ptr(&mut self) -> *mut u8 {
self.data.as_ptr()
}
#[deprecated(
since = "2.0.0",
note = "This method is deprecated in favour of `into` from the trait `Into`."
)]
pub fn freeze(self) -> Buffer {
self.into_buffer()
}
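    /// Converts this [`MutableBuffer`] into an immutable [`Buffer`] without
    /// copying; ownership of the allocation is transferred to the new [`Buffer`].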
#[inline]
pub(super) fn into_buffer(self) -> Buffer {
let bytes = unsafe {
Bytes::new(self.data, self.len, Deallocation::Arrow(self.capacity))
};
std::mem::forget(self);
Buffer::from_bytes(bytes)
}
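    /// Views the written bytes of this buffer as a mutable slice of `T`.
    ///
    /// # Panics
    ///
    /// Panics if the buffer is not properly aligned for `T`, or if its length
    /// is not a multiple of `size_of::<T>()`.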
pub fn typed_data_mut<T: ArrowNativeType>(&mut self) -> &mut [T] {
let (prefix, offsets, suffix) =
unsafe { self.as_slice_mut().align_to_mut::<T>() };
assert!(prefix.is_empty() && suffix.is_empty());
offsets
}
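    /// Views the written bytes of this buffer as a slice of `T`.
    ///
    /// # Panics
    ///
    /// Panics if the buffer is not properly aligned for `T`, or if its length
    /// is not a multiple of `size_of::<T>()`.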
pub fn typed_data<T: ArrowNativeType>(&self) -> &[T] {
let (prefix, offsets, suffix) = unsafe { self.as_slice().align_to::<T>() };
assert!(prefix.is_empty() && suffix.is_empty());
offsets
}
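    /// Extends this buffer with the native byte representation of every item in
    /// `items`, reserving capacity as needed.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.extend_from_slice(&[2u32, 0]);
    /// assert_eq!(buffer.len(), 8); // u32 is 4 bytes
    /// assert_eq!(buffer.as_slice(), &[2u8, 0, 0, 0, 0, 0, 0, 0]); // little-endian
    /// ```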
#[inline]
pub fn extend_from_slice<T: ArrowNativeType>(&mut self, items: &[T]) {
let len = items.len();
let additional = len * std::mem::size_of::<T>();
self.reserve(additional);
unsafe {
let src = items.as_ptr() as *const u8;
let dst = self.data.as_ptr().add(self.len);
std::ptr::copy_nonoverlapping(src, dst, additional)
}
self.len += additional;
}
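    /// Extends this buffer with the native byte representation of `item`,
    /// reserving capacity as needed.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.push(256u32);
    /// assert_eq!(buffer.len(), 4); // u32 is 4 bytes
    /// assert_eq!(buffer.as_slice(), &[0u8, 1, 0, 0]); // little-endian
    /// ```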
#[inline]
pub fn push<T: ToByteSlice>(&mut self, item: T) {
let additional = std::mem::size_of::<T>();
self.reserve(additional);
unsafe {
let src = item.to_byte_slice().as_ptr();
let dst = self.data.as_ptr().add(self.len);
std::ptr::copy_nonoverlapping(src, dst, additional);
}
self.len += additional;
}
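    /// Extends this buffer with the byte representation of `item`, without
    /// checking or growing the capacity.
    ///
    /// # Safety
    ///
    /// The caller must have reserved at least `size_of::<T>()` spare bytes,
    /// e.g. via [`MutableBuffer::reserve`].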
#[inline]
pub unsafe fn push_unchecked<T: ToByteSlice>(&mut self, item: T) {
let additional = std::mem::size_of::<T>();
let src = item.to_byte_slice().as_ptr();
let dst = self.data.as_ptr().add(self.len);
std::ptr::copy_nonoverlapping(src, dst, additional);
self.len += additional;
}
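    /// Extends this buffer by `additional` zeroed bytes.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.extend_zeros(3);
    /// assert_eq!(buffer.len(), 3);
    /// assert_eq!(buffer.as_slice(), &[0u8, 0, 0]);
    /// ```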
#[inline]
pub fn extend_zeros(&mut self, additional: usize) {
self.resize(self.len + additional, 0);
}
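    /// Sets the length of this buffer.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that the first `len` bytes are initialized.
    /// `len` must not exceed the capacity (enforced by an assertion).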
#[inline]
pub unsafe fn set_len(&mut self, len: usize) {
assert!(len <= self.capacity());
self.len = len;
}
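    /// Packs `len` booleans produced by `f(i)` into a bitmap buffer, least
    /// significant bit first, processing 64 bits at a time.
    ///
    /// ```
    /// # use arrow::buffer::MutableBuffer; // assumed re-export path
    /// let buffer = MutableBuffer::collect_bool(8, |i| i % 2 == 0);
    /// assert_eq!(buffer.as_slice(), &[0b01010101]);
    /// ```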
#[inline]
pub fn collect_bool<F: FnMut(usize) -> bool>(len: usize, mut f: F) -> Self {
let mut buffer = Self::new(bit_util::ceil(len, 64) * 8);
let chunks = len / 64;
let remainder = len % 64;
for chunk in 0..chunks {
let mut packed = 0;
for bit_idx in 0..64 {
let i = bit_idx + chunk * 64;
packed |= (f(i) as u64) << bit_idx;
}
unsafe { buffer.push_unchecked(packed) }
}
if remainder != 0 {
let mut packed = 0;
for bit_idx in 0..remainder {
let i = bit_idx + chunks * 64;
packed |= (f(i) as u64) << bit_idx;
}
unsafe { buffer.push_unchecked(packed) }
}
buffer.truncate(bit_util::ceil(len, 8));
buffer
}
}
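/// Grows an allocation to hold at least `new_capacity` bytes, rounding the
/// request up to a multiple of 64 and at least doubling the old capacity to
/// amortize repeated growth.
///
/// # Safety
///
/// `ptr` must denote an allocation of exactly `old_capacity` bytes made by
/// this module's aligned allocator.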
#[cold]
unsafe fn reallocate(
ptr: NonNull<u8>,
old_capacity: usize,
new_capacity: usize,
) -> (NonNull<u8>, usize) {
let new_capacity = bit_util::round_upto_multiple_of_64(new_capacity);
let new_capacity = std::cmp::max(new_capacity, old_capacity * 2);
let ptr = alloc::reallocate(ptr, old_capacity, new_capacity);
(ptr, new_capacity)
}
impl<A: ArrowNativeType> Extend<A> for MutableBuffer {
#[inline]
fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
let iterator = iter.into_iter();
self.extend_from_iter(iterator)
}
}
impl MutableBuffer {
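    /// Extends this buffer from `iterator`, reserving upfront based on the
    /// lower `size_hint` and writing through a raw pointer while capacity
    /// lasts, then falling back to `push` for any remaining items.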
#[inline]
pub(super) fn extend_from_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
&mut self,
mut iterator: I,
) {
let item_size = std::mem::size_of::<T>();
let (lower, _) = iterator.size_hint();
let additional = lower * item_size;
self.reserve(additional);
let mut len = SetLenOnDrop::new(&mut self.len);
let mut dst = unsafe { self.data.as_ptr().add(len.local_len) };
let capacity = self.capacity;
while len.local_len + item_size <= capacity {
if let Some(item) = iterator.next() {
unsafe {
let src = item.to_byte_slice().as_ptr();
std::ptr::copy_nonoverlapping(src, dst, item_size);
dst = dst.add(item_size);
}
len.local_len += item_size;
} else {
break;
}
}
drop(len);
iterator.for_each(|item| self.push(item));
}
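    /// Creates a [`MutableBuffer`] from an iterator of `T`, writing each item's
    /// bytes into a single upfront allocation.
    ///
    /// # Safety
    ///
    /// The iterator's upper `size_hint` must be `Some` and exactly equal to the
    /// number of items yielded (a "trusted length" iterator).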
#[inline]
pub unsafe fn from_trusted_len_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
iterator: I,
) -> Self {
let item_size = std::mem::size_of::<T>();
let (_, upper) = iterator.size_hint();
let upper = upper.expect("from_trusted_len_iter requires an upper limit");
let len = upper * item_size;
let mut buffer = MutableBuffer::new(len);
let mut dst = buffer.data.as_ptr();
for item in iterator {
let src = item.to_byte_slice().as_ptr();
std::ptr::copy_nonoverlapping(src, dst, item_size);
dst = dst.add(item_size);
}
assert_eq!(
dst.offset_from(buffer.data.as_ptr()) as usize,
len,
"Trusted iterator length was not accurately reported"
);
buffer.len = len;
buffer
}
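    /// Creates a bitmap [`MutableBuffer`] from an iterator of `bool`.
    ///
    /// # Safety
    ///
    /// Same contract as [`MutableBuffer::from_trusted_len_iter`]: the upper
    /// `size_hint` must be `Some` and exact.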
#[inline]
pub unsafe fn from_trusted_len_iter_bool<I: Iterator<Item = bool>>(
mut iterator: I,
) -> Self {
let (_, upper) = iterator.size_hint();
        let len = upper.expect("from_trusted_len_iter_bool requires an upper limit");
Self::collect_bool(len, |_| iterator.next().unwrap())
}
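    /// Fallible version of [`MutableBuffer::from_trusted_len_iter`] that stops
    /// at, and returns, the first error.
    ///
    /// # Safety
    ///
    /// The iterator's upper `size_hint` must be `Some` and exactly equal to the
    /// number of items yielded.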
#[inline]
pub unsafe fn try_from_trusted_len_iter<
E,
T: ArrowNativeType,
I: Iterator<Item = std::result::Result<T, E>>,
>(
iterator: I,
) -> std::result::Result<Self, E> {
let item_size = std::mem::size_of::<T>();
let (_, upper) = iterator.size_hint();
let upper = upper.expect("try_from_trusted_len_iter requires an upper limit");
let len = upper * item_size;
let mut buffer = MutableBuffer::new(len);
let mut dst = buffer.data.as_ptr();
for item in iterator {
let item = item?;
let src = item.to_byte_slice().as_ptr();
std::ptr::copy_nonoverlapping(src, dst, item_size);
dst = dst.add(item_size);
}
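        // Verify the iterator reported its length accurately, then publish it.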
unsafe fn finalize_buffer(dst: *mut u8, buffer: &mut MutableBuffer, len: usize) {
assert_eq!(
dst.offset_from(buffer.data.as_ptr()) as usize,
len,
"Trusted iterator length was not accurately reported"
);
buffer.len = len;
}
finalize_buffer(dst, &mut buffer, len);
Ok(buffer)
}
}
impl std::ops::Deref for MutableBuffer {
type Target = [u8];
fn deref(&self) -> &[u8] {
unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len) }
}
}
impl std::ops::DerefMut for MutableBuffer {
fn deref_mut(&mut self) -> &mut [u8] {
unsafe { std::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
}
}
impl Drop for MutableBuffer {
fn drop(&mut self) {
unsafe { alloc::free_aligned(self.data, self.capacity) };
}
}
impl PartialEq for MutableBuffer {
fn eq(&self, other: &MutableBuffer) -> bool {
if self.len != other.len {
return false;
}
if self.capacity != other.capacity {
return false;
}
self.as_slice() == other.as_slice()
}
}
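// SAFETY: MutableBuffer exclusively owns its allocation and only exposes it
// through `&self`/`&mut self`, so sharing and sending it across threads is sound.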
unsafe impl Sync for MutableBuffer {}
unsafe impl Send for MutableBuffer {}
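/// Writes a locally tracked length back to the buffer when dropped, keeping the
/// length consistent even if the extending iterator panics (the same technique
/// used by `std::vec::Vec`).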
struct SetLenOnDrop<'a> {
len: &'a mut usize,
local_len: usize,
}
impl<'a> SetLenOnDrop<'a> {
#[inline]
fn new(len: &'a mut usize) -> Self {
SetLenOnDrop {
local_len: *len,
len,
}
}
}
impl Drop for SetLenOnDrop<'_> {
#[inline]
fn drop(&mut self) {
*self.len = self.local_len;
}
}
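/// Collects booleans into a bitmap buffer, packing 8 values per byte, least
/// significant bit first.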
impl std::iter::FromIterator<bool> for MutableBuffer {
fn from_iter<I>(iter: I) -> Self
where
I: IntoIterator<Item = bool>,
{
let mut iterator = iter.into_iter();
let mut result = {
let byte_capacity: usize = iterator.size_hint().0.saturating_add(7) / 8;
MutableBuffer::new(byte_capacity)
};
loop {
let mut exhausted = false;
let mut byte_accum: u8 = 0;
let mut mask: u8 = 1;
while mask != 0 {
if let Some(value) = iterator.next() {
byte_accum |= match value {
true => mask,
false => 0,
};
mask <<= 1;
} else {
exhausted = true;
break;
}
}
if exhausted && mask == 1 {
break;
}
if result.len() == result.capacity() {
                let additional_byte_capacity = 1usize
                    .saturating_add(iterator.size_hint().0.saturating_add(7) / 8);
result.reserve(additional_byte_capacity)
}
unsafe { result.push_unchecked(byte_accum) };
if exhausted {
break;
}
}
result
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_mutable_new() {
let buf = MutableBuffer::new(63);
assert_eq!(64, buf.capacity());
assert_eq!(0, buf.len());
assert!(buf.is_empty());
}
#[test]
fn test_mutable_extend_from_slice() {
let mut buf = MutableBuffer::new(100);
buf.extend_from_slice(b"hello");
assert_eq!(5, buf.len());
assert_eq!(b"hello", buf.as_slice());
buf.extend_from_slice(b" world");
assert_eq!(11, buf.len());
assert_eq!(b"hello world", buf.as_slice());
buf.clear();
assert_eq!(0, buf.len());
buf.extend_from_slice(b"hello arrow");
assert_eq!(11, buf.len());
assert_eq!(b"hello arrow", buf.as_slice());
}
#[test]
fn mutable_extend_from_iter() {
let mut buf = MutableBuffer::new(0);
buf.extend(vec![1u32, 2]);
assert_eq!(8, buf.len());
assert_eq!(&[1u8, 0, 0, 0, 2, 0, 0, 0], buf.as_slice());
buf.extend(vec![3u32, 4]);
assert_eq!(16, buf.len());
assert_eq!(
&[1u8, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0],
buf.as_slice()
);
}
#[test]
fn mutable_extend_from_iter_unaligned_u64() {
let mut buf = MutableBuffer::new(16);
buf.push(1_u8);
buf.extend([1_u64]);
assert_eq!(9, buf.len());
assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
}
#[test]
fn mutable_extend_from_slice_unaligned_u64() {
let mut buf = MutableBuffer::new(16);
buf.extend_from_slice(&[1_u8]);
buf.extend_from_slice(&[1_u64]);
assert_eq!(9, buf.len());
assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
}
#[test]
fn mutable_push_unaligned_u64() {
let mut buf = MutableBuffer::new(16);
buf.push(1_u8);
buf.push(1_u64);
assert_eq!(9, buf.len());
assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
}
#[test]
fn mutable_push_unchecked_unaligned_u64() {
let mut buf = MutableBuffer::new(16);
unsafe {
buf.push_unchecked(1_u8);
buf.push_unchecked(1_u64);
}
assert_eq!(9, buf.len());
assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
}
#[test]
fn test_from_trusted_len_iter() {
let iter = vec![1u32, 2].into_iter();
let buf = unsafe { Buffer::from_trusted_len_iter(iter) };
assert_eq!(8, buf.len());
assert_eq!(&[1u8, 0, 0, 0, 2, 0, 0, 0], buf.as_slice());
}
#[test]
fn test_mutable_reserve() {
let mut buf = MutableBuffer::new(1);
assert_eq!(64, buf.capacity());
buf.reserve(10);
assert_eq!(64, buf.capacity());
buf.reserve(80);
assert_eq!(128, buf.capacity());
buf.reserve(129);
assert_eq!(256, buf.capacity());
}
#[test]
fn test_mutable_resize() {
let mut buf = MutableBuffer::new(1);
assert_eq!(64, buf.capacity());
assert_eq!(0, buf.len());
buf.resize(20, 0);
assert_eq!(64, buf.capacity());
assert_eq!(20, buf.len());
buf.resize(10, 0);
assert_eq!(64, buf.capacity());
assert_eq!(10, buf.len());
buf.resize(100, 0);
assert_eq!(128, buf.capacity());
assert_eq!(100, buf.len());
buf.resize(30, 0);
assert_eq!(128, buf.capacity());
assert_eq!(30, buf.len());
buf.resize(0, 0);
assert_eq!(128, buf.capacity());
assert_eq!(0, buf.len());
}
#[test]
fn test_mutable_into() {
let mut buf = MutableBuffer::new(1);
buf.extend_from_slice(b"aaaa bbbb cccc dddd");
assert_eq!(19, buf.len());
assert_eq!(64, buf.capacity());
assert_eq!(b"aaaa bbbb cccc dddd", buf.as_slice());
let immutable_buf: Buffer = buf.into();
assert_eq!(19, immutable_buf.len());
assert_eq!(64, immutable_buf.capacity());
assert_eq!(b"aaaa bbbb cccc dddd", immutable_buf.as_slice());
}
#[test]
fn test_mutable_equal() {
let mut buf = MutableBuffer::new(1);
let mut buf2 = MutableBuffer::new(1);
buf.extend_from_slice(&[0xaa]);
buf2.extend_from_slice(&[0xaa, 0xbb]);
assert!(buf != buf2);
buf.extend_from_slice(&[0xbb]);
assert_eq!(buf, buf2);
buf2.reserve(65);
assert!(buf != buf2);
}
#[test]
fn test_mutable_shrink_to_fit() {
let mut buffer = MutableBuffer::new(128);
assert_eq!(buffer.capacity(), 128);
buffer.push(1);
buffer.push(2);
buffer.shrink_to_fit();
assert!(buffer.capacity() >= 64 && buffer.capacity() < 128);
}
}