use std::alloc::{handle_alloc_error, Layout};
use std::mem;
use std::ptr::NonNull;

use crate::alloc::{Deallocation, ALIGNMENT};
use crate::{
    bytes::Bytes,
    native::{ArrowNativeType, ToByteSlice},
    util::bit_util,
};

use super::Buffer;

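/// A [`MutableBuffer`] is a mutable byte buffer used to build a [`Buffer`] out of
/// items or slices of items. Allocations are aligned to [`ALIGNMENT`], and the
/// capacities produced by `with_capacity` and `reserve` are rounded up to a
/// multiple of 64 bytes. Invariants: `len <= capacity` always holds, and when
/// `capacity == 0` the `data` pointer is dangling and never dereferenced.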
#[derive(Debug)]
pub struct MutableBuffer {
    data: NonNull<u8>,
    len: usize,
    layout: Layout,
}

impl MutableBuffer {
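    /// Allocates a new [`MutableBuffer`] with at least `capacity` bytes of
    /// capacity; see [`Self::with_capacity`].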
    #[inline]
    pub fn new(capacity: usize) -> Self {
        Self::with_capacity(capacity)
    }

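    /// Allocates a new [`MutableBuffer`] whose capacity is `capacity` rounded
    /// up to a multiple of 64 bytes.
    ///
    /// # Panics
    ///
    /// Panics if the rounded capacity is not a valid [`Layout`] size
    /// (i.e. it exceeds `isize::MAX`).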
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let capacity = bit_util::round_upto_multiple_of_64(capacity);
        let layout = Layout::from_size_align(capacity, ALIGNMENT)
            .expect("failed to create layout for MutableBuffer");
        let data = match layout.size() {
            0 => dangling_ptr(),
            _ => {
                let raw_ptr = unsafe { std::alloc::alloc(layout) };
                NonNull::new(raw_ptr).unwrap_or_else(|| handle_alloc_error(layout))
            }
        };
        Self {
            data,
            len: 0,
            layout,
        }
    }

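    /// Allocates a new [`MutableBuffer`] of `len` zeroed bytes. Note that,
    /// unlike [`Self::with_capacity`], `len` is not rounded up, so the
    /// resulting capacity is exactly `len`.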
    pub fn from_len_zeroed(len: usize) -> Self {
        let layout = Layout::from_size_align(len, ALIGNMENT).unwrap();
        let data = match layout.size() {
            0 => dangling_ptr(),
            _ => {
                let raw_ptr = unsafe { std::alloc::alloc_zeroed(layout) };
                NonNull::new(raw_ptr).unwrap_or_else(|| handle_alloc_error(layout))
            }
        };
        Self { data, len, layout }
    }

    #[inline]
    #[deprecated(note = "Use From<Vec<T>>")]
    pub fn from_vec<T: ArrowNativeType>(vec: Vec<T>) -> Self {
        Self::from(vec)
    }

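    /// Converts the provided [`Bytes`] into a `MutableBuffer` without copying,
    /// if its deallocation is [`Deallocation::Standard`] (i.e. it is owned by
    /// the standard allocator); otherwise returns the original `Bytes` as the
    /// error value.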
    pub(crate) fn from_bytes(bytes: Bytes) -> Result<Self, Bytes> {
        let layout = match bytes.deallocation() {
            Deallocation::Standard(layout) => *layout,
            _ => return Err(bytes),
        };

        let len = bytes.len();
        let data = bytes.ptr();
        mem::forget(bytes);

        Ok(Self { data, len, layout })
    }

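    /// Creates a zeroed [`MutableBuffer`] large enough to hold `len` bits
    /// (`ceil(len / 8)` bytes), e.g. for use as a null bitmap.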
    pub fn new_null(len: usize) -> Self {
        let num_bytes = bit_util::ceil(len, 8);
        MutableBuffer::from_len_zeroed(num_bytes)
    }

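    /// Sets the first `end` bytes to `0xFF` (if `val` is `true`) or `0x00`
    /// (otherwise), sets the buffer's length to `end`, and returns the buffer.
    ///
    /// # Panics
    ///
    /// Panics if `end` exceeds the buffer's capacity.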
    pub fn with_bitset(mut self, end: usize, val: bool) -> Self {
        assert!(end <= self.layout.size());
        let v = if val { 255 } else { 0 };
        unsafe {
            std::ptr::write_bytes(self.data.as_ptr(), v, end);
            self.len = end;
        }
        self
    }

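    /// Zeroes `count` bytes starting at byte offset `start`, without changing
    /// the buffer's length.
    ///
    /// # Panics
    ///
    /// Panics if `start + count` exceeds the buffer's capacity.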
    pub fn set_null_bits(&mut self, start: usize, count: usize) {
        assert!(
            start.saturating_add(count) <= self.layout.size(),
            "range start index {start} and count {count} out of bounds for \
            buffer of length {}",
            self.layout.size(),
        );

        unsafe {
            std::ptr::write_bytes(self.data.as_ptr().add(start), 0, count);
        }
    }

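    /// Ensures this buffer has at least `self.len + additional` bytes of
    /// capacity, reallocating only if `self.len + additional > capacity`.
    /// Growth is to the larger of twice the current capacity and the required
    /// size rounded up to a multiple of 64 bytes.
    ///
    /// # Example (assuming this file is the `arrow_buffer` crate's mutable buffer)
    /// ```
    /// # use arrow_buffer::buffer::MutableBuffer;
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.reserve(253); // allocates for the first time
    /// (0..253u8).for_each(|i| buffer.push(i)); // no reallocation
    /// assert_eq!(buffer.len(), 253);
    /// ```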
    #[inline(always)]
    pub fn reserve(&mut self, additional: usize) {
        let required_cap = self.len + additional;
        if required_cap > self.layout.size() {
            let new_capacity = bit_util::round_upto_multiple_of_64(required_cap);
            let new_capacity = std::cmp::max(new_capacity, self.layout.size() * 2);
            self.reallocate(new_capacity)
        }
    }

    #[cold]
    fn reallocate(&mut self, capacity: usize) {
        let new_layout = Layout::from_size_align(capacity, self.layout.align()).unwrap();
        if new_layout.size() == 0 {
            if self.layout.size() != 0 {
                unsafe { std::alloc::dealloc(self.as_mut_ptr(), self.layout) };
                self.layout = new_layout
            }
            return;
        }

        let data = match self.layout.size() {
            0 => unsafe { std::alloc::alloc(new_layout) },
            _ => unsafe { std::alloc::realloc(self.as_mut_ptr(), self.layout, capacity) },
        };
        self.data = NonNull::new(data).unwrap_or_else(|| handle_alloc_error(new_layout));
        self.layout = new_layout;
    }

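    /// Truncates this buffer to `len` bytes, leaving the capacity unchanged.
    /// Does nothing if `len` is greater than the current length.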
    #[inline(always)]
    pub fn truncate(&mut self, len: usize) {
        if len > self.len {
            return;
        }
        self.len = len;
    }

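    /// Resizes the buffer, either truncating its contents (with no change in
    /// capacity), or growing it (potentially reallocating) and writing `value`
    /// into the newly available bytes.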
    #[inline(always)]
    pub fn resize(&mut self, new_len: usize, value: u8) {
        if new_len > self.len {
            let diff = new_len - self.len;
            self.reserve(diff);
            unsafe { self.data.as_ptr().add(self.len).write_bytes(value, diff) };
        }
        self.len = new_len;
    }

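    /// Shrinks the capacity of the buffer as much as possible, i.e. to the
    /// length rounded up to a multiple of 64 bytes. A no-op if the capacity is
    /// already that small.
    ///
    /// # Example (assuming this file is the `arrow_buffer` crate's mutable buffer)
    /// ```
    /// # use arrow_buffer::buffer::MutableBuffer;
    /// // 2 cache lines
    /// let mut buffer = MutableBuffer::new(128);
    /// assert_eq!(buffer.capacity(), 128);
    /// buffer.push(1);
    /// buffer.push(2);
    ///
    /// buffer.shrink_to_fit();
    /// assert!(buffer.capacity() >= 64 && buffer.capacity() < 128);
    /// ```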
    pub fn shrink_to_fit(&mut self) {
        let new_capacity = bit_util::round_upto_multiple_of_64(self.len);
        if new_capacity < self.layout.size() {
            self.reallocate(new_capacity)
        }
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    #[inline]
    pub const fn capacity(&self) -> usize {
        self.layout.size()
    }

    pub fn clear(&mut self) {
        self.len = 0
    }

    pub fn as_slice(&self) -> &[u8] {
        self
    }

    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        self
    }

    #[inline]
    pub const fn as_ptr(&self) -> *const u8 {
        self.data.as_ptr()
    }

    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.data.as_ptr()
    }

    #[deprecated(
        since = "2.0.0",
        note = "This method is deprecated in favour of `into` from the trait `Into`."
    )]
    pub fn freeze(self) -> Buffer {
        self.into_buffer()
    }

    #[inline]
    pub(super) fn into_buffer(self) -> Buffer {
        let bytes = unsafe { Bytes::new(self.data, self.len, Deallocation::Standard(self.layout)) };
        std::mem::forget(self);
        Buffer::from_bytes(bytes)
    }

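    /// Returns the contents of this buffer as a mutable slice of `T`.
    ///
    /// # Panics
    ///
    /// Panics if the buffer's length is not a multiple of `size_of::<T>()`,
    /// or if the underlying bytes are not aligned to `T` (i.e. `align_to_mut`
    /// yields a non-empty prefix or suffix).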
    pub fn typed_data_mut<T: ArrowNativeType>(&mut self) -> &mut [T] {
        let (prefix, offsets, suffix) = unsafe { self.as_slice_mut().align_to_mut::<T>() };
        assert!(prefix.is_empty() && suffix.is_empty());
        offsets
    }

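    /// Returns the contents of this buffer as a slice of `T`, with the same
    /// alignment and length requirements as [`Self::typed_data_mut`].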
    pub fn typed_data<T: ArrowNativeType>(&self) -> &[T] {
        let (prefix, offsets, suffix) = unsafe { self.as_slice().align_to::<T>() };
        assert!(prefix.is_empty() && suffix.is_empty());
        offsets
    }

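    /// Extends this buffer from a slice of items, increasing its capacity if needed.
    ///
    /// # Example (assuming this file is the `arrow_buffer` crate's mutable buffer)
    /// ```
    /// # use arrow_buffer::buffer::MutableBuffer;
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.extend_from_slice(&[2u32, 0]);
    /// assert_eq!(buffer.len(), 8) // u32 is 4 bytes
    /// ```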
    #[inline]
    pub fn extend_from_slice<T: ArrowNativeType>(&mut self, items: &[T]) {
        let additional = mem::size_of_val(items);
        self.reserve(additional);
        unsafe {
            let src = items.as_ptr() as *const u8;
            let dst = self.data.as_ptr().add(self.len);
            std::ptr::copy_nonoverlapping(src, dst, additional)
        }
        self.len += additional;
    }

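    /// Extends the buffer with a new item, increasing its capacity if needed.
    ///
    /// # Example (assuming this file is the `arrow_buffer` crate's mutable buffer)
    /// ```
    /// # use arrow_buffer::buffer::MutableBuffer;
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.push(256u32);
    /// assert_eq!(buffer.len(), 4) // u32 is 4 bytes
    /// ```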
    #[inline]
    pub fn push<T: ToByteSlice>(&mut self, item: T) {
        let additional = std::mem::size_of::<T>();
        self.reserve(additional);
        unsafe {
            let src = item.to_byte_slice().as_ptr();
            let dst = self.data.as_ptr().add(self.len);
            std::ptr::copy_nonoverlapping(src, dst, additional);
        }
        self.len += additional;
    }

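    /// Extends the buffer with a new item without checking capacity first.
    ///
    /// # Safety
    ///
    /// The caller must ensure the buffer has enough remaining capacity for the
    /// item (`self.len + size_of::<T>() <= self.capacity()`), e.g. by calling
    /// [`Self::reserve`] beforehand.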
    #[inline]
    pub unsafe fn push_unchecked<T: ToByteSlice>(&mut self, item: T) {
        let additional = std::mem::size_of::<T>();
        let src = item.to_byte_slice().as_ptr();
        let dst = self.data.as_ptr().add(self.len);
        std::ptr::copy_nonoverlapping(src, dst, additional);
        self.len += additional;
    }

    #[inline]
    pub fn extend_zeros(&mut self, additional: usize) {
        self.resize(self.len + additional, 0);
    }

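    /// Sets the length of this buffer.
    ///
    /// # Safety
    ///
    /// The caller must ensure the buffer is properly initialized up to `len`
    /// bytes; the assert only checks that `len` does not exceed the capacity.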
    #[inline]
    pub unsafe fn set_len(&mut self, len: usize) {
        assert!(len <= self.capacity());
        self.len = len;
    }

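    /// Invokes `f` with indices `0..len`, packing the returned booleans into a
    /// bitmask 64 bits at a time (least-significant bit first) and returning a
    /// buffer of `ceil(len / 8)` bytes.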
    #[inline]
    pub fn collect_bool<F: FnMut(usize) -> bool>(len: usize, mut f: F) -> Self {
        let mut buffer = Self::new(bit_util::ceil(len, 64) * 8);

        let chunks = len / 64;
        let remainder = len % 64;
        for chunk in 0..chunks {
            let mut packed = 0;
            for bit_idx in 0..64 {
                let i = bit_idx + chunk * 64;
                packed |= (f(i) as u64) << bit_idx;
            }

            unsafe { buffer.push_unchecked(packed) }
        }

        if remainder != 0 {
            let mut packed = 0;
            for bit_idx in 0..remainder {
                let i = bit_idx + chunks * 64;
                packed |= (f(i) as u64) << bit_idx;
            }

            unsafe { buffer.push_unchecked(packed) }
        }

        buffer.truncate(bit_util::ceil(len, 8));
        buffer
    }
}

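/// Returns a well-aligned dangling pointer for zero-sized allocations.
///
/// SAFETY: `ALIGNMENT` is a non-zero `usize`, so the pointer is never null and
/// `NonNull::new_unchecked` is sound. Under Miri the same address is built via
/// `without_provenance_mut`, presumably to satisfy strict-provenance checks.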
#[inline]
pub(crate) fn dangling_ptr() -> NonNull<u8> {
    #[cfg(miri)]
    {
        unsafe { NonNull::new_unchecked(std::ptr::without_provenance_mut(ALIGNMENT)) }
    }
    #[cfg(not(miri))]
    {
        unsafe { NonNull::new_unchecked(ALIGNMENT as *mut u8) }
    }
}

impl<A: ArrowNativeType> Extend<A> for MutableBuffer {
    #[inline]
    fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
        let iterator = iter.into_iter();
        self.extend_from_iter(iterator)
    }
}

impl<T: ArrowNativeType> From<Vec<T>> for MutableBuffer {
    fn from(value: Vec<T>) -> Self {
        // SAFETY: `Vec::as_ptr` never returns null
        let data = unsafe { NonNull::new_unchecked(value.as_ptr() as _) };
        let len = value.len() * mem::size_of::<T>();
        // SAFETY: a `Vec<T>`'s allocation always matches
        // `Layout::array::<T>(capacity)`, which therefore cannot fail here
        let layout = unsafe { Layout::array::<T>(value.capacity()).unwrap_unchecked() };
        mem::forget(value);
        Self { data, len, layout }
    }
}

impl MutableBuffer {
    #[inline]
    pub(super) fn extend_from_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
        &mut self,
        mut iterator: I,
    ) {
        let item_size = std::mem::size_of::<T>();
        let (lower, _) = iterator.size_hint();
        let additional = lower * item_size;
        self.reserve(additional);

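        // Write through a raw pointer while `SetLenOnDrop` tracks progress, so
        // `self.len` is written back even if `iterator.next()` panics.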
        let mut len = SetLenOnDrop::new(&mut self.len);
        let mut dst = unsafe { self.data.as_ptr().add(len.local_len) };
        let capacity = self.layout.size();

        while len.local_len + item_size <= capacity {
            if let Some(item) = iterator.next() {
                unsafe {
                    let src = item.to_byte_slice().as_ptr();
                    std::ptr::copy_nonoverlapping(src, dst, item_size);
                    dst = dst.add(item_size);
                }
                len.local_len += item_size;
            } else {
                break;
            }
        }
        drop(len);

        // Any items beyond the size hint's lower bound go through `push`,
        // which reserves as needed.
        iterator.for_each(|item| self.push(item));
    }

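    /// Creates a [`MutableBuffer`] from an [`Iterator`] with a trusted (upper)
    /// length, skipping the per-item bookkeeping of [`Self::extend_from_iter`].
    ///
    /// # Safety
    ///
    /// The iterator's reported upper bound must be correct: the loop performs
    /// no bounds checks, so yielding more items than `size_hint().1` may write
    /// out of bounds, while yielding fewer trips the length assertion below.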
    #[inline]
    pub unsafe fn from_trusted_len_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
        iterator: I,
    ) -> Self {
        let item_size = std::mem::size_of::<T>();
        let (_, upper) = iterator.size_hint();
        let upper = upper.expect("from_trusted_len_iter requires an upper limit");
        let len = upper * item_size;

        let mut buffer = MutableBuffer::new(len);

        let mut dst = buffer.data.as_ptr();
        for item in iterator {
            // note: no reserve here (compared with `extend_from_iter`), as the
            // trusted length already bounds the writes
            let src = item.to_byte_slice().as_ptr();
            std::ptr::copy_nonoverlapping(src, dst, item_size);
            dst = dst.add(item_size);
        }
        assert_eq!(
            dst.offset_from(buffer.data.as_ptr()) as usize,
            len,
            "Trusted iterator length was not accurately reported"
        );
        buffer.len = len;
        buffer
    }

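    /// Creates a [`MutableBuffer`] of packed bits from a boolean [`Iterator`]
    /// with a trusted (upper) length.
    ///
    /// # Safety
    ///
    /// The iterator must yield at least `size_hint().1` items, as
    /// [`Self::collect_bool`] unwraps `iterator.next()` for every bit.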
    #[inline]
    pub unsafe fn from_trusted_len_iter_bool<I: Iterator<Item = bool>>(mut iterator: I) -> Self {
        let (_, upper) = iterator.size_hint();
        let len = upper.expect("from_trusted_len_iter requires an upper limit");

        Self::collect_bool(len, |_| iterator.next().unwrap())
    }

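    /// Creates a [`MutableBuffer`] from a fallible [`Iterator`] with a trusted
    /// (upper) length, short-circuiting on the first error.
    ///
    /// # Safety
    ///
    /// As with [`Self::from_trusted_len_iter`], the iterator's reported upper
    /// bound must be correct.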
    #[inline]
    pub unsafe fn try_from_trusted_len_iter<
        E,
        T: ArrowNativeType,
        I: Iterator<Item = Result<T, E>>,
    >(
        iterator: I,
    ) -> Result<Self, E> {
        let item_size = std::mem::size_of::<T>();
        let (_, upper) = iterator.size_hint();
        let upper = upper.expect("try_from_trusted_len_iter requires an upper limit");
        let len = upper * item_size;

        let mut buffer = MutableBuffer::new(len);

        let mut dst = buffer.data.as_ptr();
        for item in iterator {
            let item = item?;
            let src = item.to_byte_slice().as_ptr();
            std::ptr::copy_nonoverlapping(src, dst, item_size);
            dst = dst.add(item_size);
        }
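        // Finalization is factored into a non-generic helper, presumably to
        // keep this heavily-instantiated generic function's compile-time cost down.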
        unsafe fn finalize_buffer(dst: *mut u8, buffer: &mut MutableBuffer, len: usize) {
            assert_eq!(
                dst.offset_from(buffer.data.as_ptr()) as usize,
                len,
                "Trusted iterator length was not accurately reported"
            );
            buffer.len = len;
        }
        finalize_buffer(dst, &mut buffer, len);
        Ok(buffer)
    }
}

impl Default for MutableBuffer {
    fn default() -> Self {
        Self::with_capacity(0)
    }
}

impl std::ops::Deref for MutableBuffer {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len) }
    }
}

impl std::ops::DerefMut for MutableBuffer {
    fn deref_mut(&mut self) -> &mut [u8] {
        unsafe { std::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
    }
}

impl Drop for MutableBuffer {
    fn drop(&mut self) {
        if self.layout.size() != 0 {
            unsafe { std::alloc::dealloc(self.data.as_ptr() as _, self.layout) };
        }
    }
}

impl PartialEq for MutableBuffer {
    fn eq(&self, other: &MutableBuffer) -> bool {
        if self.len != other.len {
            return false;
        }
        if self.layout != other.layout {
            return false;
        }
        self.as_slice() == other.as_slice()
    }
}

unsafe impl Sync for MutableBuffer {}
unsafe impl Send for MutableBuffer {}

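/// Writes a length back through a `&mut usize` when dropped, so a buffer's
/// `len` stays consistent even if an iterator panics mid-extend (mirroring the
/// `SetLenOnDrop` pattern used internally by `std::vec::Vec`).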
struct SetLenOnDrop<'a> {
    len: &'a mut usize,
    local_len: usize,
}

impl<'a> SetLenOnDrop<'a> {
    #[inline]
    fn new(len: &'a mut usize) -> Self {
        SetLenOnDrop {
            local_len: *len,
            len,
        }
    }
}

impl Drop for SetLenOnDrop<'_> {
    #[inline]
    fn drop(&mut self) {
        *self.len = self.local_len;
    }
}

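/// Packs an iterator of booleans into bits, eight per byte,
/// least-significant bit first.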
impl std::iter::FromIterator<bool> for MutableBuffer {
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = bool>,
    {
        let mut iterator = iter.into_iter();
        let mut result = {
            let byte_capacity: usize = iterator.size_hint().0.saturating_add(7) / 8;
            MutableBuffer::new(byte_capacity)
        };

        loop {
            let mut exhausted = false;
            let mut byte_accum: u8 = 0;
            let mut mask: u8 = 1;

            while mask != 0 {
                if let Some(value) = iterator.next() {
                    byte_accum |= match value {
                        true => mask,
                        false => 0,
                    };
                    mask <<= 1;
                } else {
                    exhausted = true;
                    break;
                }
            }

            // If the iterator ran out before yielding any bit for this byte
            // (`mask` still at its initial value), there is nothing to push.
            if exhausted && mask == 1 {
                break;
            }

            // Grow ahead of `push_unchecked`, sizing by the iterator's
            // remaining lower bound plus one byte for the current one.
            if result.len() == result.capacity() {
                let additional_byte_capacity = 1usize.saturating_add(
                    iterator.size_hint().0.saturating_add(7) / 8,
                );
                result.reserve(additional_byte_capacity)
            }

            unsafe { result.push_unchecked(byte_accum) };
            if exhausted {
                break;
            }
        }
        result
    }
}

impl<T: ArrowNativeType> std::iter::FromIterator<T> for MutableBuffer {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let mut buffer = Self::default();
        buffer.extend_from_iter(iter.into_iter());
        buffer
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_mutable_new() {
        let buf = MutableBuffer::new(63);
        assert_eq!(64, buf.capacity());
        assert_eq!(0, buf.len());
        assert!(buf.is_empty());
    }

    #[test]
    fn test_mutable_default() {
        let buf = MutableBuffer::default();
        assert_eq!(0, buf.capacity());
        assert_eq!(0, buf.len());
        assert!(buf.is_empty());

        let mut buf = MutableBuffer::default();
        buf.extend_from_slice(b"hello");
        assert_eq!(5, buf.len());
        assert_eq!(b"hello", buf.as_slice());
    }

    #[test]
    fn test_mutable_extend_from_slice() {
        let mut buf = MutableBuffer::new(100);
        buf.extend_from_slice(b"hello");
        assert_eq!(5, buf.len());
        assert_eq!(b"hello", buf.as_slice());

        buf.extend_from_slice(b" world");
        assert_eq!(11, buf.len());
        assert_eq!(b"hello world", buf.as_slice());

        buf.clear();
        assert_eq!(0, buf.len());
        buf.extend_from_slice(b"hello arrow");
        assert_eq!(11, buf.len());
        assert_eq!(b"hello arrow", buf.as_slice());
    }

    #[test]
    fn mutable_extend_from_iter() {
        let mut buf = MutableBuffer::new(0);
        buf.extend(vec![1u32, 2]);
        assert_eq!(8, buf.len());
        assert_eq!(&[1u8, 0, 0, 0, 2, 0, 0, 0], buf.as_slice());

        buf.extend(vec![3u32, 4]);
        assert_eq!(16, buf.len());
        assert_eq!(
            &[1u8, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0],
            buf.as_slice()
        );
    }

    #[test]
    fn mutable_extend_from_iter_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.push(1_u8);
        buf.extend([1_u64]);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_extend_from_slice_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.extend_from_slice(&[1_u8]);
        buf.extend_from_slice(&[1_u64]);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_push_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.push(1_u8);
        buf.push(1_u64);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_push_unchecked_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        unsafe {
            buf.push_unchecked(1_u8);
            buf.push_unchecked(1_u64);
        }
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn test_from_trusted_len_iter() {
        let iter = vec![1u32, 2].into_iter();
        let buf = unsafe { MutableBuffer::from_trusted_len_iter(iter) };
        assert_eq!(8, buf.len());
        assert_eq!(&[1u8, 0, 0, 0, 2, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn test_mutable_reserve() {
        let mut buf = MutableBuffer::new(1);
        assert_eq!(64, buf.capacity());

        buf.reserve(10);
        assert_eq!(64, buf.capacity());

        buf.reserve(80);
        assert_eq!(128, buf.capacity());

        buf.reserve(129);
        assert_eq!(256, buf.capacity());
    }

    #[test]
    fn test_mutable_resize() {
        let mut buf = MutableBuffer::new(1);
        assert_eq!(64, buf.capacity());
        assert_eq!(0, buf.len());

        buf.resize(20, 0);
        assert_eq!(64, buf.capacity());
        assert_eq!(20, buf.len());

        buf.resize(10, 0);
        assert_eq!(64, buf.capacity());
        assert_eq!(10, buf.len());

        buf.resize(100, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(100, buf.len());

        buf.resize(30, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(30, buf.len());

        buf.resize(0, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(0, buf.len());
    }

    #[test]
    fn test_mutable_into() {
        let mut buf = MutableBuffer::new(1);
        buf.extend_from_slice(b"aaaa bbbb cccc dddd");
        assert_eq!(19, buf.len());
        assert_eq!(64, buf.capacity());
        assert_eq!(b"aaaa bbbb cccc dddd", buf.as_slice());

        let immutable_buf: Buffer = buf.into();
        assert_eq!(19, immutable_buf.len());
        assert_eq!(64, immutable_buf.capacity());
        assert_eq!(b"aaaa bbbb cccc dddd", immutable_buf.as_slice());
    }

    #[test]
    fn test_mutable_equal() {
        let mut buf = MutableBuffer::new(1);
        let mut buf2 = MutableBuffer::new(1);

        buf.extend_from_slice(&[0xaa]);
        buf2.extend_from_slice(&[0xaa, 0xbb]);
        assert!(buf != buf2);

        buf.extend_from_slice(&[0xbb]);
        assert_eq!(buf, buf2);

        buf2.reserve(65);
        assert!(buf != buf2);
    }

    #[test]
    fn test_mutable_shrink_to_fit() {
        let mut buffer = MutableBuffer::new(128);
        assert_eq!(buffer.capacity(), 128);
        buffer.push(1);
        buffer.push(2);

        buffer.shrink_to_fit();
        assert!(buffer.capacity() >= 64 && buffer.capacity() < 128);
    }

    #[test]
    fn test_mutable_set_null_bits() {
        let mut buffer = MutableBuffer::new(8).with_bitset(8, true);

        for i in 0..=buffer.capacity() {
            buffer.set_null_bits(i, 0);
            assert_eq!(buffer[..8], [255; 8][..]);
        }

        buffer.set_null_bits(1, 4);
        assert_eq!(buffer[..8], [255, 0, 0, 0, 0, 255, 255, 255][..]);
    }

    #[test]
    #[should_panic = "out of bounds for buffer of length"]
    fn test_mutable_set_null_bits_oob() {
        let mut buffer = MutableBuffer::new(64);
        buffer.set_null_bits(1, buffer.capacity());
    }

    #[test]
    #[should_panic = "out of bounds for buffer of length"]
    fn test_mutable_set_null_bits_oob_by_overflow() {
        let mut buffer = MutableBuffer::new(0);
        buffer.set_null_bits(1, usize::MAX);
    }

    #[test]
    fn from_iter() {
        let buffer = [1u16, 2, 3, 4].into_iter().collect::<MutableBuffer>();
        assert_eq!(buffer.len(), 4 * mem::size_of::<u16>());
        assert_eq!(buffer.as_slice(), &[1, 0, 2, 0, 3, 0, 4, 0]);
    }

    #[test]
    #[should_panic(expected = "failed to create layout for MutableBuffer: LayoutError")]
    fn test_with_capacity_panics_above_max_capacity() {
        let max_capacity = isize::MAX as usize - (isize::MAX as usize % ALIGNMENT);
        let _ = MutableBuffer::with_capacity(max_capacity + 1);
    }
}
1031}