1use std::any::type_name;
5use std::cmp::Ordering;
6use std::collections::Bound;
7use std::fmt::{Debug, Formatter};
8use std::hash::{Hash, Hasher};
9use std::marker::PhantomData;
10use std::ops::{Deref, RangeBounds};
11
12use bytes::{Buf, Bytes};
13use vortex_error::{VortexExpect, vortex_panic};
14
15use crate::debug::TruncatedDebug;
16use crate::trusted_len::TrustedLen;
17use crate::{Alignment, BufferMut, ByteBuffer};
18
/// An immutable, alignment-aware buffer of `T` elements backed by [`Bytes`].
pub struct Buffer<T> {
    // Raw storage. Invariant (enforced by the constructors in this file):
    // the pointer is aligned to `alignment` and
    // `bytes.len() == length * size_of::<T>()`.
    pub(crate) bytes: Bytes,
    // Number of `T` elements (not bytes).
    pub(crate) length: usize,
    // Recorded alignment; constructors check it is compatible with
    // `Alignment::of::<T>()`.
    pub(crate) alignment: Alignment,
    // Zero-sized marker tying the element type `T` to the buffer.
    pub(crate) _marker: PhantomData<T>,
}
26
27impl<T> Clone for Buffer<T> {
28 #[inline]
29 fn clone(&self) -> Self {
30 Self {
31 bytes: self.bytes.clone(),
32 length: self.length,
33 alignment: self.alignment,
34 _marker: PhantomData,
35 }
36 }
37}
38
impl<T> Default for Buffer<T> {
    /// An empty buffer with the natural alignment of `T`.
    fn default() -> Self {
        Self {
            bytes: Default::default(),
            length: 0,
            alignment: Alignment::of::<T>(),
            _marker: PhantomData,
        }
    }
}
49
impl<T> PartialEq for Buffer<T> {
    /// Buffers compare equal when their underlying bytes are equal.
    /// Note that `alignment` is deliberately NOT part of equality.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.bytes == other.bytes
    }
}
56
impl<T: PartialEq> PartialEq<Vec<T>> for Buffer<T> {
    /// Element-wise comparison against a `Vec<T>` (via slice equality).
    fn eq(&self, other: &Vec<T>) -> bool {
        self.as_ref() == other.as_slice()
    }
}
62
impl<T: PartialEq> PartialEq<Buffer<T>> for Vec<T> {
    /// Symmetric counterpart of `Buffer<T> == Vec<T>`.
    fn eq(&self, other: &Buffer<T>) -> bool {
        self.as_slice() == other.as_ref()
    }
}
68
// Byte-wise equality above is reflexive/symmetric/transitive for all `T`,
// so `Eq` holds without requiring `T: Eq`.
impl<T> Eq for Buffer<T> {}
70
impl<T> Ord for Buffer<T> {
    /// Orders by the underlying raw bytes (delegates to `Bytes::cmp`);
    /// like equality, this ignores `alignment`.
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.bytes.cmp(&other.bytes)
    }
}
77
impl<T> PartialOrd for Buffer<T> {
    /// Canonical delegation to `Ord`, keeping the two orderings consistent.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
84
impl<T> Hash for Buffer<T> {
    /// Hashes the raw bytes, which is consistent with the byte-wise
    /// `PartialEq` implementation above (`a == b` implies equal hashes).
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.bytes.as_ref().hash(state)
    }
}
91
impl<T> Buffer<T> {
    /// Create an immutable buffer by copying `values`, aligned for `T`.
    pub fn copy_from(values: impl AsRef<[T]>) -> Self {
        BufferMut::copy_from(values).freeze()
    }

    /// Create an immutable buffer by copying `values` with the requested
    /// `alignment`.
    pub fn copy_from_aligned(values: impl AsRef<[T]>, alignment: Alignment) -> Self {
        BufferMut::copy_from_aligned(values, alignment).freeze()
    }

    /// Create a zero-filled buffer of `len` elements, aligned for `T`.
    pub fn zeroed(len: usize) -> Self {
        Self::zeroed_aligned(len, Alignment::of::<T>())
    }

    /// Create a zero-filled buffer of `len` elements with the given
    /// `alignment`.
    pub fn zeroed_aligned(len: usize, alignment: Alignment) -> Self {
        BufferMut::zeroed_aligned(len, alignment).freeze()
    }

    /// Create an empty buffer aligned for `T`.
    pub fn empty() -> Self {
        BufferMut::empty().freeze()
    }

    /// Create an empty buffer with the given `alignment`.
    pub fn empty_aligned(alignment: Alignment) -> Self {
        BufferMut::empty_aligned(alignment).freeze()
    }

    /// Create a buffer containing `len` copies of `item`.
    pub fn full(item: T, len: usize) -> Self
    where
        T: Copy,
    {
        BufferMut::full(item, len).freeze()
    }

    /// Reinterpret a [`ByteBuffer`] as a `Buffer<T>`, requiring the natural
    /// alignment of `T`.
    ///
    /// # Panics
    ///
    /// See [`Self::from_bytes_aligned`] for the validation performed.
    pub fn from_byte_buffer(buffer: ByteBuffer) -> Self {
        Self::from_byte_buffer_aligned(buffer, Alignment::of::<T>())
    }

    /// Reinterpret a [`ByteBuffer`] as a `Buffer<T>` with an explicit
    /// `alignment`.
    ///
    /// # Panics
    ///
    /// See [`Self::from_bytes_aligned`] for the validation performed.
    pub fn from_byte_buffer_aligned(buffer: ByteBuffer, alignment: Alignment) -> Self {
        Self::from_bytes_aligned(buffer.into_inner(), alignment)
    }

    /// Build a `Buffer<T>` directly from raw [`Bytes`], validating the
    /// invariants the rest of this type relies on.
    ///
    /// # Panics
    ///
    /// * if `alignment` is not compatible with `T`'s natural alignment;
    /// * if the `bytes` pointer is not aligned to `alignment`;
    /// * if `bytes.len()` is not a whole number of `T` elements.
    pub fn from_bytes_aligned(bytes: Bytes, alignment: Alignment) -> Self {
        if !alignment.is_aligned_to(Alignment::of::<T>()) {
            vortex_panic!(
                "Alignment {} must be compatible with the scalar type's alignment {}",
                alignment,
                Alignment::of::<T>(),
            );
        }
        if bytes.as_ptr().align_offset(*alignment) != 0 {
            vortex_panic!(
                "Bytes alignment must align to the requested alignment {}",
                alignment,
            );
        }
        if bytes.len() % size_of::<T>() != 0 {
            vortex_panic!(
                "Bytes length {} must be a multiple of the scalar type's size {}",
                bytes.len(),
                size_of::<T>()
            );
        }
        // Element count derived from the byte length; the modulo check above
        // guarantees this division is exact.
        let length = bytes.len() / size_of::<T>();
        Self {
            bytes,
            length,
            alignment,
            _marker: Default::default(),
        }
    }

    /// Collect a [`TrustedLen`] iterator into a buffer, pre-allocating from
    /// its upper size bound.
    ///
    /// # Panics
    ///
    /// Panics if the iterator reports no upper bound in its `size_hint`.
    pub fn from_trusted_len_iter<I: TrustedLen<Item = T>>(iter: I) -> Self {
        let (_, high) = iter.size_hint();
        let mut buffer =
            BufferMut::with_capacity(high.vortex_expect("TrustedLen iterator has no upper bound"));
        buffer.extend_trusted(iter);
        buffer.freeze()
    }

    /// Number of `T` elements in the buffer (not bytes).
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.length
    }

    /// Returns `true` if the buffer contains no elements.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.length == 0
    }

    /// The alignment recorded for this buffer.
    #[inline(always)]
    pub fn alignment(&self) -> Alignment {
        self.alignment
    }

    /// View the buffer as a `&[T]`.
    #[inline(always)]
    pub fn as_slice(&self) -> &[T] {
        let raw_slice = self.bytes.as_ref();
        // SAFETY: the constructors guarantee `bytes` is aligned for `T` and
        // holds exactly `self.length * size_of::<T>()` initialized bytes.
        unsafe { std::slice::from_raw_parts(raw_slice.as_ptr().cast(), self.length) }
    }

    /// Borrowing iterator over the elements of the buffer.
    pub fn iter(&self) -> Iter<'_, T> {
        Iter {
            inner: self.as_slice().iter(),
        }
    }

    /// Zero-copy slice of `range` (in elements), keeping this buffer's
    /// alignment.
    ///
    /// # Panics
    ///
    /// See [`Self::slice_with_alignment`].
    #[inline(always)]
    pub fn slice(&self, range: impl RangeBounds<usize>) -> Self {
        self.slice_with_alignment(range, self.alignment)
    }

    /// Zero-copy slice of `range` with byte (i.e. no) alignment.
    ///
    /// # Panics
    ///
    /// See [`Self::slice_with_alignment`].
    #[inline(always)]
    pub fn slice_unaligned(&self, range: impl RangeBounds<usize>) -> Self {
        self.slice_with_alignment(range, Alignment::of::<u8>())
    }

    /// Zero-copy slice of `range` (in elements), recording the given
    /// `alignment` on the result.
    ///
    /// # Panics
    ///
    /// * if the range is inverted or extends past `self.len()`;
    /// * if either byte endpoint of the range is not a multiple of
    ///   `alignment`;
    /// * if `alignment` is not compatible with `T`'s natural alignment.
    pub fn slice_with_alignment(
        &self,
        range: impl RangeBounds<usize>,
        alignment: Alignment,
    ) -> Self {
        let len = self.len();
        // Normalize the generic bounds into a half-open [begin, end) range.
        let begin = match range.start_bound() {
            Bound::Included(&n) => n,
            Bound::Excluded(&n) => n.checked_add(1).vortex_expect("out of range"),
            Bound::Unbounded => 0,
        };
        let end = match range.end_bound() {
            Bound::Included(&n) => n.checked_add(1).vortex_expect("out of range"),
            Bound::Excluded(&n) => n,
            Bound::Unbounded => len,
        };

        if begin > end {
            vortex_panic!(
                "range start must not be greater than end: {:?} <= {:?}",
                begin,
                end
            );
        }
        if end > len {
            vortex_panic!("range end out of bounds: {:?} <= {:?}", end, len);
        }

        // Empty slices skip the offset checks below and simply carry the
        // requested alignment.
        if end == begin {
            return Self::empty_aligned(alignment);
        }

        let begin_byte = begin * size_of::<T>();
        let end_byte = end * size_of::<T>();

        // Offsets that are multiples of `alignment` keep the sliced pointer
        // aligned, since the buffer start is already aligned.
        if !begin_byte.is_multiple_of(*alignment) {
            vortex_panic!("range start must be aligned to {:?}", alignment);
        }
        if !end_byte.is_multiple_of(*alignment) {
            vortex_panic!("range end must be aligned to {:?}", alignment);
        }
        if !alignment.is_aligned_to(Alignment::of::<T>()) {
            vortex_panic!("Slice alignment must at least align to type T")
        }

        Self {
            bytes: self.bytes.slice(begin_byte..end_byte),
            length: end - begin,
            alignment,
            _marker: Default::default(),
        }
    }

    /// Zero-copy slice identified by a sub-slice borrowed from this buffer,
    /// using `T`'s natural alignment.
    ///
    /// # Panics
    ///
    /// See [`Self::slice_ref_with_alignment`].
    #[inline(always)]
    pub fn slice_ref(&self, subset: &[T]) -> Self {
        self.slice_ref_with_alignment(subset, Alignment::of::<T>())
    }

    /// Zero-copy slice identified by a sub-slice borrowed from this buffer,
    /// recording the given `alignment` on the result.
    ///
    /// # Panics
    ///
    /// * if `alignment` is not compatible with `T`'s natural alignment;
    /// * if this buffer's alignment is not compatible with `alignment`;
    /// * if the `subset` pointer is not aligned to `alignment`;
    /// * (via `Bytes::slice_ref`) if `subset` does not lie inside this
    ///   buffer.
    pub fn slice_ref_with_alignment(&self, subset: &[T], alignment: Alignment) -> Self {
        if !alignment.is_aligned_to(Alignment::of::<T>()) {
            vortex_panic!("slice_ref alignment must at least align to type T")
        }

        if !self.alignment.is_aligned_to(alignment) {
            vortex_panic!("slice_ref subset alignment must at least align to the buffer alignment")
        }

        if subset.as_ptr().align_offset(*alignment) != 0 {
            vortex_panic!("slice_ref subset must be aligned to {:?}", alignment);
        }

        // SAFETY: `subset` is a valid slice, so viewing its `size_of_val`
        // bytes as `&[u8]` covers exactly the memory it occupies.
        let subset_u8 =
            unsafe { std::slice::from_raw_parts(subset.as_ptr().cast(), size_of_val(subset)) };

        Self {
            bytes: self.bytes.slice_ref(subset_u8),
            length: subset.len(),
            alignment,
            _marker: Default::default(),
        }
    }

    /// Borrow the underlying [`Bytes`].
    pub fn inner(&self) -> &Bytes {
        // Cheap sanity check of the length invariant in debug builds.
        debug_assert_eq!(
            self.length * size_of::<T>(),
            self.bytes.len(),
            "Own length has to be the same as the underlying bytes length"
        );
        &self.bytes
    }

    /// Consume the buffer and return the underlying [`Bytes`].
    pub fn into_inner(self) -> Bytes {
        debug_assert_eq!(
            self.length * size_of::<T>(),
            self.bytes.len(),
            "Own length has to be the same as the underlying bytes length"
        );
        self.bytes
    }

    /// Convert into a [`ByteBuffer`], translating the element count into a
    /// byte count while keeping storage and alignment.
    pub fn into_byte_buffer(self) -> ByteBuffer {
        ByteBuffer {
            bytes: self.bytes,
            length: self.length * size_of::<T>(),
            alignment: self.alignment,
            _marker: Default::default(),
        }
    }

    /// Convert into a mutable [`BufferMut`], copying the contents if the
    /// underlying storage is shared.
    pub fn into_mut(self) -> BufferMut<T> {
        self.try_into_mut()
            .unwrap_or_else(|buffer| BufferMut::<T>::copy_from(&buffer))
    }

    /// Try to convert into a mutable [`BufferMut`] without copying.
    ///
    /// Returns `Err(self)` (reconstructed unchanged) when the underlying
    /// `Bytes` is shared and cannot be made mutable in place.
    pub fn try_into_mut(self) -> Result<BufferMut<T>, Self> {
        self.bytes
            .try_into_mut()
            .map(|bytes| BufferMut {
                bytes,
                length: self.length,
                alignment: self.alignment,
                _marker: Default::default(),
            })
            .map_err(|bytes| Self {
                bytes,
                length: self.length,
                alignment: self.alignment,
                _marker: Default::default(),
            })
    }

    /// Check whether the buffer's actual data pointer satisfies `alignment`
    /// (this inspects the pointer, not the recorded `alignment` field).
    pub fn is_aligned(&self, alignment: Alignment) -> bool {
        self.bytes.as_ptr().align_offset(*alignment) == 0
    }

    /// Return this buffer with the requested alignment, re-tagging in place
    /// when the pointer already satisfies it, otherwise copying into a
    /// freshly aligned allocation (optionally logging under `warn-copy`).
    pub fn aligned(mut self, alignment: Alignment) -> Self {
        if self.as_ptr().align_offset(*alignment) == 0 {
            self.alignment = alignment;
            self
        } else {
            #[cfg(feature = "warn-copy")]
            {
                let bt = std::backtrace::Backtrace::capture();
                log::warn!(
                    "Buffer is not aligned to requested alignment {alignment}, copying: {bt}"
                )
            }
            Self::copy_from_aligned(self, alignment)
        }
    }

    /// Like [`Self::aligned`], but panics instead of copying when the buffer
    /// is not already aligned.
    ///
    /// # Panics
    ///
    /// Panics if the data pointer does not satisfy `alignment`.
    pub fn ensure_aligned(mut self, alignment: Alignment) -> Self {
        if self.as_ptr().align_offset(*alignment) == 0 {
            self.alignment = alignment;
            self
        } else {
            vortex_panic!("Buffer is not aligned to requested alignment {}", alignment)
        }
    }
}
452
/// Borrowing iterator over the elements of a [`Buffer`], yielding `&T`.
pub struct Iter<'a, T> {
    // Underlying slice iterator; all methods delegate to it.
    inner: std::slice::Iter<'a, T>,
}
459
// Pure delegation to the inner slice iterator. `size_hint`, `count`, `last`
// and `nth` are forwarded explicitly to keep the slice iterator's optimized
// implementations instead of the `Iterator` trait defaults.
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }

    #[inline]
    fn count(self) -> usize {
        self.inner.count()
    }

    #[inline]
    fn last(self) -> Option<Self::Item> {
        self.inner.last()
    }

    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.inner.nth(n)
    }
}
488
impl<T> ExactSizeIterator for Iter<'_, T> {
    /// Exact remaining length, delegated to the slice iterator.
    #[inline]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
495
496impl<T: Debug> Debug for Buffer<T> {
497 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
498 f.debug_struct(&format!("Buffer<{}>", type_name::<T>()))
499 .field("length", &self.length)
500 .field("alignment", &self.alignment)
501 .field("as_slice", &TruncatedDebug(self.as_slice()))
502 .finish()
503 }
504}
505
// Deref to `[T]` so all slice methods are available directly on a `Buffer`.
impl<T> Deref for Buffer<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
514
impl<T> AsRef<[T]> for Buffer<T> {
    /// Borrow the buffer's contents as a slice.
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
521
impl<T> FromIterator<T> for Buffer<T> {
    /// Collect into a mutable buffer, then freeze it into an immutable one.
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        BufferMut::from_iter(iter).freeze()
    }
}
528
// Adapter that lets `Bytes::from_owner` reuse a `Vec<T>`'s allocation as a
// byte buffer without copying (see `AsRef<[u8]>` below).
#[repr(transparent)]
struct Wrapper<T>(Vec<T>);
532
impl<T> AsRef<[u8]> for Wrapper<T> {
    /// View the wrapped `Vec<T>`'s elements as raw bytes.
    fn as_ref(&self) -> &[u8] {
        let data = self.0.as_ptr().cast::<u8>();
        let len = self.0.len() * size_of::<T>();
        // SAFETY: `data` points at the Vec's `len()` elements, i.e. `len`
        // bytes of live allocation owned by `self.0`.
        // NOTE(review): if `T` contains padding, those padding bytes may be
        // uninitialized when exposed as `u8` — confirm `T` is restricted to
        // padding-free scalar types at the call sites.
        unsafe { std::slice::from_raw_parts(data, len) }
    }
}
540
541impl<T> From<Vec<T>> for Buffer<T>
542where
543 T: Send + 'static,
544{
545 fn from(value: Vec<T>) -> Self {
546 let original_len = value.len();
547 let wrapped_vec = Wrapper(value);
548
549 let bytes = Bytes::from_owner(wrapped_vec);
550
551 assert_eq!(bytes.as_ptr().align_offset(align_of::<T>()), 0);
552
553 Self {
554 bytes,
555 length: original_len,
556 alignment: Alignment::of::<T>(),
557 _marker: PhantomData,
558 }
559 }
560}
561
562impl From<Bytes> for ByteBuffer {
563 fn from(bytes: Bytes) -> Self {
564 let length = bytes.len();
565 Self {
566 bytes,
567 length,
568 alignment: Alignment::of::<u8>(),
569 _marker: Default::default(),
570 }
571 }
572}
573
impl Buf for ByteBuffer {
    /// Remaining readable bytes (for `Buffer<u8>`, `length` is in bytes).
    #[inline]
    fn remaining(&self) -> usize {
        self.len()
    }

    /// The whole remaining contents as a single contiguous chunk.
    #[inline]
    fn chunk(&self) -> &[u8] {
        self.as_slice()
    }

    /// Advance the read cursor by `cnt` bytes.
    ///
    /// Panics when `cnt` is not a multiple of the recorded alignment,
    /// because the advanced data pointer would otherwise violate the
    /// buffer's alignment invariant.
    #[inline]
    fn advance(&mut self, cnt: usize) {
        if !cnt.is_multiple_of(*self.alignment) {
            // NOTE(review): the message says "items" but `cnt` is a byte
            // count here — consider rewording in a behavior-changing pass.
            vortex_panic!(
                "Cannot advance buffer by {} items, resulting alignment is not {}",
                cnt,
                self.alignment
            );
        }
        // `Bytes::advance` panics if `cnt` exceeds the remaining length, so
        // the subtraction below cannot underflow.
        self.bytes.advance(cnt);
        self.length -= cnt;
    }
}
598
/// Owning iterator over a [`Buffer`], yielding elements by value.
pub struct BufferIterator<T> {
    // The buffer being iterated; owned so the data stays alive.
    buffer: Buffer<T>,
    // Index of the next element to yield.
    index: usize,
}
604
605impl<T: Copy> Iterator for BufferIterator<T> {
606 type Item = T;
607
608 #[inline]
609 fn next(&mut self) -> Option<Self::Item> {
610 (self.index < self.buffer.len()).then(move || {
611 let value = self.buffer[self.index];
612 self.index += 1;
613 value
614 })
615 }
616
617 #[inline]
618 fn size_hint(&self) -> (usize, Option<usize>) {
619 let remaining = self.buffer.len() - self.index;
620 (remaining, Some(remaining))
621 }
622}
623
impl<T: Copy> IntoIterator for Buffer<T> {
    type Item = T;
    type IntoIter = BufferIterator<T>;

    /// Consume the buffer into a by-value iterator starting at element 0.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        BufferIterator {
            buffer: self,
            index: 0,
        }
    }
}
636
impl<T> From<BufferMut<T>> for Buffer<T> {
    /// Freeze a mutable buffer into an immutable one.
    #[inline]
    fn from(value: BufferMut<T>) -> Self {
        value.freeze()
    }
}
643
#[cfg(test)]
mod test {
    use bytes::Buf;

    use crate::{Alignment, Buffer, ByteBuffer, buffer};

    /// `aligned` re-tags (or copies) the buffer to the requested alignment
    /// while preserving its contents.
    #[test]
    fn align() {
        let buf = buffer![0u8, 1, 2];
        let aligned = buf.aligned(Alignment::new(32));
        assert_eq!(aligned.alignment(), Alignment::new(32));
        assert_eq!(aligned.as_slice(), &[0, 1, 2]);
    }

    /// Element-range slicing with both exclusive and inclusive end bounds.
    #[test]
    fn slice() {
        let buf = buffer![0, 1, 2, 3, 4];
        assert_eq!(buf.slice(1..3).as_slice(), &[1, 2]);
        assert_eq!(buf.slice(1..=3).as_slice(), &[1, 2, 3]);
    }

    /// `slice_unaligned` permits offsets that break the original alignment:
    /// byte 1 of the leading `0i32` is 0, whatever the endianness.
    #[test]
    fn slice_unaligned() {
        let buf = buffer![0i32, 1, 2, 3, 4].into_byte_buffer();
        let sliced = buf.slice_unaligned(1..2);
        assert_eq!(sliced.len(), 1);
        assert_eq!(sliced.as_slice(), &[0]);
    }

    /// A plain `slice` at a misaligned byte offset must panic.
    #[test]
    #[should_panic]
    fn slice_bad_alignment() {
        let buf = buffer![0i32, 1, 2, 3, 4].into_byte_buffer();
        buf.slice(1..2);
    }

    /// `ByteBuffer` implements `bytes::Buf`: advancing consumes bytes from
    /// the front and shrinks the remaining window.
    #[test]
    fn bytes_buf() {
        let mut buf = ByteBuffer::copy_from("helloworld".as_bytes());
        assert_eq!(buf.remaining(), 10);
        assert_eq!(buf.chunk(), b"helloworld");

        Buf::advance(&mut buf, 5);
        assert_eq!(buf.remaining(), 5);
        assert_eq!(buf.as_slice(), b"world");
        assert_eq!(buf.chunk(), b"world");
    }

    /// `From<Vec<T>>` reuses the Vec's allocation, which must be aligned for
    /// the element type, and preserves the contents.
    #[test]
    fn from_vec() {
        let vec = vec![1, 2, 3, 4, 5];
        let buff = Buffer::from(vec.clone());
        assert!(buff.is_aligned(Alignment::of::<i32>()));
        assert_eq!(vec, buff);
    }
}