1use std::marker::PhantomData;
5use std::num::NonZero;
6use std::ops::{Bound, RangeBounds};
7use std::{iter, mem};
8
9use nm::{Event, Magnitude};
10use smallvec::SmallVec;
11
12use crate::mem::{BlockMeta, BlockSize, Memory};
13use crate::{BytesViewReader, MAX_INLINE_SPANS, MemoryGuard, Span};
14
#[doc = include_str!("../doc/snippets/sequence_memory_layout.md")]
#[derive(Clone, Debug)]
pub struct BytesView {
    // Spans of memory that make up the view, stored in REVERSE order: the
    // last element is the front of the byte sequence. Reversed storage makes
    // consuming from the front (`advance`) a cheap `pop()` from the tail.
    pub(crate) spans_reversed: SmallVec<[Span; MAX_INLINE_SPANS]>,

    // Total byte length across all spans; kept in sync with `spans_reversed`
    // so `len()` is O(1).
    len: usize,
}
67
68impl BytesView {
69 #[cfg_attr(test, mutants::skip)] #[must_use]
76 pub const fn new() -> Self {
77 Self {
78 spans_reversed: SmallVec::new_const(),
79 len: 0,
80 }
81 }
82
83 pub(crate) fn from_spans_reversed(spans_reversed: SmallVec<[Span; MAX_INLINE_SPANS]>) -> Self {
84 #[cfg(debug_assertions)]
85 spans_reversed.iter().for_each(|span| assert!(!span.is_empty()));
86
87 VIEW_CREATED_SPANS.with(|x| x.observe(spans_reversed.len()));
89
90 let len = spans_reversed.iter().fold(0_usize, |acc, span: &Span| {
91 acc.checked_add(span.len() as usize)
92 .expect("attempted to create a BytesView larger than usize::MAX bytes")
93 });
94
95 Self { spans_reversed, len }
96 }
97
98 #[cfg(test)]
102 pub(crate) fn from_spans<I>(spans: I) -> Self
103 where
104 I: IntoIterator<Item = Span>,
105 <I as IntoIterator>::IntoIter: iter::DoubleEndedIterator,
106 {
107 let spans_reversed = spans.into_iter().rev().collect::<SmallVec<_>>();
108
109 Self::from_spans_reversed(spans_reversed)
110 }
111
112 pub fn from_views<I>(views: I) -> Self
134 where
135 I: IntoIterator<Item = Self>,
136 <I as IntoIterator>::IntoIter: iter::DoubleEndedIterator,
137 {
138 let spans_reversed: SmallVec<_> = views
144 .into_iter()
145 .rev()
147 .flat_map(|view| view.spans_reversed)
149 .collect();
151
152 Self::from_spans_reversed(spans_reversed)
153 }
154
155 #[must_use]
187 pub fn copied_from_slice(bytes: &[u8], memory: &impl Memory) -> Self {
188 let mut buf = memory.reserve(bytes.len());
189 buf.put_slice(bytes);
190 buf.consume_all()
191 }
192
    /// Consumes the view and returns its spans in reversed (front-last) order.
    pub(crate) fn into_spans_reversed(self) -> SmallVec<[Span; MAX_INLINE_SPANS]> {
        self.spans_reversed
    }
196
197 #[cfg_attr(test, mutants::skip)] #[must_use]
218 pub fn len(&self) -> usize {
219 debug_assert_eq!(self.len, self.spans_reversed.iter().map(|x| x.len() as usize).sum::<usize>());
221
222 self.len
223 }
224
    /// Returns `true` when the view contains no bytes.
    #[cfg_attr(test, mutants::skip)] #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
231
232 pub fn extend_lifetime(&self) -> MemoryGuard {
238 MemoryGuard::new(self.spans_reversed.iter().map(Span::block_ref).map(Clone::clone))
239 }
240
241 #[must_use]
264 pub fn range<R>(&self, range: R) -> Self
265 where
266 R: RangeBounds<usize>,
267 {
268 self.range_checked(range).expect("provided range out of view bounds")
269 }
270
    /// Returns a sub-view covering `range` of this view's bytes, or `None`
    /// if the range does not fit within the view (or a bound overflows).
    ///
    /// The algorithm makes two passes over the spans: pass 1 determines which
    /// spans the range touches and how many bytes to trim off the first and
    /// last of them; pass 2 builds the sliced span list from those results.
    #[must_use]
    #[expect(clippy::missing_panics_doc, reason = "only unreachable panics")]
    #[expect(clippy::too_many_lines, reason = "acceptable for now")]
    #[cfg_attr(test, mutants::skip)]
    pub fn range_checked<R>(&self, range: R) -> Option<Self>
    where
        R: RangeBounds<usize>,
    {
        // Normalize the start bound into "bytes before the range starts".
        let bytes_until_range = match range.start_bound() {
            Bound::Included(&x) => x,
            Bound::Excluded(&x) => x.checked_add(1)?,
            Bound::Unbounded => 0,
        };

        // Normalize the end bound into "bytes inside the range".
        let bytes_in_range = match range.end_bound() {
            Bound::Included(&x) => x.checked_add(1)?.checked_sub(bytes_until_range)?,
            Bound::Excluded(&x) => x.checked_sub(bytes_until_range)?,
            Bound::Unbounded => self.len().checked_sub(bytes_until_range)?,
        };

        let required_len = bytes_until_range
            .checked_add(bytes_in_range)
            .expect("overflowing usize is impossible because we are calculating offset into usize-bounded range");

        if required_len > self.len() {
            return None;
        }

        // An empty range is always valid (when in bounds) and needs no spans.
        if bytes_in_range == 0 {
            return Some(Self::new());
        }

        // Pass 1 outputs: leading spans to skip entirely, number of spans the
        // range touches, and partial-byte trims at either end of that run.
        let mut spans_until_range: usize = 0;
        let mut spans_in_range: usize = 0;
        let mut bytes_to_skip_in_first_relevant_span: BlockSize = 0;
        let mut bytes_to_leave_in_last_relevant_span: BlockSize = 0;

        {
            let mut pass1_bytes_until_range = bytes_until_range;
            let mut pass1_bytes_in_range = bytes_in_range;

            // Spans are stored reversed, so `.rev()` walks front to back.
            for span in self.spans_reversed.iter().rev() {
                let bytes_in_span = span.len();
                let bytes_in_span_usize = bytes_in_span as usize;

                // This span lies entirely before the range - skip it whole.
                if pass1_bytes_until_range > 0 && bytes_in_span_usize <= pass1_bytes_until_range {
                    spans_until_range = spans_until_range
                        .checked_add(1)
                        .expect("overflowing usize is impossible because we are calculating chunks within usize-bounded range");
                    pass1_bytes_until_range = pass1_bytes_until_range
                        .checked_sub(bytes_in_span_usize)
                        .expect("somehow ended up with negative bytes remaining until range start - only possible if the math is wrong");
                    continue;
                }

                // Tentatively treat the whole span as trailing slack; the
                // subtractions below whittle this down to the true leftover.
                // Only the value computed on the final touched span survives.
                bytes_to_leave_in_last_relevant_span = bytes_in_span;

                let pass1_bytes_until_range_block_size = pass1_bytes_until_range.try_into().expect("we are supposedly indicating a position inside a span but the offset is larger than a memory block range - algorithm error");

                // The range starts partway into this span - record how much
                // of its front to skip.
                if pass1_bytes_until_range != 0 {
                    bytes_to_skip_in_first_relevant_span = pass1_bytes_until_range_block_size;

                    bytes_to_leave_in_last_relevant_span = bytes_to_leave_in_last_relevant_span
                        .checked_sub(bytes_to_skip_in_first_relevant_span)
                        .expect("somehow ended up with negative bytes remaining in span - only possible if the math is wrong");
                }

                #[expect(
                    clippy::cast_possible_truncation,
                    reason = "the usize never contains a value outside bounds of BlockSize - guarded by min()"
                )]
                let relevant_bytes_in_span = ((bytes_in_span
                    .checked_sub(pass1_bytes_until_range_block_size)
                    .expect("somehow ended up with negative bytes remaining in span - only possible if the math is wrong")
                    as usize)
                    .min(pass1_bytes_in_range)) as BlockSize;

                bytes_to_leave_in_last_relevant_span = bytes_to_leave_in_last_relevant_span
                    .checked_sub(relevant_bytes_in_span)
                    .expect("somehow ended up with negative bytes remaining in span - only possible if the math is wrong");

                spans_in_range = spans_in_range
                    .checked_add(1)
                    .expect("overflowing usize is impossible because we are calculating chunks within usize-bounded range");

                // From the second touched span onward there is no leading skip.
                pass1_bytes_until_range = 0;

                pass1_bytes_in_range = pass1_bytes_in_range
                    .checked_sub(relevant_bytes_in_span as usize)
                    .expect("somehow ended up with negative bytes remaining in range - only possible if the math is wrong");

                if pass1_bytes_in_range == 0 {
                    break;
                }
            }
        }

        // Pass 2: slice the touched spans into the output list. Note this
        // iterator is front-to-back; the `.rev()` below flips it so the
        // output is built directly in reversed storage order.
        let relevant_spans = self.spans_reversed.iter().rev().skip(spans_until_range).take(spans_in_range);

        let mut bytes_remaining_in_range = bytes_in_range;

        let mut slice_spans = SmallVec::with_capacity(spans_in_range);

        for span in relevant_spans.rev() {
            let mut bytes_to_even_consider = span.len();

            // Walking back to front, the very first iteration is the LAST
            // relevant span, which carries the trailing trim.
            if bytes_to_leave_in_last_relevant_span > 0 {
                bytes_to_even_consider = bytes_to_even_consider
                    .checked_sub(bytes_to_leave_in_last_relevant_span)
                    .expect("somehow ended up with negative bytes remaining in span - only possible if the math is wrong");

                bytes_to_leave_in_last_relevant_span = 0;
            }

            #[expect(
                clippy::cast_possible_truncation,
                reason = "the usize never contains a value outside bounds of BlockSize - guarded by min()"
            )]
            let mut max_take_bytes = (bytes_to_even_consider as usize).min(bytes_remaining_in_range) as BlockSize;

            // The front-most relevant span is reached when it alone can
            // satisfy everything still remaining.
            let is_first_span = bytes_remaining_in_range <= max_take_bytes as usize;

            if is_first_span && bytes_to_skip_in_first_relevant_span > 0 {
                let remainder_in_span = bytes_to_even_consider
                    .checked_sub(bytes_to_skip_in_first_relevant_span)
                    .expect("somehow ended up with negative bytes remaining in span - only possible if the math is wrong");

                max_take_bytes = max_take_bytes.min(remainder_in_span);

                bytes_remaining_in_range = bytes_remaining_in_range
                    .checked_sub(max_take_bytes as usize)
                    .expect("somehow ended up with negative bytes remaining - only possible if the math is wrong");

                let start = bytes_to_skip_in_first_relevant_span;
                let end = bytes_to_skip_in_first_relevant_span
                    .checked_add(max_take_bytes)
                    .expect("overflowing usize is impossible because we are calculating slice within usize-bounded range");

                bytes_to_skip_in_first_relevant_span = 0;

                slice_spans.push(span.slice(start..end));
            } else {
                bytes_remaining_in_range = bytes_remaining_in_range
                    .checked_sub(max_take_bytes as usize)
                    .expect("somehow ended up with negative bytes remaining - only possible if the math is wrong");

                slice_spans.push(span.slice(0..max_take_bytes));
            }
        }

        Some(Self {
            spans_reversed: slice_spans,
            len: bytes_in_range,
        })
    }
465
466 pub fn consume_all_slices<F>(&mut self, mut f: F)
490 where
491 F: FnMut(&[u8]),
492 {
493 while !self.is_empty() {
496 let slice = self.first_slice();
497 f(slice);
498 self.advance(slice.len());
499 }
500 }
501
502 #[doc = include_str!("../doc/snippets/sequence_memory_layout.md")]
506 #[cfg_attr(test, mutants::skip)] #[must_use]
531 pub fn first_slice(&self) -> &[u8] {
532 self.spans_reversed.last().map_or::<&[u8], _>(&[], |span| span)
533 }
534
    #[doc = include_str!("../doc/snippets/sequence_memory_layout.md")]
    /// Returns an iterator over the view's contiguous slices, front to back,
    /// each paired with the metadata (if any) of the block it lives in.
    pub fn slices(&self) -> BytesViewSlices<'_> {
        BytesViewSlices::new(self)
    }
566
567 #[must_use]
593 pub fn first_slice_meta(&self) -> Option<&dyn BlockMeta> {
594 self.spans_reversed.last().and_then(|span| span.block_ref().meta())
595 }
596
    /// Advances the start of the view forward by `count` bytes, dropping any
    /// spans that become fully consumed (which may release their blocks).
    ///
    /// # Panics
    ///
    /// Panics if `count` exceeds the remaining length of the view.
    #[cfg_attr(test, mutants::skip)] pub fn advance(&mut self, mut count: usize) {
        self.len = self.len.checked_sub(count).expect("attempted to advance past end of the view");

        while count > 0 {
            // Front of the byte sequence = last element of reversed storage.
            let front = self
                .spans_reversed
                .last_mut()
                .expect("logic error - ran out of spans before advancing over their contents");
            let span_len = front.len() as usize;

            if count < span_len {
                // SAFETY: `count` is strictly less than this span's length,
                // so the in-span advance stays within bounds (assuming that
                // is `Span::advance`'s contract - confirm at its definition).
                unsafe {
                    front.advance(count);
                }
                break;
            }

            // The whole front span is consumed; drop it and continue.
            self.spans_reversed.pop();
            // Cannot underflow: `count >= span_len` in this branch.
            count = count.wrapping_sub(span_len);
        }
    }
654
655 pub fn append(&mut self, other: Self) {
677 self.len = self
678 .len
679 .checked_add(other.len)
680 .expect("attempted to create a BytesView larger than usize::MAX bytes");
681
682 self.spans_reversed.insert_many(0, other.spans_reversed);
683 }
684
685 #[must_use]
710 pub fn concat(&self, other: Self) -> Self {
711 let mut new_view = self.clone();
712 new_view.append(other);
713 new_view
714 }
715
    /// Wraps the view in an adapter implementing [`std::io::Read`]; reading
    /// consumes bytes from the front of this view.
    #[must_use]
    pub fn as_read(&mut self) -> impl std::io::Read {
        BytesViewReader::new(self)
    }
740}
741
impl Default for BytesView {
    /// Equivalent to [`BytesView::new`]: an empty view.
    fn default() -> Self {
        Self::new()
    }
}
747
impl PartialEq for BytesView {
    /// Compares two views byte-by-byte, regardless of how their contents are
    /// split across spans.
    fn eq(&self, other: &Self) -> bool {
        // Cheap rejection: differing total lengths can never be equal.
        if self.len() != other.len() {
            return false;
        }

        let mut remaining_bytes = self.len();

        // Clones copy only the span lists, letting us consume the clones
        // while leaving the originals untouched.
        let mut self_view = self.clone();
        let mut other_view = other.clone();

        while remaining_bytes > 0 {
            let self_slice = self_view.first_slice();
            let other_slice = other_view.first_slice();

            // Compare only the overlap of the two front slices; the longer
            // slice's tail is handled on the next iteration.
            let comparison_len = NonZero::new(self_slice.len().min(other_slice.len()))
                .expect("both views said there are remaining bytes but we got an empty slice from at least one of them");

            let self_slice = self_slice.get(..comparison_len.get()).expect("already checked that remaining > 0");
            let other_slice = other_slice.get(..comparison_len.get()).expect("already checked that remaining > 0");

            if self_slice != other_slice {
                return false;
            }

            // Advance both sides in lock step by the compared amount.
            self_view.advance(comparison_len.get());
            other_view.advance(comparison_len.get());

            remaining_bytes = remaining_bytes
                .checked_sub(comparison_len.get())
                .expect("impossible to consume more bytes from the sequences than are remaining");
        }

        debug_assert_eq!(remaining_bytes, 0);
        debug_assert_eq!(self_view.len(), 0);
        debug_assert_eq!(other_view.len(), 0);

        true
    }
}
795
796impl PartialEq<&[u8]> for BytesView {
797 fn eq(&self, other: &&[u8]) -> bool {
798 let mut other = *other;
799
800 if self.len() != other.len() {
803 return false;
804 }
805
806 let mut remaining_bytes = self.len();
807
808 let mut self_view = self.clone();
810
811 while remaining_bytes > 0 {
812 let self_slice = self_view.first_slice();
813 let slice_size = NonZero::new(self_slice.len())
814 .expect("both sides of the comparison said there are remaining bytes but we got an empty slice from at least one of them");
815
816 let self_slice = self_slice.get(..slice_size.get()).expect("already checked that remaining > 0");
817 let other_slice = other.get(..slice_size.get()).expect("already checked that remaining > 0");
818
819 if self_slice != other_slice {
820 return false;
822 }
823
824 self_view.advance(slice_size.get());
826 other = other.get(slice_size.get()..).expect("guarded by min() above");
827
828 remaining_bytes = remaining_bytes
829 .checked_sub(slice_size.get())
830 .expect("impossible to consume more bytes from the sequences than are remaining");
831 }
832
833 debug_assert_eq!(remaining_bytes, 0);
834 debug_assert_eq!(self_view.len(), 0);
835 debug_assert_eq!(other.len(), 0);
836
837 true
838 }
839}
840
841impl PartialEq<BytesView> for &[u8] {
842 fn eq(&self, other: &BytesView) -> bool {
843 other.eq(self)
844 }
845}
846
847impl<const LEN: usize> PartialEq<&[u8; LEN]> for BytesView {
848 fn eq(&self, other: &&[u8; LEN]) -> bool {
849 self.eq(&other.as_slice())
850 }
851}
852
853impl<const LEN: usize> PartialEq<BytesView> for &[u8; LEN] {
854 fn eq(&self, other: &BytesView) -> bool {
855 other.eq(&self.as_slice())
856 }
857}
858
/// Iterator over the contiguous slices of a [`BytesView`], produced by
/// [`BytesView::slices`]. Yields each slice paired with the metadata (if
/// any) of the memory block it resides in.
#[must_use]
#[derive(Debug)]
pub struct BytesViewSlices<'s> {
    // A private clone of the parent view; iteration consumes this copy
    // while the parent stays untouched.
    view: BytesView,

    // Ties the iterator to the parent view's lifetime so the yielded
    // slices (which borrow the underlying blocks) cannot outlive it.
    _parent: PhantomData<&'s BytesView>,
}
874
875impl<'s> BytesViewSlices<'s> {
876 pub(crate) fn new(view: &'s BytesView) -> Self {
877 Self {
878 view: view.clone(),
879 _parent: PhantomData,
880 }
881 }
882}
883
impl<'s> Iterator for BytesViewSlices<'s> {
    type Item = (&'s [u8], Option<&'s dyn BlockMeta>);

    /// Yields the next contiguous slice and its block metadata, then
    /// advances the internal view clone past it.
    #[cfg_attr(test, mutants::skip)] fn next(&mut self) -> Option<Self::Item> {
        if self.view.is_empty() {
            return None;
        }

        let slice = self.view.first_slice();
        let meta = self.view.first_slice_meta();

        // SAFETY(review): the transmutes only extend the borrow lifetime to
        // 's. This looks sound because the references point into memory
        // blocks that the PARENT view (borrowed for 's via `_parent`) keeps
        // alive, not into `self.view` itself - so advancing/popping spans
        // below does not invalidate them. Confirm that blocks are released
        // only when every referencing view/span is dropped.
        let slice_with_s = unsafe { mem::transmute::<&[u8], &'s [u8]>(slice) };
        let meta_with_s = unsafe { mem::transmute::<Option<&dyn BlockMeta>, Option<&'s dyn BlockMeta>>(meta) };

        self.view.advance(self.view.first_slice().len());

        Some((slice_with_s, meta_with_s))
    }
}
915
// Histogram bucket magnitudes for the per-view span count metric below.
const SPAN_COUNT_BUCKETS: &[Magnitude] = &[0, 1, 2, 4, 8, 16, 32];

thread_local! {
    // Records how many spans each newly created view contains
    // (observed in `BytesView::from_spans_reversed`).
    static VIEW_CREATED_SPANS: Event = Event::builder()
        .name("bytesbuf_view_created_spans")
        .histogram(SPAN_COUNT_BUCKETS)
        .build();
}
924
925#[cfg_attr(coverage_nightly, coverage(off))]
926#[cfg(test)]
927mod tests {
928 #![allow(
929 clippy::indexing_slicing,
930 clippy::needless_range_loop,
931 clippy::arithmetic_side_effects,
932 reason = "This is all fine in test code"
933 )]
934
935 use std::pin::pin;
936 use std::thread;
937
938 use new_zealand::nz;
939 use static_assertions::{assert_impl_all, assert_not_impl_any};
940 use testing_aids::assert_panic;
941
942 use super::*;
943 use crate::BytesBuf;
944 use crate::mem::testing::{TestMemoryBlock, TransparentMemory, std_alloc_block};
945
    // Compile-time guarantees: views can be sent and shared across threads,
    // and there is deliberately no zero-copy From<&'static [u8]> conversion.
    assert_impl_all!(BytesView: Send, Sync);

    assert_not_impl_any!(BytesView: From<&'static [u8]>);

    // End-to-end: build a view over three spans and read typed values across
    // span boundaries, checking the length bookkeeping along the way.
    #[test]
    fn smoke_test() {
        let mut span_builder = std_alloc_block::allocate(nz!(10)).into_span_builder();

        span_builder.put_slice(&1234_u64.to_ne_bytes());
        span_builder.put_slice(&16_u16.to_ne_bytes());

        let span1 = span_builder.consume(nz!(4));
        let span2 = span_builder.consume(nz!(3));
        let span3 = span_builder.consume(nz!(3));

        assert_eq!(0, span_builder.remaining_capacity());
        assert_eq!(span1.len(), 4);
        assert_eq!(span2.len(), 3);
        assert_eq!(span3.len(), 3);

        let mut view = BytesView::from_spans(vec![span1, span2, span3]);

        assert!(!view.is_empty());
        assert_eq!(10, view.len());

        let slice = view.first_slice();
        assert_eq!(4, slice.len());

        assert_eq!(view.get_num_ne::<u64>(), 1234);

        assert_eq!(2, view.len());

        let slice = view.first_slice();
        assert_eq!(2, slice.len());

        assert_eq!(view.get_num_ne::<u16>(), 16);

        assert_eq!(0, view.len());
        assert!(view.is_empty());
    }

    // Reading past the end of a view must panic rather than return garbage.
    #[test]
    fn oob_is_panic() {
        let mut span_builder = std_alloc_block::allocate(nz!(10)).into_span_builder();

        span_builder.put_slice(&1234_u64.to_ne_bytes());
        span_builder.put_slice(&16_u16.to_ne_bytes());

        let span1 = span_builder.consume(nz!(4));
        let span2 = span_builder.consume(nz!(3));
        let span3 = span_builder.consume(nz!(3));

        let mut view = BytesView::from_spans(vec![span1, span2, span3]);

        assert_eq!(10, view.len());

        assert_eq!(view.get_num_ne::<u64>(), 1234);
        // Only 2 bytes remain - a u32 read must panic.
        assert_panic!(_ = view.get_num_ne::<u32>());
    }

    // The guard returned by extend_lifetime() must hold a reference to every
    // block the view touched, and release them all when dropped.
    #[test]
    fn extend_lifetime_references_all_blocks() {
        let block1 = unsafe { TestMemoryBlock::new(nz!(8), None) };
        let block1 = pin!(block1);

        let block2 = unsafe { TestMemoryBlock::new(nz!(8), None) };
        let block2 = pin!(block2);

        let guard = {
            let mut span_builder1 = unsafe { block1.as_ref().to_block() }.into_span_builder();
            let mut span_builder2 = unsafe { block2.as_ref().to_block() }.into_span_builder();

            span_builder1.put_slice(&1234_u64.to_ne_bytes());
            span_builder2.put_slice(&1234_u64.to_ne_bytes());

            let span1 = span_builder1.consume(nz!(8));
            let span2 = span_builder2.consume(nz!(8));

            let view = BytesView::from_spans(vec![span1, span2]);

            view.extend_lifetime()
        };

        // The view (and its spans) are gone; only the guard holds the blocks.
        assert_eq!(block1.ref_count(), 1);
        assert_eq!(block2.ref_count(), 1);

        drop(guard);

        assert_eq!(block1.ref_count(), 0);
        assert_eq!(block2.ref_count(), 0);
    }

    // from_views() concatenates views in order, preserving their contents.
    #[test]
    fn from_views() {
        let mut span_builder = std_alloc_block::allocate(nz!(100)).into_span_builder();

        span_builder.put_slice(&1234_u64.to_ne_bytes());
        span_builder.put_slice(&5678_u64.to_ne_bytes());

        let span1 = span_builder.consume(nz!(8));
        let span2 = span_builder.consume(nz!(8));

        let view1 = BytesView::from_spans(vec![span1]);
        let view2 = BytesView::from_spans(vec![span2]);

        let mut combined_view = BytesView::from_views(vec![view1, view2]);

        assert_eq!(16, combined_view.len());

        assert_eq!(combined_view.get_num_ne::<u64>(), 1234);
        assert_eq!(combined_view.get_num_ne::<u64>(), 5678);
    }

    // A default-constructed view is empty and yields an empty first slice.
    #[test]
    fn empty_view() {
        let view = BytesView::default();

        assert!(view.is_empty());
        assert_eq!(0, view.len());
        assert_eq!(0, view.first_slice().len());
    }

    // range() on a single-span view returns an independent sub-view and does
    // not consume or modify the parent.
    #[test]
    fn slice_from_single_span_view() {
        let span_builder = std_alloc_block::allocate(nz!(100)).into_span_builder();

        let mut buf = BytesBuf::from_span_builders([span_builder]);

        for i in 0..100 {
            buf.put_byte(i);
        }

        let view = buf.consume_all();

        let mut sliced_view = view.range(50..55);

        assert_eq!(5, sliced_view.len());
        assert_eq!(100, view.len());

        assert_eq!(50, sliced_view.get_byte());

        assert_eq!(4, sliced_view.len());
        assert_eq!(100, view.len());

        assert_eq!(51, sliced_view.get_byte());
        assert_eq!(52, sliced_view.get_byte());
        assert_eq!(53, sliced_view.get_byte());
        assert_eq!(54, sliced_view.get_byte());

        assert_eq!(0, sliced_view.len());

        assert!(view.range_checked(0..101).is_none());
        assert!(view.range_checked(100..101).is_none());
        assert!(view.range_checked(101..101).is_none());
    }
1119
    // range() must work when the requested range starts, spans, and ends in
    // different spans of a multi-span view.
    #[test]
    fn slice_from_multi_span_view() {
        const SPAN_SIZE: NonZero<BlockSize> = nz!(10);

        let span_builders = iter::repeat_with(|| std_alloc_block::allocate(SPAN_SIZE).into_span_builder())
            .take(10)
            .collect::<Vec<_>>();

        let mut buf = BytesBuf::from_span_builders(span_builders);

        for i in 0..100 {
            buf.put_byte(i);
        }

        let view = buf.consume_all();

        let mut first5 = view.range(0..5);
        assert_eq!(5, first5.len());
        assert_eq!(100, view.len());
        assert_eq!(0, first5.get_byte());

        let mut last5 = view.range(95..100);
        assert_eq!(5, last5.len());
        assert_eq!(100, view.len());
        assert_eq!(95, last5.get_byte());

        // Crosses the span boundary at byte 50.
        let mut middle5 = view.range(49..54);
        assert_eq!(5, middle5.len());
        assert_eq!(100, view.len());
        assert_eq!(49, middle5.get_byte());
        assert_eq!(50, middle5.get_byte());
        assert_eq!(51, middle5.get_byte());
        assert_eq!(52, middle5.get_byte());
        assert_eq!(53, middle5.get_byte());

        assert!(view.range_checked(0..101).is_none());
        assert!(view.range_checked(100..101).is_none());
        assert!(view.range_checked(101..101).is_none());
    }

    // All RangeBounds flavors (exclusive, inclusive, open-ended, open-start)
    // must select the same bytes as their slice-indexing equivalents.
    #[test]
    fn slice_indexing_kinds() {
        let span_builder = std_alloc_block::allocate(nz!(10)).into_span_builder();

        let mut buf = BytesBuf::from_span_builders([span_builder]);
        buf.put_byte(0);
        buf.put_byte(1);
        buf.put_byte(2);
        buf.put_byte(3);
        buf.put_byte(4);
        buf.put_byte(5);

        let data = buf.consume_all();

        let mut middle_four = data.range(1..5);
        assert_eq!(4, middle_four.len());
        assert_eq!(1, middle_four.get_byte());
        assert_eq!(2, middle_four.get_byte());
        assert_eq!(3, middle_four.get_byte());
        assert_eq!(4, middle_four.get_byte());

        let mut middle_four = data.range(1..=4);
        assert_eq!(4, middle_four.len());
        assert_eq!(1, middle_four.get_byte());
        assert_eq!(2, middle_four.get_byte());
        assert_eq!(3, middle_four.get_byte());
        assert_eq!(4, middle_four.get_byte());

        let mut last_two = data.range(4..);
        assert_eq!(2, last_two.len());
        assert_eq!(4, last_two.get_byte());
        assert_eq!(5, last_two.get_byte());

        let mut first_two = data.range(..2);
        assert_eq!(2, first_two.len());
        assert_eq!(0, first_two.get_byte());
        assert_eq!(1, first_two.get_byte());

        let mut first_two = data.range(..=1);
        assert_eq!(2, first_two.len());
        assert_eq!(0, first_two.get_byte());
        assert_eq!(1, first_two.get_byte());
    }

    // Excluded start bounds (only constructible via Bound values directly)
    // must be handled, including the usize::MAX overflow edge case.
    #[test]
    fn slice_checked_with_excluded_start_bound() {
        let span_builder = std_alloc_block::allocate(nz!(100)).into_span_builder();

        let mut buf = BytesBuf::from_span_builders([span_builder]);
        buf.put_byte(0);
        buf.put_byte(1);
        buf.put_byte(2);
        buf.put_byte(3);
        buf.put_byte(4);
        buf.put_byte(5);
        buf.put_byte(6);
        buf.put_byte(7);
        buf.put_byte(8);

        let view = buf.consume_all();

        let sliced = view.range_checked((Bound::Excluded(1), Bound::Excluded(5)));
        assert!(sliced.is_some());
        let mut sliced = sliced.unwrap();
        assert_eq!(3, sliced.len());
        assert_eq!(2, sliced.get_byte());
        assert_eq!(3, sliced.get_byte());
        assert_eq!(4, sliced.get_byte());

        // Excluding the last index yields an empty (but valid) range.
        let sliced = view.range_checked((Bound::Excluded(8), Bound::Unbounded));
        assert!(sliced.is_some());
        assert_eq!(0, sliced.unwrap().len());

        // Excluded(usize::MAX) would overflow the start offset - must be None.
        let sliced = view.range_checked((Bound::Excluded(usize::MAX), Bound::Unbounded));
        assert!(sliced.is_none());
    }

    // The panicking range() must reject every out-of-bounds range shape.
    #[test]
    fn slice_oob_is_panic() {
        let span_builder = std_alloc_block::allocate(nz!(1000)).into_span_builder();

        let mut buf = BytesBuf::from_span_builders([span_builder]);
        buf.put_byte_repeated(0, 100);

        let view = buf.consume_all();

        assert_panic!(_ = view.range(0..101));
        assert_panic!(_ = view.range(0..=100));
        assert_panic!(_ = view.range(100..=100));
        assert_panic!(_ = view.range(100..101));
        assert_panic!(_ = view.range(101..));
        assert_panic!(_ = view.range(101..101));
        assert_panic!(_ = view.range(101..101));
    }

    // Ranges that touch the exact boundaries of the view are valid.
    #[test]
    fn slice_at_boundary_is_not_panic() {
        let span_builder = std_alloc_block::allocate(nz!(100)).into_span_builder();

        let mut buf = BytesBuf::from_span_builders([span_builder]);
        buf.put_byte_repeated(0, 100);

        let view = buf.consume_all();

        assert_eq!(0, view.range(0..0).len());
        assert_eq!(1, view.range(0..=0).len());
        assert_eq!(0, view.range(..0).len());
        assert_eq!(1, view.range(..=0).len());
        assert_eq!(0, view.range(100..100).len());
        assert_eq!(0, view.range(99..99).len());
        assert_eq!(1, view.range(99..=99).len());
        assert_eq!(1, view.range(99..).len());
        assert_eq!(100, view.range(..).len());
    }
1279
    // Empty ranges inside (or exactly at the end of) the view are fine;
    // empty ranges past the end are not.
    #[test]
    fn slice_empty_is_empty_if_not_oob() {
        let span_builder = std_alloc_block::allocate(nz!(100)).into_span_builder();

        let mut buf = BytesBuf::from_span_builders([span_builder]);

        for i in 0..100 {
            buf.put_byte(i);
        }

        let view = buf.consume_all();

        let sub_view = view.range(50..50);
        assert_eq!(0, sub_view.len());

        let sub_view = view.range(100..100);
        assert_eq!(0, sub_view.len());
        assert!(view.range_checked(101..101).is_none());
    }

    // consume_all_slices() visits every slice exactly once, in order, and
    // leaves the view empty.
    #[test]
    fn consume_all_slices() {
        const SPAN_SIZE: NonZero<BlockSize> = nz!(10);

        let span_builders = iter::repeat_with(|| std_alloc_block::allocate(SPAN_SIZE).into_span_builder())
            .take(10)
            .collect::<Vec<_>>();

        let mut buf = BytesBuf::from_span_builders(span_builders);

        for i in 0..100 {
            buf.put_byte(i);
        }

        let mut view = buf.consume_all();

        let mut slice_index = 0;
        let mut bytes_consumed = 0;

        view.consume_all_slices(|slice| {
            assert_eq!(slice.len(), 10);
            bytes_consumed += slice.len();

            for i in 0..10 {
                assert_eq!(slice_index * 10 + i, slice[i] as usize);
            }

            slice_index += 1;
        });

        assert_eq!(bytes_consumed, 100);

        // Second pass over the now-empty view must not invoke the callback.
        view.consume_all_slices(|_| unreachable!("view should now be empty"));
    }

    // A view can be moved to and consumed on another thread (Send).
    #[test]
    fn multithreaded_usage() {
        fn post_to_another_thread(view: BytesView) {
            thread::spawn(move || {
                let mut view = view;
                assert_eq!(view.get_byte(), b'H');
                assert_eq!(view.get_byte(), b'e');
                assert_eq!(view.get_byte(), b'l');
                assert_eq!(view.get_byte(), b'l');
                assert_eq!(view.get_byte(), b'o');
            })
            .join()
            .unwrap();
        }

        let memory = TransparentMemory::new();
        let view = BytesView::copied_from_slice(b"Hello, world!", &memory);

        post_to_another_thread(view);
    }

    // slices() yields one entry per underlying span, in order.
    #[test]
    fn slices_iterator() {
        let memory = TransparentMemory::new();
        let segment1 = BytesView::copied_from_slice(b"Hello, world!", &memory);
        let segment2 = BytesView::copied_from_slice(b"Hello, another world!", &memory);

        let view = BytesView::from_views(vec![segment1.clone(), segment2.clone()]);

        let slices: Vec<_> = view.slices().collect();

        assert_eq!(slices.len(), 2);
        assert_eq!(slices[0].0.len(), segment1.len());
        assert_eq!(slices[1].0.len(), segment2.len());
    }

    // slices() over an empty view yields nothing.
    #[test]
    fn slices_iterator_empty() {
        let view = BytesView::new();
        assert_eq!(view.slices().count(), 0);
    }

    // View-to-view equality compares contents, not span layout.
    #[test]
    fn eq_view() {
        let memory = TransparentMemory::new();

        let view1 = BytesView::copied_from_slice(b"Hello, world!", &memory);
        let view2 = BytesView::copied_from_slice(b"Hello, world!", &memory);

        assert_eq!(view1, view2);

        let view3 = BytesView::copied_from_slice(b"Jello, world!", &memory);

        assert_ne!(view1, view3);

        let view4 = BytesView::copied_from_slice(b"Hello, world! ", &memory);

        assert_ne!(view1, view4);

        // Same bytes split differently across spans still compare equal.
        let view5_part1 = BytesView::copied_from_slice(b"Hello, ", &memory);
        let view5_part2 = BytesView::copied_from_slice(b"world!", &memory);
        let view5 = BytesView::from_views([view5_part1, view5_part2]);

        assert_eq!(view1, view5);
        assert_ne!(view5, view3);

        let view6 = BytesView::copied_from_slice(b"Hello, ", &memory);

        assert_ne!(view1, view6);
        assert_ne!(view5, view6);
    }
1408
    // View-to-slice equality works in both directions and across span splits.
    #[test]
    fn eq_slice() {
        let memory = TransparentMemory::new();

        let view1 = BytesView::copied_from_slice(b"Hello, world!", &memory);

        assert_eq!(view1, b"Hello, world!".as_slice());
        assert_ne!(view1, b"Jello, world!".as_slice());
        assert_ne!(view1, b"Hello, world! ".as_slice());

        assert_eq!(b"Hello, world!".as_slice(), view1);
        assert_ne!(b"Jello, world!".as_slice(), view1);
        assert_ne!(b"Hello, world! ".as_slice(), view1);

        let view2_part1 = BytesView::copied_from_slice(b"Hello, ", &memory);
        let view2_part2 = BytesView::copied_from_slice(b"world!", &memory);
        let view2 = BytesView::from_views([view2_part1, view2_part2]);

        assert_eq!(view2, b"Hello, world!".as_slice());
        assert_ne!(view2, b"Jello, world!".as_slice());
        assert_ne!(view2, b"Hello, world! ".as_slice());
        assert_ne!(view2, b"Hello, ".as_slice());

        assert_eq!(b"Hello, world!".as_slice(), view2);
        assert_ne!(b"Jello, world!".as_slice(), view2);
        assert_ne!(b"Hello, world! ".as_slice(), view2);
        assert_ne!(b"Hello, ".as_slice(), view2);
    }

    // View-to-array equality (const-generic impls) works in both directions.
    #[test]
    fn eq_array() {
        let memory = TransparentMemory::new();

        let view1 = BytesView::copied_from_slice(b"Hello, world!", &memory);

        assert_eq!(view1, b"Hello, world!");
        assert_ne!(view1, b"Jello, world!");
        assert_ne!(view1, b"Hello, world! ");

        assert_eq!(b"Hello, world!", view1);
        assert_ne!(b"Jello, world!", view1);
        assert_ne!(b"Hello, world! ", view1);

        let view2_part1 = BytesView::copied_from_slice(b"Hello, ", &memory);
        let view2_part2 = BytesView::copied_from_slice(b"world!", &memory);
        let view2 = BytesView::from_views([view2_part1, view2_part2]);

        assert_eq!(view2, b"Hello, world!");
        assert_ne!(view2, b"Jello, world!");
        assert_ne!(view2, b"Hello, world! ");
        assert_ne!(view2, b"Hello, ");

        assert_eq!(b"Hello, world!", view2);
        assert_ne!(b"Jello, world!", view2);
        assert_ne!(b"Hello, world! ", view2);
        assert_ne!(b"Hello, ", view2);
    }

    // Blocks without metadata yield None for every slice's meta.
    #[test]
    fn meta_none() {
        let memory = TransparentMemory::new();

        let view1 = BytesView::copied_from_slice(b"Hello, ", &memory);
        let view2 = BytesView::copied_from_slice(b"world!", &memory);

        let view = BytesView::from_views([view1, view2]);

        let mut slices_iter = view.slices();

        let (data1, meta1) = slices_iter.next().expect("should have first slice");
        assert!(!data1.is_empty());
        assert!(meta1.is_none());

        let (data2, meta2) = slices_iter.next().expect("should have second slice");
        assert!(!data2.is_empty());
        assert!(meta2.is_none());

        assert!(slices_iter.next().is_none());
    }

    // Per-block metadata is surfaced through the slices iterator and is
    // downcastable to the concrete type attached to each block.
    #[test]
    fn meta_some() {
        #[derive(Debug)]
        struct GreenMeta;
        #[derive(Debug)]
        struct BlueMeta;

        impl BlockMeta for GreenMeta {}
        impl BlockMeta for BlueMeta {}

        let block1 = unsafe { TestMemoryBlock::new(nz!(100), Some(Box::new(GreenMeta {}))) };
        let block1 = pin!(block1);

        let block2 = unsafe { TestMemoryBlock::new(nz!(100), Some(Box::new(BlueMeta {}))) };
        let block2 = pin!(block2);

        let block1 = unsafe { block1.as_ref().to_block() };
        let block2 = unsafe { block2.as_ref().to_block() };

        let mut buf = BytesBuf::from_blocks([block1, block2]);

        buf.put_byte_repeated(123, 166);

        let view = buf.consume_all();

        let mut slices_iter = view.slices();

        // NOTE(review): the first yielded slice carries BlueMeta (block2) -
        // presumably from_blocks/consume_all order the blocks this way;
        // verify against BytesBuf if this ever changes.
        let (data1, meta1) = slices_iter.next().expect("should have first block");
        assert!(!data1.is_empty());
        assert!(meta1.is_some());
        assert!(meta1.unwrap().is::<BlueMeta>());
        assert!(!meta1.unwrap().is::<GreenMeta>());

        let (data2, meta2) = slices_iter.next().expect("should have second block");
        assert!(!data2.is_empty());
        assert!(meta2.is_some());
        assert!(meta2.unwrap().is::<GreenMeta>());
        assert!(!meta2.unwrap().is::<BlueMeta>());

        assert!(slices_iter.next().is_none(), "should have no more slices");
    }

    // append() joins two single-span views and updates the length.
    #[test]
    fn append_single_span() {
        let memory = TransparentMemory::new();

        let mut view1 = BytesView::copied_from_slice(b"Hello, ", &memory);
        let view2 = BytesView::copied_from_slice(b"world!", &memory);

        assert_eq!(view1.len(), 7);
        assert_eq!(view2.len(), 6);

        view1.append(view2);

        assert_eq!(view1.len(), 13);
        assert_eq!(view1, b"Hello, world!");
    }
1559
1560 #[test]
1561 fn append_multi_span() {
1562 let memory = TransparentMemory::new();
1563
1564 let view1_part1 = BytesView::copied_from_slice(b"AAA", &memory);
1566 let view1_part2 = BytesView::copied_from_slice(b"BBB", &memory);
1567 let mut view1 = BytesView::from_views([view1_part1, view1_part2]);
1568
1569 let view2_part1 = BytesView::copied_from_slice(b"CCC", &memory);
1570 let view2_part2 = BytesView::copied_from_slice(b"DDD", &memory);
1571 let view2 = BytesView::from_views([view2_part1, view2_part2]);
1572
1573 assert_eq!(view1.len(), 6);
1574 assert_eq!(view2.len(), 6);
1575
1576 view1.append(view2);
1577
1578 assert_eq!(view1.len(), 12);
1579 assert_eq!(view1, b"AAABBBCCCDDD");
1580 }
1581
1582 #[test]
1583 fn append_empty_view() {
1584 let memory = TransparentMemory::new();
1585
1586 let mut view1 = BytesView::copied_from_slice(b"Hello", &memory);
1587 let view2 = BytesView::new();
1588
1589 view1.append(view2);
1590 assert_eq!(view1.len(), 5);
1591 assert_eq!(view1, b"Hello");
1592
1593 let mut view3 = BytesView::new();
1594 let view4 = BytesView::copied_from_slice(b"world", &memory);
1595
1596 view3.append(view4);
1597 assert_eq!(view3.len(), 5);
1598 assert_eq!(view3, b"world");
1599 }
1600
1601 #[test]
1602 fn concat_single_span() {
1603 let memory = TransparentMemory::new();
1604
1605 let view1 = BytesView::copied_from_slice(b"Hello, ", &memory);
1607 let view2 = BytesView::copied_from_slice(b"world!", &memory);
1608
1609 assert_eq!(view1.len(), 7);
1610 assert_eq!(view2.len(), 6);
1611
1612 let view3 = view1.concat(view2);
1613
1614 assert_eq!(view1.len(), 7);
1616 assert_eq!(view1, b"Hello, ");
1617
1618 assert_eq!(view3.len(), 13);
1620 assert_eq!(view3, b"Hello, world!");
1621 }
1622
1623 #[test]
1624 fn concat_multi_span() {
1625 let memory = TransparentMemory::new();
1626
1627 let view1_part1 = BytesView::copied_from_slice(b"AAA", &memory);
1629 let view1_part2 = BytesView::copied_from_slice(b"BBB", &memory);
1630 let view1 = BytesView::from_views([view1_part1, view1_part2]);
1631
1632 let view2_part1 = BytesView::copied_from_slice(b"CCC", &memory);
1633 let view2_part2 = BytesView::copied_from_slice(b"DDD", &memory);
1634 let view2 = BytesView::from_views([view2_part1, view2_part2]);
1635
1636 assert_eq!(view1.len(), 6);
1637 assert_eq!(view2.len(), 6);
1638
1639 let view3 = view1.concat(view2);
1640
1641 assert_eq!(view1.len(), 6);
1643 assert_eq!(view1, b"AAABBB");
1644
1645 assert_eq!(view3.len(), 12);
1647 assert_eq!(view3, b"AAABBBCCCDDD");
1648 }
1649
1650 #[test]
1651 fn concat_empty_views() {
1652 let memory = TransparentMemory::new();
1653
1654 let view1 = BytesView::copied_from_slice(b"Hello", &memory);
1655 let view2 = BytesView::new();
1656
1657 let view3 = view1.concat(view2);
1658 assert_eq!(view3.len(), 5);
1659 assert_eq!(view3, b"Hello");
1660
1661 let view4 = BytesView::new();
1662 let view5 = BytesView::copied_from_slice(b"world", &memory);
1663
1664 let view6 = view4.concat(view5);
1665 assert_eq!(view6.len(), 5);
1666 assert_eq!(view6, b"world");
1667 }
1668
    #[test]
    fn size_change_detector() {
        // Change detector: guards against accidental growth of `BytesView`
        // (the inline `SmallVec<[Span; MAX_INLINE_SPANS]>` plus the cached
        // `len` field). Update the expected size deliberately if the struct
        // layout is intentionally changed.
        assert_eq!(size_of::<BytesView>(), 272);
    }
1676}