use std::{
    mem::ManuallyDrop,
    ops::Deref,
    sync::{
        atomic::{AtomicU64, Ordering},
        Arc,
    },
};

pub use crate::builder::Builder;

#[cfg(target_pointer_width = "64")]
const INLINE_SIZE: usize = 20;

#[cfg(target_pointer_width = "32")]
const INLINE_SIZE: usize = 16;

const PREFIX_SIZE: usize = 4;

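/// Header placed at the start of every heap allocation.
///
/// The allocation layout is `[HeapAllocationHeader][data...]`; the header holds
/// the atomic reference count shared by all views into the allocation.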
#[repr(C)]
struct HeapAllocationHeader {
    ref_count: AtomicU64,
}

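/// Inline ("short") representation: slices of up to `INLINE_SIZE` bytes are
/// stored directly inside the view, without any heap allocation.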
#[repr(C)]
struct ShortRepr {
    len: u32,
    data: [u8; INLINE_SIZE],
}

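/// Heap-backed ("long") representation: the view stores the length, a copy of
/// the first `PREFIX_SIZE` bytes and a pointer to a reference-counted heap
/// allocation; `offset` and `original_len` enable zero-copy slicing.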
#[repr(C)]
struct LongRepr {
    len: u32,
    prefix: [u8; PREFIX_SIZE],
    heap: *const u8,
    original_len: u32,
    offset: u32,
}

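/// Union over the two representations.
///
/// Both variants are `#[repr(C)]` and start with `len: u32` followed by at
/// least `PREFIX_SIZE` bytes of data, so the length and prefix can be read
/// through the `short` field no matter which representation is active.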
#[repr(C)]
pub union Trailer {
    short: ManuallyDrop<ShortRepr>,
    long: ManuallyDrop<LongRepr>,
}

impl Default for Trailer {
    fn default() -> Self {
        Self {
            short: ManuallyDrop::new(ShortRepr {
                len: 0,
                data: [0; INLINE_SIZE],
            }),
        }
    }
}

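/// An immutable, cheaply cloneable byte slice.
///
/// Slices of up to `INLINE_SIZE` bytes are stored inline; longer slices live in
/// a reference-counted heap allocation, so cloning and slicing never copy the
/// underlying data.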
#[repr(C)]
#[derive(Default)]
pub struct ByteView {
    trailer: Trailer,
}

#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl Send for ByteView {}
#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl Sync for ByteView {}

impl Clone for ByteView {
    fn clone(&self) -> Self {
        self.slice(..)
    }
}

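// Dropping a heap-backed view decrements the shared reference count and
// deallocates the heap region once the last view is gone.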
impl Drop for ByteView {
    fn drop(&mut self) {
        if self.is_inline() {
            return;
        }

        let heap_region = self.get_heap_region();

        if heap_region.ref_count.fetch_sub(1, Ordering::AcqRel) != 1 {
            return;
        }

        unsafe {
            let header_size = std::mem::size_of::<HeapAllocationHeader>();
            let alignment = std::mem::align_of::<HeapAllocationHeader>();
            let total_size = header_size + self.trailer.long.original_len as usize;
            let layout = std::alloc::Layout::from_size_align(total_size, alignment).unwrap();

            let ptr = self.trailer.long.heap.cast_mut();
            std::alloc::dealloc(ptr, layout);
        }
    }
}

impl Eq for ByteView {}

impl std::cmp::PartialEq for ByteView {
    fn eq(&self, other: &Self) -> bool {
        unsafe {
            // Fast path: the first 8 bytes cover `len` plus the prefix (or the
            // first inline bytes), so one word comparison rejects most unequal
            // slices. Read unaligned, since the struct is only guaranteed
            // 4-byte alignment on 32-bit targets.
            let src_ptr = (self as *const Self).cast::<u8>();
            let other_ptr: *const u8 = (other as *const Self).cast::<u8>();

            let a = src_ptr.cast::<u64>().read_unaligned();
            let b = other_ptr.cast::<u64>().read_unaligned();

            if a != b {
                return false;
            }
        }

        // Lengths are equal at this point, so both sides use the same representation.
        if self.is_inline() {
            self.get_short_slice() == other.get_short_slice()
        } else {
            self.get_long_slice() == other.get_long_slice()
        }
    }
}

impl std::cmp::Ord for ByteView {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.prefix()
            .cmp(other.prefix())
            .then_with(|| self.deref().cmp(&**other))
    }
}

impl std::cmp::PartialOrd for ByteView {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl std::fmt::Debug for ByteView {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", &**self)
    }
}

impl Deref for ByteView {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        if self.is_inline() {
            self.get_short_slice()
        } else {
            self.get_long_slice()
        }
    }
}

impl std::hash::Hash for ByteView {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.deref().hash(state);
    }
}

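/// Exclusive write guard over a [`ByteView`], returned by [`ByteView::get_mut`].
///
/// On drop, the view's cached prefix is recomputed so that comparisons stay
/// consistent with the modified contents.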
pub struct Mutator<'a>(pub(crate) &'a mut ByteView);

impl std::ops::Deref for Mutator<'_> {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        self.0
    }
}

impl std::ops::DerefMut for Mutator<'_> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.get_mut_slice()
    }
}

impl Drop for Mutator<'_> {
    fn drop(&mut self) {
        self.0.update_prefix();
    }
}

impl ByteView {
    #[doc(hidden)]
    #[must_use]
    pub unsafe fn builder_unzeroed(len: usize) -> Builder {
        Builder::new(Self::with_size_unzeroed(len))
    }

    #[doc(hidden)]
    #[must_use]
    pub fn builder(len: usize) -> Builder {
        Builder::new(Self::with_size(len))
    }

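    /// Returns the first `PREFIX_SIZE` bytes (or the whole slice if it is shorter).
    ///
    /// Both representations store these bytes at the same offset, so they can
    /// always be read through the `short` field of the union.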
    fn prefix(&self) -> &[u8] {
        let len = PREFIX_SIZE.min(self.len());

        unsafe { self.trailer.short.data.get_unchecked(..len) }
    }

    fn is_inline(&self) -> bool {
        self.len() <= INLINE_SIZE
    }

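    /// Recomputes the cached prefix of a heap-backed view from its current
    /// contents; called after the bytes have been mutated (see [`Mutator`]).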
    pub(crate) fn update_prefix(&mut self) {
        if !self.is_inline() {
            unsafe {
                let slice_ptr: &[u8] = &*self;
                let slice_ptr = slice_ptr.as_ptr();

                (*self.trailer.long).prefix[0] = 0;
                (*self.trailer.long).prefix[1] = 0;
                (*self.trailer.long).prefix[2] = 0;
                (*self.trailer.long).prefix[3] = 0;

                let prefix = (*self.trailer.long).prefix.as_mut_ptr();
                std::ptr::copy_nonoverlapping(slice_ptr, prefix, self.len().min(4));
            }
        }
    }

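    /// Returns a [`Mutator`] that allows mutating the slice's contents.
    ///
    /// Returns `None` if the allocation is shared with other views, i.e. the
    /// reference count is greater than one.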
    pub fn get_mut(&mut self) -> Option<Mutator<'_>> {
        if self.ref_count() == 1 {
            Some(Mutator(self))
        } else {
            None
        }
    }

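    /// Creates a new view of length `len` by reading exactly `len` bytes from
    /// the given reader.
    ///
    /// # Errors
    ///
    /// Returns an I/O error if the reader fails or ends before `len` bytes are read.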
    pub fn from_reader<R: std::io::Read>(reader: &mut R, len: usize) -> std::io::Result<Self> {
        let mut s = unsafe { Self::with_size_unzeroed(len) };
        {
            let mut builder = Mutator(&mut s);
            reader.read_exact(&mut builder)?;
        }
        Ok(s)
    }

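    /// Creates a new view containing `left` followed by `right`.
    ///
    /// # Panics
    ///
    /// Panics if the combined length does not fit into a `u32`.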
    #[must_use]
    pub fn fused(left: &[u8], right: &[u8]) -> Self {
        let len = left.len() + right.len();
        let mut builder = unsafe { Self::builder_unzeroed(len) };
        builder[0..left.len()].copy_from_slice(left);
        builder[left.len()..].copy_from_slice(right);
        builder.freeze()
    }

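    /// Creates a new zero-filled view of the given length.
    ///
    /// # Panics
    ///
    /// Panics if the length does not fit into a `u32`.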
    #[must_use]
    pub fn with_size(slice_len: usize) -> Self {
        Self::with_size_zeroed(slice_len)
    }

    fn with_size_zeroed(slice_len: usize) -> Self {
        let view = if slice_len <= INLINE_SIZE {
            Self {
                trailer: Trailer {
                    short: ManuallyDrop::new(ShortRepr {
                        #[allow(clippy::cast_possible_truncation)]
                        len: slice_len as u32,
                        data: [0; INLINE_SIZE],
                    }),
                },
            }
        } else {
            let Ok(len) = u32::try_from(slice_len) else {
                panic!("byte slice too long");
            };

            unsafe {
                const HEADER_SIZE: usize = std::mem::size_of::<HeapAllocationHeader>();
                const ALIGNMENT: usize = std::mem::align_of::<HeapAllocationHeader>();

                let total_size = HEADER_SIZE + slice_len;
                let layout = std::alloc::Layout::from_size_align(total_size, ALIGNMENT).unwrap();

                let heap_ptr = std::alloc::alloc_zeroed(layout);
                if heap_ptr.is_null() {
                    std::alloc::handle_alloc_error(layout);
                }

                let heap_region = heap_ptr as *const HeapAllocationHeader;
                let heap_region = &*heap_region;
                heap_region.ref_count.store(1, Ordering::Release);

                Self {
                    trailer: Trailer {
                        long: ManuallyDrop::new(LongRepr {
                            len,
                            prefix: [0; PREFIX_SIZE],
                            heap: heap_ptr,
                            original_len: len,
                            offset: 0,
                        }),
                    },
                }
            }
        };

        debug_assert_eq!(1, view.ref_count());

        view
    }

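    /// Creates a new view of the given length without zeroing the contents.
    ///
    /// # Safety
    ///
    /// For heap-allocated views the bytes are uninitialized; the caller must
    /// fully initialize them before the slice is read.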
    #[doc(hidden)]
    #[must_use]
    pub unsafe fn with_size_unzeroed(slice_len: usize) -> Self {
        let view = if slice_len <= INLINE_SIZE {
            Self {
                trailer: Trailer {
                    short: ManuallyDrop::new(ShortRepr {
                        #[allow(clippy::cast_possible_truncation)]
                        len: slice_len as u32,
                        data: [0; INLINE_SIZE],
                    }),
                },
            }
        } else {
            let Ok(len) = u32::try_from(slice_len) else {
                panic!("byte slice too long");
            };

            unsafe {
                const HEADER_SIZE: usize = std::mem::size_of::<HeapAllocationHeader>();
                const ALIGNMENT: usize = std::mem::align_of::<HeapAllocationHeader>();

                let total_size = HEADER_SIZE + slice_len;
                let layout = std::alloc::Layout::from_size_align(total_size, ALIGNMENT).unwrap();

                let heap_ptr = std::alloc::alloc(layout);
                if heap_ptr.is_null() {
                    std::alloc::handle_alloc_error(layout);
                }

                let heap_region = heap_ptr as *const HeapAllocationHeader;
                let heap_region = &*heap_region;
                heap_region.ref_count.store(1, Ordering::Release);

                Self {
                    trailer: Trailer {
                        long: ManuallyDrop::new(LongRepr {
                            len,
                            prefix: [0; PREFIX_SIZE],
                            heap: heap_ptr,
                            original_len: len,
                            offset: 0,
                        }),
                    },
                }
            }
        };

        debug_assert_eq!(1, view.ref_count());

        view
    }

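    /// Creates a new view by copying the given slice.
    ///
    /// # Panics
    ///
    /// Panics if the slice is longer than `u32::MAX` bytes.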
    #[must_use]
    pub fn new(slice: &[u8]) -> Self {
        let slice_len = slice.len();

        let mut view = unsafe { Self::with_size_unzeroed(slice_len) };

        if view.is_inline() {
            unsafe {
                let data_ptr = std::ptr::addr_of_mut!((*view.trailer.short).data).cast();
                std::ptr::copy_nonoverlapping(slice.as_ptr(), data_ptr, slice_len);
            }
        } else {
            let long_repr = unsafe { &mut *view.trailer.long };

            #[allow(clippy::indexing_slicing)]
            long_repr.prefix.copy_from_slice(&slice[0..PREFIX_SIZE]);

            view.get_mut_slice().copy_from_slice(slice);
        }

        debug_assert_eq!(1, view.ref_count());

        view
    }

    unsafe fn data_ptr(&self) -> *const u8 {
        const HEADER_SIZE: usize = std::mem::size_of::<HeapAllocationHeader>();

        debug_assert!(!self.is_inline());

        self.trailer
            .long
            .heap
            .add(HEADER_SIZE)
            .add(self.trailer.long.offset as usize)
    }

    unsafe fn data_ptr_mut(&mut self) -> *mut u8 {
        const HEADER_SIZE: usize = std::mem::size_of::<HeapAllocationHeader>();

        debug_assert!(!self.is_inline());

        self.trailer
            .long
            .heap
            .add(HEADER_SIZE)
            .add(self.trailer.long.offset as usize)
            .cast_mut()
    }

    fn get_heap_region(&self) -> &HeapAllocationHeader {
        debug_assert!(
            !self.is_inline(),
            "inline slice does not have a heap allocation"
        );

        unsafe {
            let ptr = self.trailer.long.heap;
            let heap_region: *const HeapAllocationHeader = ptr.cast::<HeapAllocationHeader>();
            &*heap_region
        }
    }

    #[doc(hidden)]
    #[must_use]
    pub fn ref_count(&self) -> u64 {
        if self.is_inline() {
            1
        } else {
            self.get_heap_region().ref_count.load(Ordering::Acquire)
        }
    }

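    /// Creates a detached copy that owns its own allocation instead of sharing
    /// the (possibly much larger) original heap region.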
    #[must_use]
    pub fn to_detached(&self) -> Self {
        Self::new(self)
    }

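    /// Returns a view over the given range of this slice.
    ///
    /// Ranges of up to `INLINE_SIZE` bytes are copied into a new inline view;
    /// longer ranges share the original heap allocation and only increment its
    /// reference count.
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds.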
    #[must_use]
    pub fn slice(&self, range: impl std::ops::RangeBounds<usize>) -> Self {
        use core::ops::Bound;

        let self_len = self.len();

        let begin = match range.start_bound() {
            Bound::Included(&n) => n,
            Bound::Excluded(&n) => n.checked_add(1).expect("out of range"),
            Bound::Unbounded => 0,
        };

        let end = match range.end_bound() {
            Bound::Included(&n) => n.checked_add(1).expect("out of range"),
            Bound::Excluded(&n) => n,
            Bound::Unbounded => self_len,
        };

        assert!(
            begin <= end,
            "range start must not be greater than end: {begin:?} <= {end:?}",
        );
        assert!(
            end <= self_len,
            "range end out of bounds: {end:?} <= {self_len:?}",
        );

        let new_len = end - begin;
        let len = u32::try_from(new_len).unwrap();

        if new_len <= INLINE_SIZE {
            let mut child = Self {
                trailer: Trailer {
                    short: ManuallyDrop::new(ShortRepr {
                        len,
                        data: [0; INLINE_SIZE],
                    }),
                },
            };

            let slice = &self[begin..end];
            debug_assert_eq!(slice.len(), new_len);

            let data_ptr = unsafe { &mut (*child.trailer.short).data };

            unsafe {
                std::ptr::copy_nonoverlapping(slice.as_ptr(), data_ptr.as_mut_ptr(), new_len);
            }

            child
        } else {
            let heap_region = self.get_heap_region();
            heap_region.ref_count.fetch_add(1, Ordering::Release);

            let mut child = Self {
                trailer: Trailer {
                    long: ManuallyDrop::new(LongRepr {
                        len,
                        prefix: [0; PREFIX_SIZE],
                        heap: unsafe { self.trailer.long.heap },
                        offset: unsafe { self.trailer.long.offset } + begin as u32,
                        original_len: unsafe { self.trailer.long.original_len },
                    }),
                },
            };

            let prefix = &self[begin..(begin + 4)];
            debug_assert_eq!(prefix.len(), 4);

            unsafe {
                (*child.trailer.long).prefix.copy_from_slice(prefix);
            }

            child
        }
    }

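    /// Returns `true` if the slice starts with the given needle.
    ///
    /// The cached prefix is checked first so that mismatches are usually
    /// detected without touching the heap data.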
    pub fn starts_with<T: AsRef<[u8]>>(&self, needle: T) -> bool {
        let needle = needle.as_ref();

        unsafe {
            let len = PREFIX_SIZE.min(needle.len());
            let needle_prefix: &[u8] = needle.get_unchecked(..len);

            if !self.prefix().starts_with(needle_prefix) {
                return false;
            }
        }

        self.deref().starts_with(needle)
    }

    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

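    /// Returns the length of the slice in bytes.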
    #[must_use]
    pub fn len(&self) -> usize {
        unsafe { self.trailer.short.len as usize }
    }

    pub(crate) fn get_mut_slice(&mut self) -> &mut [u8] {
        let len = self.len();

        if self.is_inline() {
            unsafe { std::slice::from_raw_parts_mut((*self.trailer.short).data.as_mut_ptr(), len) }
        } else {
            unsafe { std::slice::from_raw_parts_mut(self.data_ptr_mut(), len) }
        }
    }

    fn get_short_slice(&self) -> &[u8] {
        let len = self.len();

        debug_assert!(
            len <= INLINE_SIZE,
            "cannot get short slice - slice is not inlined",
        );

        unsafe { std::slice::from_raw_parts((*self.trailer.short).data.as_ptr(), len) }
    }

    fn get_long_slice(&self) -> &[u8] {
        let len = self.len();

        debug_assert!(
            len > INLINE_SIZE,
            "cannot get long slice - slice is inlined"
        );

        unsafe { std::slice::from_raw_parts(self.data_ptr(), len) }
    }
}

impl std::borrow::Borrow<[u8]> for ByteView {
    fn borrow(&self) -> &[u8] {
        self
    }
}

impl AsRef<[u8]> for ByteView {
    fn as_ref(&self) -> &[u8] {
        self
    }
}

impl FromIterator<u8> for ByteView {
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = u8>,
    {
        Self::from(iter.into_iter().collect::<Vec<u8>>())
    }
}

impl From<&[u8]> for ByteView {
    fn from(value: &[u8]) -> Self {
        Self::new(value)
    }
}

impl From<Arc<[u8]>> for ByteView {
    fn from(value: Arc<[u8]>) -> Self {
        Self::new(&value)
    }
}

impl From<Vec<u8>> for ByteView {
    fn from(value: Vec<u8>) -> Self {
        Self::new(&value)
    }
}

impl From<&str> for ByteView {
    fn from(value: &str) -> Self {
        Self::from(value.as_bytes())
    }
}

impl From<String> for ByteView {
    fn from(value: String) -> Self {
        Self::from(value.as_bytes())
    }
}

impl From<Arc<str>> for ByteView {
    fn from(value: Arc<str>) -> Self {
        Self::from(&*value)
    }
}

impl<const N: usize> From<[u8; N]> for ByteView {
    fn from(value: [u8; N]) -> Self {
        Self::from(value.as_slice())
    }
}

#[cfg(feature = "serde")]
mod serde {
    use super::ByteView;
    use serde::de::{self, Visitor};
    use serde::{Deserialize, Deserializer, Serialize, Serializer};
    use std::fmt;

    impl Serialize for ByteView {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            serializer.serialize_bytes(self)
        }
    }

    impl<'de> Deserialize<'de> for ByteView {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            struct ByteViewVisitor;

            impl<'de> Visitor<'de> for ByteViewVisitor {
                type Value = ByteView;

                fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                    formatter.write_str("a byte array")
                }

                fn visit_bytes<E>(self, v: &[u8]) -> Result<ByteView, E>
                where
                    E: de::Error,
                {
                    Ok(ByteView::new(v))
                }

                fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
                where
                    A: de::SeqAccess<'de>,
                {
                    let bytes: Vec<u8> =
                        Deserialize::deserialize(de::value::SeqAccessDeserializer::new(seq))?;

                    Ok(ByteView::new(&bytes))
                }
            }

            deserializer.deserialize_bytes(ByteViewVisitor)
        }
    }
}

#[cfg(test)]
mod tests {
    use super::{ByteView, HeapAllocationHeader};
    use std::io::Cursor;

    #[test]
    #[cfg(not(miri))]
    fn test_rkyv() {
        use rkyv::{rancor::Error, Archive, Deserialize, Serialize};

        #[derive(Debug, Archive, Deserialize, Serialize, PartialEq)]
        #[rkyv(archived = ArchivedPerson)]
        struct Person {
            id: i64,
            name: String,
        }

        {
            let a = Person {
                id: 1,
                name: "Alicia".to_string(),
            };

            let bytes = rkyv::to_bytes::<Error>(&a).unwrap();
            let bytes = ByteView::from(&*bytes);

            let archived: &ArchivedPerson = rkyv::access::<_, Error>(&bytes).unwrap();
            assert_eq!(archived.id, a.id);
            assert_eq!(archived.name, a.name);
        }

        {
            let a = Person {
                id: 1,
                name: "Alicia I need a very long string for heap allocation".to_string(),
            };

            let bytes = rkyv::to_bytes::<Error>(&a).unwrap();
            let bytes = ByteView::from(&*bytes);

            let archived: &ArchivedPerson = rkyv::access::<_, Error>(&bytes).unwrap();
            assert_eq!(archived.id, a.id);
            assert_eq!(archived.name, a.name);
        }
    }

    #[test]
    #[cfg(target_pointer_width = "64")]
    fn memsize() {
        use crate::byteview::{LongRepr, ShortRepr, Trailer};

        assert_eq!(
            std::mem::size_of::<ShortRepr>(),
            std::mem::size_of::<LongRepr>()
        );
        assert_eq!(
            std::mem::size_of::<Trailer>(),
            std::mem::size_of::<LongRepr>()
        );

        assert_eq!(24, std::mem::size_of::<ByteView>());
        assert_eq!(
            32,
            std::mem::size_of::<ByteView>() + std::mem::size_of::<HeapAllocationHeader>()
        );
    }

    #[test]
    fn sliced_clone() {
        let s = ByteView::from([
            1, 255, 255, 255, 251, 255, 255, 255, 255, 255, 1, 21, 255, 255, 255, 255, 5, 255, 255,
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 0, 0, 4, 3, 255,
            255, 0, 0, 255, 0, 0, 0, 254, 2, 0, 0, 0, 5, 2, 42, 0, 0, 0, 1, 0, 0, 0, 44, 0, 0, 0,
            2, 0, 0, 0,
        ]);
        let slice = s.slice(12..(12 + 21));

        #[allow(clippy::redundant_clone)]
        let cloned = slice.clone();

        assert_eq!(slice.prefix(), cloned.prefix());
        assert_eq!(slice, cloned);
    }

    #[test]
    fn fuse_empty() {
        let bytes = ByteView::fused(&[], &[]);
        assert_eq!(&*bytes, []);
    }

    #[test]
    fn fuse_one() {
        let bytes = ByteView::fused(b"abc", &[]);
        assert_eq!(&*bytes, b"abc");
    }

    #[test]
    fn fuse_two() {
        let bytes = ByteView::fused(b"abc", b"def");
        assert_eq!(&*bytes, b"abcdef");
    }

    #[test]
    fn empty_slice() {
        let bytes = ByteView::with_size_zeroed(0);
        assert_eq!(&*bytes, []);
    }

    #[test]
    fn dealloc_order() {
        let bytes = ByteView::new(&(0..32).collect::<Vec<_>>());
        let bytes_slice = bytes.slice(..31);
        drop(bytes);
        drop(bytes_slice);
    }

    #[test]
    fn dealloc_order_2() {
        let bytes = ByteView::new(&(0..32).collect::<Vec<_>>());
        let bytes_slice = bytes.slice(..31);
        let bytes_slice_2 = bytes.slice(..5);
        let bytes_slice_3 = bytes.slice(..6);

        drop(bytes);
        drop(bytes_slice);
        drop(bytes_slice_2);
        drop(bytes_slice_3);
    }

    #[test]
    fn from_reader_1() -> std::io::Result<()> {
        let str = b"abcdef";
        let mut cursor = Cursor::new(str);

        let a = ByteView::from_reader(&mut cursor, 6)?;
        assert!(&*a == b"abcdef");

        Ok(())
    }

    #[test]
    fn cmp_misc_1() {
        let a = ByteView::from("abcdef");
        let b = ByteView::from("abcdefhelloworldhelloworld");
        assert!(a < b);
    }

    #[test]
    fn get_mut() {
        let mut slice = ByteView::with_size(4);
        assert_eq!(4, slice.len());
        assert_eq!([0, 0, 0, 0], &*slice);

        {
            let mut mutator = slice.get_mut().unwrap();
            mutator[0] = 1;
            mutator[1] = 2;
            mutator[2] = 3;
            mutator[3] = 4;
        }

        assert_eq!(4, slice.len());
        assert_eq!([1, 2, 3, 4], &*slice);
        assert_eq!([1, 2, 3, 4], slice.prefix());
    }

    #[test]
    fn get_mut_long() {
        let mut slice = ByteView::with_size(30);
        assert_eq!(30, slice.len());
        assert_eq!([0; 30], &*slice);

        {
            let mut mutator = slice.get_mut().unwrap();
            mutator[0] = 1;
            mutator[1] = 2;
            mutator[2] = 3;
            mutator[3] = 4;
        }

        assert_eq!(30, slice.len());
        assert_eq!(
            [
                1, 2, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0
            ],
            &*slice
        );
        assert_eq!([1, 2, 3, 4], slice.prefix());
    }

    #[test]
    fn nostr() {
        let slice = ByteView::from("");
        assert_eq!(0, slice.len());
        assert_eq!(&*slice, b"");
        assert_eq!(1, slice.ref_count());
        assert!(slice.is_inline());
    }

    #[test]
    fn default_str() {
        let slice = ByteView::default();
        assert_eq!(0, slice.len());
        assert_eq!(&*slice, b"");
        assert_eq!(1, slice.ref_count());
        assert!(slice.is_inline());
    }

    #[test]
    fn short_str() {
        let slice = ByteView::from("abcdef");
        assert_eq!(6, slice.len());
        assert_eq!(&*slice, b"abcdef");
        assert_eq!(1, slice.ref_count());
        assert_eq!(&slice.prefix(), b"abcd");
        assert!(slice.is_inline());
    }

    #[test]
    #[cfg(target_pointer_width = "64")]
    fn medium_str() {
        let slice = ByteView::from("abcdefabcdef");
        assert_eq!(12, slice.len());
        assert_eq!(&*slice, b"abcdefabcdef");
        assert_eq!(1, slice.ref_count());
        assert_eq!(&slice.prefix(), b"abcd");
        assert!(slice.is_inline());
    }

    #[test]
    #[cfg(target_pointer_width = "64")]
    fn medium_long_str() {
        let slice = ByteView::from("abcdefabcdefabcdabcd");
        assert_eq!(20, slice.len());
        assert_eq!(&*slice, b"abcdefabcdefabcdabcd");
        assert_eq!(1, slice.ref_count());
        assert_eq!(&slice.prefix(), b"abcd");
        assert!(slice.is_inline());
    }

    #[test]
    #[cfg(target_pointer_width = "64")]
    fn medium_str_clone() {
        let slice = ByteView::from("abcdefabcdefabcdefab");
        let copy = slice.clone();
        assert_eq!(slice, copy);
        assert_eq!(copy.prefix(), slice.prefix());

        assert_eq!(1, slice.ref_count());

        drop(copy);
        assert_eq!(1, slice.ref_count());
    }

    #[test]
    fn long_str() {
        let slice = ByteView::from("abcdefabcdefabcdefababcd");
        assert_eq!(24, slice.len());
        assert_eq!(&*slice, b"abcdefabcdefabcdefababcd");
        assert_eq!(1, slice.ref_count());
        assert_eq!(&slice.prefix(), b"abcd");
        assert!(!slice.is_inline());
    }

    #[test]
    fn long_str_clone() {
        let slice = ByteView::from("abcdefabcdefabcdefababcd");
        let copy = slice.clone();
        assert_eq!(slice, copy);
        assert_eq!(copy.prefix(), slice.prefix());

        assert_eq!(2, slice.ref_count());

        drop(copy);
        assert_eq!(1, slice.ref_count());
    }

    #[test]
    fn long_str_slice_full() {
        let slice = ByteView::from("helloworld_thisisalongstring");

        let copy = slice.slice(..);
        assert_eq!(copy, slice);

        assert_eq!(2, slice.ref_count());

        drop(copy);
        assert_eq!(1, slice.ref_count());
    }

    #[test]
    #[cfg(target_pointer_width = "64")]
    fn long_str_slice() {
        let slice = ByteView::from("helloworld_thisisalongstring");

        let copy = slice.slice(11..);
        assert_eq!(b"thisisalongstring", &*copy);
        assert_eq!(&copy.prefix(), b"this");

        assert_eq!(1, slice.ref_count());

        drop(copy);
        assert_eq!(1, slice.ref_count());
    }

    #[test]
    #[cfg(target_pointer_width = "64")]
    fn long_str_slice_twice() {
        let slice = ByteView::from("helloworld_thisisalongstring");

        let copy = slice.slice(11..);
        assert_eq!(b"thisisalongstring", &*copy);

        let copycopy = copy.slice(..);
        assert_eq!(copy, copycopy);

        assert_eq!(1, slice.ref_count());

        drop(copy);
        assert_eq!(1, slice.ref_count());

        drop(slice);
        assert_eq!(1, copycopy.ref_count());
    }

    #[test]
    #[cfg(target_pointer_width = "64")]
    fn long_str_slice_downgrade() {
        let slice = ByteView::from("helloworld_thisisalongstring");

        let copy = slice.slice(11..);
        assert_eq!(b"thisisalongstring", &*copy);

        let copycopy = copy.slice(0..4);
        assert_eq!(b"this", &*copycopy);

        {
            let copycopy = copy.slice(0..=4);
            assert_eq!(b"thisi", &*copycopy);
            assert_eq!(b't', *copycopy.first().unwrap());
        }

        assert_eq!(1, slice.ref_count());

        drop(copy);
        assert_eq!(1, slice.ref_count());

        drop(copycopy);
        assert_eq!(1, slice.ref_count());
    }

    #[test]
    fn short_str_clone() {
        let slice = ByteView::from("abcdef");
        let copy = slice.clone();
        assert_eq!(slice, copy);

        assert_eq!(1, slice.ref_count());

        drop(slice);
        assert_eq!(&*copy, b"abcdef");

        assert_eq!(1, copy.ref_count());
    }

    #[test]
    fn short_str_slice_full() {
        let slice = ByteView::from("abcdef");
        let copy = slice.slice(..);
        assert_eq!(slice, copy);

        assert_eq!(1, slice.ref_count());

        drop(slice);
        assert_eq!(&*copy, b"abcdef");

        assert_eq!(1, copy.ref_count());
    }

    #[test]
    fn short_str_slice_part() {
        let slice = ByteView::from("abcdef");
        let copy = slice.slice(3..);

        assert_eq!(1, slice.ref_count());

        drop(slice);
        assert_eq!(&*copy, b"def");

        assert_eq!(1, copy.ref_count());
    }

    #[test]
    fn short_str_slice_empty() {
        let slice = ByteView::from("abcdef");
        let copy = slice.slice(0..0);

        assert_eq!(1, slice.ref_count());

        drop(slice);
        assert_eq!(&*copy, b"");

        assert_eq!(1, copy.ref_count());
    }

    #[test]
    fn tiny_str_starts_with() {
        let a = ByteView::from("abc");
        assert!(a.starts_with(b"ab"));
        assert!(!a.starts_with(b"b"));
    }

    #[test]
    fn long_str_starts_with() {
        let a = ByteView::from("abcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdef");
        assert!(a.starts_with(b"abcdef"));
        assert!(!a.starts_with(b"def"));
    }

    #[test]
    fn tiny_str_cmp() {
        let a = ByteView::from("abc");
        let b = ByteView::from("def");
        assert!(a < b);
    }

    #[test]
    fn tiny_str_eq() {
        let a = ByteView::from("abc");
        let b = ByteView::from("def");
        assert!(a != b);
    }

    #[test]
    fn long_str_eq() {
        let a = ByteView::from("abcdefabcdefabcdefabcdef");
        let b = ByteView::from("xycdefabcdefabcdefabcdef");
        assert!(a != b);
    }

    #[test]
    fn long_str_cmp() {
        let a = ByteView::from("abcdefabcdefabcdefabcdef");
        let b = ByteView::from("xycdefabcdefabcdefabcdef");
        assert!(a < b);
    }

    #[test]
    fn long_str_eq_2() {
        let a = ByteView::from("abcdefabcdefabcdefabcdef");
        let b = ByteView::from("abcdefabcdefabcdefabcdef");
        assert!(a == b);
    }

    #[test]
    fn long_str_cmp_2() {
        let a = ByteView::from("abcdefabcdefabcdefabcdef");
        let b = ByteView::from("abcdefabcdefabcdefabcdeg");
        assert!(a < b);
    }

    #[test]
    fn long_str_cmp_3() {
        let a = ByteView::from("abcdefabcdefabcdefabcde");
        let b = ByteView::from("abcdefabcdefabcdefabcdef");
        assert!(a < b);
    }

    #[test]
    fn cmp_fuzz_1() {
        let a = ByteView::from([0]);
        let b = ByteView::from([]);

        assert!(a > b);
        assert!(a != b);
    }

    #[test]
    fn cmp_fuzz_2() {
        let a = ByteView::from([0, 0]);
        let b = ByteView::from([0]);

        assert!(a > b);
        assert!(a != b);
    }

    #[test]
    fn cmp_fuzz_3() {
        let a = ByteView::from([255, 255, 12, 255, 0]);
        let b = ByteView::from([255, 255, 12, 255]);

        assert!(a > b);
        assert!(a != b);
    }

    #[test]
    fn cmp_fuzz_4() {
        let a = ByteView::from([
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        ]);
        let b = ByteView::from([
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 0,
        ]);

        assert!(a > b);
        assert!(a != b);
    }
}