#![cfg_attr(feature = "unstable", feature(temporary_niche_types))]
#![cfg_attr(feature = "unstable", feature(maybe_uninit_slice))]
#![cfg_attr(not(feature = "unstable"), allow(unstable_name_collisions))]
#![warn(clippy::pedantic)]
#![warn(clippy::nursery)]

#[cfg(feature = "bytes")]
mod bytes;

#[cfg(feature = "io")]
mod io;

#[cfg(feature = "serde")]
mod serde;

#[cfg(feature = "tokio-io")]
mod tokio;

#[cfg(not(feature = "unstable"))]
mod stable;
#[cfg(not(feature = "unstable"))]
#[cfg_attr(not(feature = "unstable"), allow(unused_imports))]
use stable::{MaybeUninitCompact, UsizeCompact};

mod mirrored;
use crate::mirrored::MAX_PHYSICAL_BUF_SIZE;
use mirrored::{MirroredBuffer, mirrored_allocation_unit};
use num::Zero;
use std::{
    cmp::Ordering,
    iter::{FromIterator, FusedIterator},
    marker::PhantomData,
    mem::{MaybeUninit, needs_drop, zeroed},
    ops::{Deref, DerefMut, Neg, RangeBounds},
    ptr,
    ptr::{NonNull, copy, copy_nonoverlapping, drop_in_place, read, write},
};
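/// A double-ended ring buffer whose contents can always be viewed as a
/// single contiguous slice.
///
/// The backing [`MirroredBuffer`] maps the same physical memory twice, back
/// to back, so the virtual window is twice the physical capacity and any
/// logically contiguous run of at most `capacity()` elements starting at
/// `head` is one contiguous `&[T]` without any copying.
///
/// Invariants: `head == 0` or `head < capacity()`, and `len <= capacity()`.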
#[derive(Debug)]
pub struct SliceRingBuffer<T> {
    buf: MirroredBuffer<T>,
    head: usize,
    len: usize,
}

impl<T> SliceRingBuffer<T> {
    const ELEM_IS_ZST: bool = MirroredBuffer::<T>::ELEM_IS_ZST;
    const GROW_FACTOR: usize = 2;
    pub const MAX_CAPACITY: usize = MAX_PHYSICAL_BUF_SIZE;

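    /// Creates an empty `SliceRingBuffer`.
    ///
    /// For zero-sized element types the buffer starts with its maximum
    /// capacity (`MAX_PHYSICAL_BUF_SIZE`), since no memory has to be
    /// mapped; for all other types it starts unallocated with capacity 0.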
    #[inline]
    #[must_use]
    pub fn new() -> Self { Self::with_capacity(if Self::ELEM_IS_ZST { MAX_PHYSICAL_BUF_SIZE } else { 0 }) }

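    /// Creates an empty buffer with room for at least `cap` elements.
    ///
    /// The mirrored allocation works in whole allocation-granularity units,
    /// so the actual `capacity()` is typically rounded up well beyond `cap`
    /// (see `test_with_capacity`).
    ///
    /// # Panics
    ///
    /// Panics if `cap` exceeds `MAX_PHYSICAL_BUF_SIZE`.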
    #[inline]
    #[must_use]
    pub fn with_capacity(cap: usize) -> Self {
        assert!(cap <= MAX_PHYSICAL_BUF_SIZE);
        Self { buf: MirroredBuffer::with_capacity(cap), head: 0, len: 0 }
    }

    #[inline]
    #[must_use]
    pub fn capacity(&self) -> usize { self.buf.physical_capacity() }

    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        let len = self.len;
        debug_assert!(len <= self.capacity(), "len:{} > capacity:{}", len, self.capacity());
        len
    }

    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool { self.len().is_zero() }

    #[inline]
    #[must_use]
    pub fn is_full(&self) -> bool {
        let len = self.len();
        let cap = self.capacity();
        debug_assert!(len <= cap);
        len == cap
    }

    #[inline]
    #[must_use]
    pub fn head(&self) -> usize {
        let head = self.head;
        debug_assert!(head.is_zero() || head < self.capacity());
        head
    }

    #[inline]
    #[must_use]
    pub fn tail(&self) -> usize {
        let cap = self.capacity();
        let head = self.head();
        let len = self.len();
        debug_assert!(head.checked_add(len) <= Some(self.buf.virtual_size()));
        (head + len) % cap
    }

    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        let len = self.len();
        let head = self.head();
        unsafe { self.buf.virtual_uninit_slice_at(head, len).assume_init_ref() }
    }

    #[inline]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let head = self.head();
        let len = self.len();
        unsafe { self.buf.virtual_uninit_slice_mut_at(head, len).assume_init_mut() }
    }

    #[inline]
    #[must_use]
    pub fn as_uninit_slice(&self) -> &[MaybeUninit<T>] {
        let len = self.len();
        let head = self.head();
        self.buf.virtual_uninit_slice_at(head, len)
    }

    #[inline]
    #[must_use]
    pub fn as_uninit_mut_slice(&mut self) -> &mut [MaybeUninit<T>] {
        let len = self.len();
        let head = self.head();
        self.buf.virtual_uninit_slice_mut_at(head, len)
    }

    #[inline]
    #[must_use]
    pub fn uninit_slice(&mut self) -> &mut [MaybeUninit<T>] {
        let cap = self.capacity();
        let len = self.len();
        let head = self.head();
        debug_assert!(len.checked_add(head) < Some(self.buf.virtual_size()));
        let uninit_start = self.tail();
        let uninit_len = cap - len;
        self.buf.virtual_uninit_slice_mut_at(uninit_start, uninit_len)
    }

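    /// Moves `head` by `mv` positions (wrapping around the capacity) and
    /// adjusts `len` accordingly: a positive `mv` consumes elements from the
    /// front, a negative `mv` exposes `-mv` slots before the current front.
    ///
    /// Newly exposed slots are *not* initialized; callers must write them
    /// before the slice is read.
    ///
    /// # Panics
    ///
    /// Panics if the capacity is zero or the resulting length would be
    /// invalid.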
    #[inline]
    pub fn move_head(&mut self, mv: isize) {
        let cap = self.capacity().cast_signed();
        assert!(cap > 0);
        let head = self.head().cast_signed();
        self.head = (head + mv).rem_euclid(cap).cast_unsigned();
        self.len = self.len.strict_sub_signed(mv);
    }

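    /// Grows (`mv > 0`) or shrinks (`mv < 0`) the buffer from the back by
    /// adjusting `len` without touching element storage. As with
    /// [`Self::move_head`], growing exposes uninitialized slots.
    ///
    /// # Panics
    ///
    /// Panics if the resulting length would be negative or exceed the
    /// capacity.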
    #[inline]
    pub fn move_tail(&mut self, mv: isize) {
        let len = self.len().cast_signed();
        let cap = self.capacity().cast_signed();
        assert!(len.checked_add(mv) <= Some(cap) && len.checked_add(mv) >= Some(0));
        self.len = (len + mv).cast_unsigned();
    }

    #[inline]
    pub fn append_front(&mut self, other: &mut Self) {
        let other_len = other.len();
        self.reserve(other_len);
        if Self::ELEM_IS_ZST {
            self.len += other.len();
            other.len = 0;
            return;
        }
        if other_len == 0 {
            return;
        }
        self.move_head(-other_len.cast_signed());
        unsafe {
            let src = other.as_slice().as_ptr();
            let dst = self.as_mut_ptr();
            copy_nonoverlapping(src, dst, other_len);
            other.len = 0;
        }
    }

    #[inline]
    pub fn append(&mut self, other: &mut Self) {
        let other_len = other.len();
        self.reserve(other_len);
        if Self::ELEM_IS_ZST {
            self.len += other_len;
            other.len = 0;
            return;
        }
        // An empty source also guards `uninit_slice()`, which computes
        // `tail()` and would otherwise divide by a zero capacity.
        if other_len == 0 {
            return;
        }
        unsafe {
            let uninit = self.uninit_slice();
            let src = other.as_slice().as_ptr();
            let dst = uninit.as_mut_ptr().cast();
            copy_nonoverlapping(src, dst, other_len);
            self.len += other_len;
            other.len = 0;
        }
    }

    #[inline]
    #[must_use]
    pub fn front(&self) -> Option<&T> { self.as_slice().first() }

    #[inline]
    #[must_use]
    pub fn front_mut(&mut self) -> Option<&mut T> { self.as_mut_slice().first_mut() }

    #[inline]
    #[must_use]
    pub fn back(&self) -> Option<&T> { self.as_slice().last() }

    #[inline]
    #[must_use]
    pub fn back_mut(&mut self) -> Option<&mut T> { self.as_mut_slice().last_mut() }

    #[inline]
    #[must_use]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index >= self.len() {
            None
        } else {
            let head = self.head();
            let capacity = self.capacity();
            let actual_index = (head + index) % capacity;
            unsafe { Some(self.buf.virtual_uninit_slice_at(actual_index, 1).get_unchecked(0).assume_init_ref()) }
        }
    }

    #[inline]
    #[must_use]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index >= self.len() {
            None
        } else {
            let head = self.head();
            let capacity = self.capacity();
            let actual_index = (head + index) % capacity;
            unsafe {
                Some(self.buf.virtual_uninit_slice_mut_at(actual_index, 1).get_unchecked_mut(0).assume_init_mut())
            }
        }
    }

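    /// Replaces the backing buffer with a fresh one of physical capacity
    /// `new_p_cap`, moving up to `new_p_cap` elements to the start of the
    /// new buffer and resetting `head` to zero.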
    fn realloc_and_restore_part(&mut self, new_p_cap: usize) {
        debug_assert!(new_p_cap <= MAX_PHYSICAL_BUF_SIZE);
        debug_assert!(!Self::ELEM_IS_ZST);
        let mut new_buf = MirroredBuffer::<T>::with_capacity(new_p_cap);
        let len = self.len();
        let reserve_len = len.min(new_p_cap);
        let old_slice = self.as_slice();
        let new_uninit_slice = new_buf.as_uninit_virtual_mut_slice();
        unsafe {
            for i in 0..reserve_len {
                let val = ptr::read(old_slice.get_unchecked(i));
                new_uninit_slice.get_unchecked_mut(i).write(val);
            }
        }
        self.len = 0;
        self.buf = new_buf;
        self.head = 0;
        self.len = reserve_len;
    }

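    /// Removes the elements in `range` from the buffer and returns a
    /// double-ended iterator over them.
    ///
    /// The buffer's length is temporarily truncated to the start of the
    /// range; when the `Drain` is dropped it drops any un-yielded elements
    /// and closes the gap by shifting whichever of the front or back
    /// segment is shorter.
    ///
    /// If the `Drain` is leaked (e.g. via `mem::forget`), the buffer stays
    /// truncated: the drained and trailing elements are leaked, but the
    /// buffer remains valid.
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds or its start exceeds its end.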
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
    where
        R: RangeBounds<usize>,
    {
        use std::ops::Bound::{Excluded, Included, Unbounded};

        let len = self.len();
        let start = match range.start_bound() {
            Included(&n) => n,
            Excluded(&n) => n + 1,
            Unbounded => 0,
        };
        let end = match range.end_bound() {
            Included(&n) => n + 1,
            Excluded(&n) => n,
            Unbounded => len,
        };
        assert!(start <= end, "drain lower bound was too large");
        assert!(end <= len, "drain upper bound was too large");
        let drain_len = end - start;
        let new_len = len - drain_len;
        // Temporarily truncate `len` to the logical start of the drained
        // range; `Drain` uses it as the range's offset from `head` and
        // restores the correct length on drop.
        self.len = start;
        Drain {
            len: drain_len,
            head: 0,
            remaining: drain_len,
            new_len,
            inner: NonNull::from(self),
            _marker: PhantomData,
        }
    }

    #[inline]
    #[must_use]
    pub fn try_push_front(&mut self, value: T) -> Option<&T> {
        if self.is_full() {
            return None;
        }
        self.move_head(-1);
        unsafe {
            if Self::ELEM_IS_ZST {
                return Some(NonNull::dangling().as_ref());
            }
            let uninit = self.as_uninit_mut_slice().first_mut().unwrap_unchecked();
            let init = uninit.write(value);
            Some(init)
        }
    }

    #[inline]
    pub fn push_front(&mut self, value: T) -> &T {
        unsafe {
            assert!(self.len().checked_add(1) <= Some(MAX_PHYSICAL_BUF_SIZE));
            let required_cap = self.len() + 1;

            if Self::ELEM_IS_ZST && self.capacity() < required_cap {
                self.buf.set_size_unchecked(required_cap * 2);
                return self.try_push_front(value).unwrap_unchecked();
            }
            if self.capacity() < required_cap {
                self.realloc_and_restore_part(required_cap.strict_mul(Self::GROW_FACTOR));
            }
            self.try_push_front(value).unwrap_unchecked()
        }
    }

    #[inline]
    #[must_use]
    pub fn try_push_back(&mut self, value: T) -> Option<&T> {
        if self.is_full() {
            return None;
        }
        self.move_tail(1);
        unsafe {
            if Self::ELEM_IS_ZST {
                return Some(NonNull::dangling().as_ref());
            }
            let uninit = self.as_uninit_mut_slice().last_mut().unwrap_unchecked();
            let init = uninit.write(value);
            Some(init)
        }
    }

    #[inline]
    pub fn push_back(&mut self, value: T) -> &T {
        unsafe {
            assert!(self.len().checked_add(1) <= Some(MAX_PHYSICAL_BUF_SIZE));
            let required_cap = self.len() + 1;
            if Self::ELEM_IS_ZST && self.capacity() < required_cap {
                self.buf.set_size_unchecked(required_cap * 2);
                return self.try_push_back(value).unwrap_unchecked();
            }
            if self.capacity() < required_cap {
                self.realloc_and_restore_part(required_cap.strict_mul(Self::GROW_FACTOR));
            }
            self.try_push_back(value).unwrap_unchecked()
        }
    }

    #[inline]
    pub fn pop_back(&mut self) -> Option<T> {
        if self.is_empty() {
            return None;
        }
        if Self::ELEM_IS_ZST {
            self.move_tail(-1);
            return Some(unsafe { zeroed() });
        }
        unsafe {
            let removed = self.last_mut().map(|p| read(ptr::from_mut(p)))?;
            self.move_tail(-1);
            Some(removed)
        }
    }

    #[inline]
    pub fn pop_front(&mut self) -> Option<T> {
        if self.is_empty() {
            return None;
        }
        if Self::ELEM_IS_ZST {
            self.move_head(1);
            return Some(unsafe { zeroed() });
        }
        unsafe {
            let removed = self.first_mut().map(|p| read(ptr::from_mut(p)))?;
            self.move_head(1);
            Some(removed)
        }
    }

    #[inline]
    pub fn truncate_back(&mut self, n: usize) {
        let current_len = self.len();
        if n >= current_len {
            return;
        }
        let mv = (current_len - n).cast_signed().neg();
        if Self::ELEM_IS_ZST {
            self.move_tail(mv);
            debug_assert_eq!(self.len(), n);
            return;
        }
        let s = unsafe { self.get_unchecked_mut(n..current_len) } as *mut [_];
        unsafe {
            drop_in_place(s);
            self.move_tail(mv);
        }
        debug_assert_eq!(self.len(), n);
    }

    #[inline]
    pub fn truncate(&mut self, n: usize) { self.truncate_back(n); }

    #[inline]
    #[must_use]
    pub fn split_off(&mut self, at: usize) -> Self {
        let tail_len = self.len().strict_sub(at);
        let mut another = Self::with_capacity(tail_len);
        if tail_len > 0 {
            if Self::ELEM_IS_ZST {
                another.len = tail_len;
                self.len = at;
                return another;
            }
            let tail_slice = unsafe { self.get_unchecked(at..) };
            unsafe {
                copy_nonoverlapping(tail_slice.as_ptr(), another.as_mut_ptr(), tail_len);
                another.len = tail_len;
            }
        }
        self.len = at;
        another
    }

    #[inline]
    #[must_use]
    pub fn split_to(&mut self, at: usize) -> Self {
        let prev_len = at;
        let mut another = Self::with_capacity(prev_len);
        if prev_len > 0 {
            if Self::ELEM_IS_ZST {
                self.len -= prev_len;
                another.len = prev_len;
                return another;
            }
            let prev_slice = unsafe { self.get_unchecked(..at) };
            unsafe {
                copy_nonoverlapping(prev_slice.as_ptr(), another.as_mut_ptr(), prev_len);
                another.len = prev_len;
            }
        }
        self.move_head(prev_len.cast_signed());
        another
    }

    #[inline]
    pub fn clear(&mut self) { self.truncate(0); }

    #[inline]
    pub fn swap_remove(&mut self, idx: usize) -> Option<T> {
        if self.is_empty() {
            return None;
        }
        if Self::ELEM_IS_ZST {
            return self.pop_back();
        }
        let len = self.len();
        if idx != len - 1 {
            self.swap(idx, len - 1);
        }
        self.pop_back()
    }

    #[inline]
    #[must_use]
    pub fn try_insert(&mut self, idx: usize, elem: T) -> Option<&T> {
        let len = self.len();
        match len {
            _ if idx > len || self.is_full() => {
                return None;
            }
            _ if idx.is_zero() => {
                return self.try_push_front(elem);
            }
            _ if idx == len => {
                return self.try_push_back(elem);
            }
            _ => {}
        }
        self.move_tail(1);
        if Self::ELEM_IS_ZST {
            return Some(unsafe { NonNull::dangling().as_ref() });
        }
        unsafe {
            let slice = self.as_mut_slice();
            let count = len - idx;
            let src = slice.as_mut_ptr().add(idx);
            let dst = src.add(1);
            copy(src, dst, count);
            write(slice.as_mut_ptr().add(idx), elem);
            Some(&*src)
        }
    }

    pub fn insert(&mut self, idx: usize, elem: T) -> &T {
        let len = self.len();
        assert!(idx <= len, "index out of bounds");
        if !self.is_full() {
            return unsafe { self.try_insert(idx, elem).unwrap_unchecked() };
        }
        match idx {
            0 => return self.push_front(elem),
            _ if idx == len => return self.push_back(elem),
            _ => {}
        }
        if Self::ELEM_IS_ZST {
            return unsafe { NonNull::dangling().as_ref() };
        }
        let new_buf: MirroredBuffer<T> =
            MirroredBuffer::with_capacity(self.capacity().checked_add(1).expect("cap overflow"));
        let src = self.as_mut_slice().as_ptr();
        let dst = new_buf.as_ptr();
        unsafe {
            copy(src, dst, idx);
            let src = src.add(idx);
            let target = dst.add(idx);
            // Move `elem` into the buffer; a plain byte copy would leave the
            // local binding alive and cause a double drop.
            write(target, elem);
            let dst = target.add(1);
            copy(src, dst, len - idx);
            self.buf = new_buf;
            self.head = 0;
            self.len = len + 1;
            &*target
        }
    }

    #[inline]
    pub fn remove(&mut self, idx: usize) -> T {
        let len = self.len();
        assert!(idx < len, "index out of bounds: the len is {} but the index is {}", len, idx);
        unsafe {
            if Self::ELEM_IS_ZST {
                self.move_tail(-1);
                return zeroed();
            }
            let ptr = self.as_mut_ptr();
            let target = ptr.add(idx);
            let removed = read(target);
            copy(ptr.add(idx + 1), target, len - idx - 1);
            self.move_tail(-1);
            removed
        }
    }

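    /// Reserves capacity for at least `additional` more elements, growing
    /// geometrically (at least doubling via `GROW_FACTOR`) whenever a
    /// reallocation is needed.
    ///
    /// # Panics
    ///
    /// Panics if the required capacity overflows or exceeds
    /// `MAX_PHYSICAL_BUF_SIZE`.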
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        // `None` compares less than any `Some`, so the overflow case must be
        // rejected explicitly before the capacity check.
        let required_cap = self.len().checked_add(additional).expect("capacity overflow");
        assert!(required_cap <= MAX_PHYSICAL_BUF_SIZE);

        let cap = self.capacity();
        if cap >= required_cap {
            return;
        }
        if Self::ELEM_IS_ZST {
            unsafe {
                self.buf.set_size_unchecked(required_cap * 2);
            }
            return;
        }
        let new_cap = required_cap.max(cap.saturating_mul(Self::GROW_FACTOR));
        self.realloc_and_restore_part(new_cap);
    }

    #[inline]
    pub fn shrink_to(&mut self, min_cap: usize) {
        if Self::ELEM_IS_ZST {
            return;
        }
        let len = self.len();
        assert!(min_cap >= len, "min_capacity ({min_cap}) cannot be less than current length ({len})");
        if min_cap >= self.capacity() {
            return;
        }
        let ideal_virtual_size = mirrored_allocation_unit::<T>(min_cap);
        if ideal_virtual_size < self.buf.virtual_size() {
            self.realloc_and_restore_part(min_cap);
        }
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        if Self::ELEM_IS_ZST {
            unsafe {
                self.buf.set_size_unchecked(self.len() * 2);
            }
            return;
        }
        let len = self.len();
        let ideal_virtual_size = mirrored_allocation_unit::<T>(len);
        if ideal_virtual_size >= self.buf.virtual_size() {
            return;
        }
        self.realloc_and_restore_part(len);
    }

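    /// Rotates the buffer left by `mv` positions (negative `mv` rotates
    /// right), so that the element at index `mv` becomes the first element.
    /// A full buffer is rotated in O(1) by moving `head`; otherwise the
    /// classic three-reversal trick is applied in place.
    ///
    /// # Panics
    ///
    /// Panics if the buffer is empty (non-ZST elements only).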
    #[inline]
    pub fn rotate(&mut self, mv: isize) {
        if Self::ELEM_IS_ZST {
            return;
        }
        assert!(!self.is_empty(), "cannot rotate an empty buffer");
        if self.is_full() {
            self.head = (self.head().cast_signed() + mv).rem_euclid(self.capacity().cast_signed()) as usize;
            return;
        }
        let idx = mv.rem_euclid(self.len().cast_signed()) as usize;
        if idx.is_zero() {
            return;
        }
        self[..idx].reverse();
        self[idx..].reverse();
        self.reverse();
    }
}

impl<T> Deref for SliceRingBuffer<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target { self.as_slice() }
}

impl<T> DerefMut for SliceRingBuffer<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut_slice() }
}

impl<T> Default for SliceRingBuffer<T> {
    #[inline]
    fn default() -> Self { Self::new() }
}

impl<T> AsRef<[T]> for SliceRingBuffer<T> {
    #[inline]
    fn as_ref(&self) -> &[T] { self }
}

impl<T> AsMut<[T]> for SliceRingBuffer<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] { &mut *self }
}

impl<T: PartialEq> PartialEq for SliceRingBuffer<T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool { self.as_slice() == other.as_slice() }
}

impl<T: Eq> Eq for SliceRingBuffer<T> {}

impl<T: PartialOrd> PartialOrd for SliceRingBuffer<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.as_slice().partial_cmp(other.as_slice()) }
}

impl<T: Ord> Ord for SliceRingBuffer<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering { self.as_slice().cmp(other.as_slice()) }
}

#[derive(Debug)]
pub struct IntoIter<T> {
    inner: SliceRingBuffer<T>,
}

impl<T> ExactSizeIterator for IntoIter<T> {
    #[inline]
    fn len(&self) -> usize { self.inner.len() }
}

impl<T> IntoIterator for SliceRingBuffer<T> {
    type IntoIter = IntoIter<T>;
    type Item = T;

    #[inline]
    fn into_iter(self) -> Self::IntoIter { IntoIter { inner: self } }
}

impl<T> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> { self.inner.pop_front() }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.inner.len();
        (len, Some(len))
    }
}

impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> { self.inner.pop_back() }
}

impl<T: Clone> Clone for SliceRingBuffer<T> {
    fn clone(&self) -> Self { self.iter().cloned().collect() }

    fn clone_from(&mut self, src: &Self) {
        if ptr::eq(self, src) {
            return;
        }
        self.clear();
        self.extend(src.iter().cloned());
    }
}

impl<T> FusedIterator for IntoIter<T> {}

unsafe impl<T: Send> Send for IntoIter<T> {}

unsafe impl<T: Sync> Sync for IntoIter<T> {}

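/// A draining iterator for [`SliceRingBuffer`].
///
/// `len` is the total number of drained elements, `head` and `remaining`
/// track how many have been yielded from either end, and `new_len` is the
/// length the buffer must have once the gap is closed on drop. While the
/// `Drain` is alive the buffer's `len` is truncated to the start of the
/// drained range, so the un-yielded elements are unreachable through the
/// buffer itself.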
#[derive(Debug)]
pub struct Drain<'a, T> {
    len: usize,
    head: usize,
    remaining: usize,
    new_len: usize,
    inner: NonNull<SliceRingBuffer<T>>,
    _marker: PhantomData<&'a T>,
}

impl<T> Drain<'_, T> {
    const ELEM_IS_ZST: bool = SliceRingBuffer::<T>::ELEM_IS_ZST;

    #[inline]
    const fn move_head_forward(&mut self, mv: usize) {
        self.head += mv;
        self.remaining -= mv;
    }

    #[inline]
    const fn move_tail_backward(&mut self, mv: usize) { self.remaining -= mv; }
}

1244
1245impl<T> Drop for Drain<'_, T> {
1246 fn drop(&mut self) {
1247 use Ordering::{Equal, Greater, Less};
1248 let inner = unsafe { self.inner.as_mut() };
1249 if Self::ELEM_IS_ZST || self.len.is_zero() {
1250 inner.len = self.new_len;
1251 return;
1252 }
1253 let drain_start = inner.len();
1254 let drop_start = drain_start + self.head;
1255 let drop_len = self.remaining;
1256 if needs_drop::<T>() && !drop_len.is_zero() {
1257 unsafe {
1258 drop_in_place(self.inner.as_mut().as_mut_slice().get_unchecked_mut(drop_start..drop_start + drop_len));
1259 }
1260 }
1261 let front_len = drain_start;
1262 let back_len = self.new_len - drain_start; let ptr = unsafe { inner.buf.as_ptr().add(inner.head()) };
1264 match back_len.cmp(&front_len) {
1265 Less | Equal => {
1266 unsafe {
1267 let dst = ptr.add(drain_start);
1268 let src = dst.add(self.len).cast_const();
1269 copy_nonoverlapping(src, dst, back_len);
1270 inner.len = self.new_len;
1271 };
1272 }
1273 Greater => unsafe {
1274 let src = ptr.cast_const();
1275 let dst = ptr.add(self.len);
1276 copy_nonoverlapping(src, dst, front_len);
1277 inner.head += self.len;
1278 inner.len = self.new_len;
1279 },
1280 }
1281 }
1282}

impl<T> Iterator for Drain<'_, T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        if self.remaining.is_zero() {
            return None;
        }
        if Self::ELEM_IS_ZST {
            self.move_head_forward(1);
            return Some(unsafe { zeroed() });
        }
        let drain_start = unsafe { self.inner.as_ref().len() };
        let pos = unsafe { self.inner.as_ref().head() } + drain_start + self.head;
        let target = unsafe { self.inner.as_mut().buf.get_unchecked_mut(pos) };
        self.move_head_forward(1);
        Some(unsafe { target.assume_init_read() })
    }

    fn size_hint(&self) -> (usize, Option<usize>) { (self.remaining, Some(self.remaining)) }
}

impl<T> DoubleEndedIterator for Drain<'_, T> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.remaining.is_zero() {
            return None;
        }
        if Self::ELEM_IS_ZST {
            self.move_tail_backward(1);
            return Some(unsafe { zeroed() });
        }
        let drain_start = unsafe { self.inner.as_ref().len() };
        let pos = unsafe { self.inner.as_ref().head() } + drain_start + self.head + self.remaining - 1;
        let target = unsafe { self.inner.as_mut().buf.get_unchecked_mut(pos) };
        self.move_tail_backward(1);
        Some(unsafe { target.assume_init_read() })
    }
}

impl<T> ExactSizeIterator for Drain<'_, T> {}

impl<T> FusedIterator for Drain<'_, T> {}

impl<T> From<Vec<T>> for SliceRingBuffer<T> {
    #[inline]
    fn from(vec: Vec<T>) -> Self { Self::from_iter(vec) }
}

impl<T> FromIterator<T> for SliceRingBuffer<T> {
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let iterator = iter.into_iter();
        let (lower, upper) = iterator.size_hint();
        let cap = upper.unwrap_or(lower);
        let mut rb = Self::with_capacity(cap);
        rb.extend(iterator);
        rb
    }
}

impl<T> Extend<T> for SliceRingBuffer<T> {
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iterator = iter.into_iter();
        let (lower, upper) = iterator.size_hint();
        let reserve_size = upper.unwrap_or(lower);
        self.reserve(reserve_size);
        for item in iterator {
            self.push_back(item);
        }
    }
}

impl<T: Clone> From<&[T]> for SliceRingBuffer<T> {
    #[inline]
    fn from(slice: &[T]) -> Self {
        let mut buffer = Self::with_capacity(slice.len());
        buffer.extend(slice.iter().cloned());
        buffer
    }
}

impl<T: Clone, const N: usize> From<[T; N]> for SliceRingBuffer<T> {
    #[inline]
    fn from(arr: [T; N]) -> Self {
        let mut buffer = Self::with_capacity(N);
        buffer.extend(arr.iter().cloned());
        buffer
    }
}

unsafe impl<T> Send for SliceRingBuffer<T> where T: Send {}
unsafe impl<T> Sync for SliceRingBuffer<T> where T: Sync {}

impl<T> Drop for SliceRingBuffer<T> {
    fn drop(&mut self) {
        let p = self.as_mut_slice() as *mut [_];
        unsafe { drop_in_place(p) };
        self.len = 0;
    }
}

#[cfg(test)]
mod tests {
    use crate::mirrored::allocation_granularity;

    use super::*;
    use core::{iter, ops::Not, sync::atomic::AtomicUsize};
    use num::Integer;
    use std::rc::Rc;

    #[test]
    fn test_new() {
        let rb: SliceRingBuffer<i32> = SliceRingBuffer::new();
        assert_eq!(rb.len(), 0);
        assert!(rb.is_empty());
        assert_eq!(rb.capacity(), 0);
    }

    #[test]
    fn test_new_zst() {
        let zst_rb: SliceRingBuffer<()> = SliceRingBuffer::new();
        assert_eq!(zst_rb.capacity(), MAX_PHYSICAL_BUF_SIZE);
        assert_eq!(zst_rb.len(), 0);
        assert!(zst_rb.is_empty());
    }

    #[test]
    fn test_with_capacity() {
        let rb: SliceRingBuffer<i32> = SliceRingBuffer::with_capacity(1);
        assert_eq!(rb.len(), 0);
        assert!(!rb.is_full());
        assert!(rb.is_empty());
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());
    }

    #[test]
    fn test_with_capacity_zst() {
        let zst_rb: SliceRingBuffer<()> = SliceRingBuffer::with_capacity(1);
        assert!(zst_rb.is_empty());
        assert!(!zst_rb.is_full());
        assert_eq!(zst_rb.len(), 0);
        assert_eq!(zst_rb.capacity(), 1);
    }

    #[test]
    fn test_push_back_and_pop_front() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_back(1i32);
        rb.push_back(2);
        rb.push_back(3);

        assert_eq!(rb.len(), 3);
        assert!(!rb.is_empty());
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());

        assert_eq!(rb.pop_front(), Some(1));
        assert_eq!(rb.pop_front(), Some(2));
        assert_eq!(rb.pop_front(), Some(3));
        assert_eq!(rb.pop_front(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_push_back_and_pop_back() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_back(1i32);
        rb.push_back(2);
        rb.push_back(3);

        assert_eq!(rb.len(), 3);
        assert!(!rb.is_empty());
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());

        assert_eq!(rb.pop_back(), Some(3));
        assert_eq!(rb.pop_back(), Some(2));
        assert_eq!(rb.pop_back(), Some(1));
        assert_eq!(rb.pop_back(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_push_front_and_pop_front() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_front(1i32);
        rb.push_front(2);
        rb.push_front(3);

        assert_eq!(rb.len(), 3);
        assert!(!rb.is_empty());
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());

        assert_eq!(rb.pop_front(), Some(3));
        assert_eq!(rb.pop_front(), Some(2));
        assert_eq!(rb.pop_front(), Some(1));
        assert_eq!(rb.pop_front(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_push_front_and_pop_back() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_front(1i32);
        rb.push_front(2);
        rb.push_front(3);

        assert_eq!(rb.len(), 3);
        assert!(!rb.is_empty());
        assert_eq!(rb.as_slice(), &[3, 2, 1]);

        assert_eq!(rb.pop_back(), Some(1));
        assert_eq!(rb.pop_back(), Some(2));
        assert_eq!(rb.pop_back(), Some(3));
        assert_eq!(rb.pop_back(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_push_back_and_pop_front_zst() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_back(());
        rb.push_back(());
        rb.push_back(());

        assert_eq!(rb.len(), 3);
        assert!(!rb.is_empty());
        assert_eq!(rb.capacity(), 3);
        assert_eq!(rb.as_slice(), &[(), (), ()]);

        assert_eq!(rb.pop_front(), Some(()));
        assert_eq!(rb.pop_front(), Some(()));
        assert_eq!(rb.pop_front(), Some(()));
        assert_eq!(rb.pop_front(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_push_front_and_pop_back_zst() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_front(());
        rb.push_front(());
        rb.push_front(());

        assert_eq!(rb.len(), 3);
        assert!(!rb.is_empty());
        assert_eq!(rb.capacity(), 3);
        assert_eq!(rb.as_slice(), &[(), (), ()]);

        assert_eq!(rb.pop_back(), Some(()));
        assert_eq!(rb.pop_back(), Some(()));
        assert_eq!(rb.pop_back(), Some(()));
        assert_eq!(rb.pop_back(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_push_back_and_pop_back_zst() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_back(());
        rb.push_back(());
        rb.push_back(());

        assert_eq!(rb.len(), 3);
        assert!(!rb.is_empty());
        assert_eq!(rb.capacity(), 3);
        assert_eq!(rb.as_slice(), &[(), (), ()]);

        assert_eq!(rb.pop_back(), Some(()));
        assert_eq!(rb.pop_back(), Some(()));
        assert_eq!(rb.pop_back(), Some(()));
        assert_eq!(rb.pop_back(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_push_front_and_pop_front_zst() {
        let mut rb: SliceRingBuffer<()> = SliceRingBuffer::with_capacity(3);
        rb.push_front(());
        rb.push_front(());
        rb.push_front(());

        assert_eq!(rb.len(), 3);
        assert!(!rb.is_empty());
        assert_eq!(rb.capacity(), 3);
        assert_eq!(rb.as_slice(), &[(), (), ()]);

        assert_eq!(rb.pop_front(), Some(()));
        assert_eq!(rb.pop_front(), Some(()));
        assert_eq!(rb.pop_front(), Some(()));
        assert_eq!(rb.pop_front(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_wrap_push_front_and_pop_back() {
        let ag = allocation_granularity();
        let mut rb = SliceRingBuffer::with_capacity(ag + 1);
        for _ in 0..ag - 1 {
            rb.push_front(false);
        }
        rb.push_front(true);

        assert_eq!(rb.len(), ag);
        assert!(!rb.is_empty());

        for _ in 0..ag - 1 {
            assert_eq!(rb.pop_back(), Some(false));
        }
        assert_eq!(rb.pop_back(), Some(true));
        assert_eq!(rb.pop_back(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_wrap_push_back_and_pop_front() {
        let ag = allocation_granularity();
        let mut rb = SliceRingBuffer::with_capacity(ag + 1);
        for _ in 0..ag - 1 {
            rb.push_back(false);
        }
        rb.push_back(true);

        assert_eq!(rb.len(), ag);
        assert!(!rb.is_empty());

        for _ in 0..ag - 1 {
            assert_eq!(rb.pop_front(), Some(false));
        }
        assert_eq!(rb.pop_front(), Some(true));
        assert_eq!(rb.pop_front(), None);

        assert_eq!(rb.len(), 0);
        assert!(rb.is_full().not());
        assert!(rb.is_empty());
    }

    #[test]
    fn test_wrap_push_back_and_pop_front_zst() {
        let mut zst_rb = SliceRingBuffer::new();
        zst_rb.push_back(());
        assert!(!zst_rb.is_empty());
        assert!(!zst_rb.is_full());
        assert_eq!(zst_rb.len(), 1);
        assert_eq!(zst_rb.capacity(), MAX_PHYSICAL_BUF_SIZE);
        zst_rb.pop_back();
        assert!(zst_rb.is_empty());
        assert_eq!(zst_rb.capacity(), MAX_PHYSICAL_BUF_SIZE);
        assert!(!zst_rb.is_full());
    }

    #[test]
    fn test_wrap_around() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_back(1);
        rb.push_back(2);
        rb.push_back(3);
        assert_eq!(rb.pop_front(), Some(1));
        rb.push_back(4);

        assert_eq!(rb.as_slice(), &[2, 3, 4]);
        assert_eq!(rb.len(), 3);

        assert_eq!(rb.pop_front(), Some(2));
        assert_eq!(rb.pop_front(), Some(3));
        assert_eq!(rb.pop_front(), Some(4));
        assert_eq!(rb.pop_front(), None);
    }

    #[test]
    fn test_deref() {
        let mut rb = SliceRingBuffer::with_capacity(4);
        rb.push_back(10);
        rb.push_back(20);
        rb.push_back(30);

        assert_eq!(&[10, 20, 30], &*rb);
    }

    #[test]
    fn test_deref_zst() {
        let rb = iter::repeat_n((), 8).collect::<SliceRingBuffer<_>>();
        assert_eq!(&*rb, &[(); 8]);
    }

    #[test]
    fn test_front_back() {
        let mut rb = SliceRingBuffer::with_capacity(5);
        assert!(rb.front().is_none());
        assert!(rb.back().is_none());

        rb.push_back(1);
        assert_eq!(rb.front(), Some(&1));
        assert_eq!(rb.back(), Some(&1));

        rb.push_back(2);
        assert_eq!(rb.front(), Some(&1));
        assert_eq!(rb.back(), Some(&2));

        rb.push_front(0);
        assert_eq!(rb.front(), Some(&0));
        assert_eq!(rb.back(), Some(&2));
    }

    #[test]
    fn test_front_back_zst() {
        let mut rb = SliceRingBuffer::new();
        rb.push_back(());
        rb.pop_back();
        assert!(rb.front().is_none());
        assert!(rb.back().is_none());
        rb.push_back(());
        assert!(rb.front().is_some());
        assert!(rb.back().is_some());
        assert_eq!(rb.pop_back(), Some(()));
    }

    #[test]
    fn test_clear() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_back(1);
        rb.push_back(2);
        rb.clear();
        assert!(rb.is_empty());
        assert_eq!(rb.len(), 0);
    }

    #[test]
    fn test_clear_zst() {
        let mut rb = SliceRingBuffer::with_capacity(3);
        rb.push_back(());
        rb.push_back(());
        rb.clear();
        assert!(rb.is_empty());
        assert_eq!(rb.len(), 0);
    }

    #[test]
    fn test_into_iter() {
        let mut rb = SliceRingBuffer::with_capacity(4);
        rb.push_back(1);
        rb.push_back(2);
        rb.push_back(3);

        let mut iter = rb.into_iter();
        assert_eq!(iter.next(), Some(1));
        assert_eq!(iter.next_back(), Some(3));
        assert_eq!(iter.next(), Some(2));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn test_into_iter_zst() {
        let mut rb = SliceRingBuffer::with_capacity(4);
        rb.push_back(());
        rb.push_back(());
        rb.push_back(());

        let mut iter = rb.into_iter();
        assert_eq!(iter.next(), Some(()));
        assert_eq!(iter.next_back(), Some(()));
        assert_eq!(iter.next(), Some(()));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn test_from_iter() {
        let data = vec![1, 2, 3, 4, 5];
        let rb: SliceRingBuffer<i32> = data.into_iter().collect();
        assert_eq!(rb.len(), 5);
        assert_eq!(rb.as_slice(), &[1, 2, 3, 4, 5]);
    }

    #[test]
    fn test_from_iter_zst() {
        let data = vec![(); 5];
        let rb = data.into_iter().collect::<SliceRingBuffer<_>>();
        assert_eq!(rb.len(), 5);
        assert_eq!(rb.as_slice(), &[(); 5]);
    }

    #[test]
    fn test_from_array() {
        let arr = [1, 2, 3, 4, 5];
        let rb = SliceRingBuffer::from(arr);
        assert_eq!(rb.len(), 5);
        assert_eq!(rb.as_slice(), &arr);
    }

    #[test]
    fn test_from_array_zst() {
        let arr = [(); 5];
        let rb = SliceRingBuffer::from(arr);
        assert_eq!(rb.len(), 5);
        assert_eq!(rb.as_slice(), &arr);
    }

    #[test]
    fn test_from_slice() {
        let slice = [1, 1, 4, 5, 1, 5].as_slice();
        let rb = SliceRingBuffer::from(slice);
        assert_eq!(rb.len(), 6);
        assert_eq!(rb.as_slice(), slice);
    }

    #[test]
    fn test_from_slice_zst() {
        let slice = [(); 6].as_slice();
        let rb = SliceRingBuffer::from(slice);
        assert_eq!(rb.len(), 6);
        assert_eq!(rb.as_slice(), slice);
    }

    #[test]
    fn test_extend() {
        let mut rb = SliceRingBuffer::from(vec![1, 2]);
        rb.extend(vec![3, 4, 5]);
        assert_eq!(rb.as_slice(), &[1, 2, 3, 4, 5]);
    }

    #[test]
    fn test_extend_zst() {
        let mut rb = SliceRingBuffer::from(vec![(); 2]);
        rb.extend(vec![(); 4]);
        assert_eq!(rb.as_slice(), &[(); 6]);
    }

    #[test]
    fn test_realloc() {
        let ag = allocation_granularity();
        let mut rb = SliceRingBuffer::with_capacity(ag);
        for i in 0..ag * 2 {
            if i.is_even() {
                rb.push_back(i);
            } else {
                rb.push_front(i);
            }
        }
        rb.push_back(114_514);
        assert!(rb.capacity() > ag * 3 / size_of::<usize>());
        assert_eq!(rb.len(), ag * 2 + 1);
    }

    #[test]
    fn test_drop() {
        use core::sync::atomic::Ordering;

        struct DropTracker {
            counter: Rc<AtomicUsize>,
        }
        impl Drop for DropTracker {
            fn drop(&mut self) {
                self.counter
                    .fetch_update(Ordering::Relaxed, Ordering::Acquire, |x| Some(x + 1))
                    .expect("atomic update failed");
            }
        }
        let drop_counter = Rc::new(AtomicUsize::new(0));

        let clone_tracker = || DropTracker { counter: drop_counter.clone() };
        let mut rb = SliceRingBuffer::new();
        rb.push_back(clone_tracker());
        let _ = rb.pop_front().unwrap();
        assert_eq!(drop_counter.load(Ordering::Acquire), 1);

        let ag = allocation_granularity();
        let count = ag / size_of::<DropTracker>();
        for _ in 0..count {
            rb.push_back(clone_tracker());
        }
        assert_eq!(rb.len(), count);
        let _ = rb.pop_back();
        assert_eq!(rb.len(), count - 1);
        assert_eq!(drop_counter.load(Ordering::Acquire), 2);

        rb.push_back(clone_tracker());
        rb.push_back(clone_tracker());
        assert_eq!(rb.len(), count + 1);
        drop(rb);
        assert_eq!(drop_counter.load(Ordering::Acquire), count + 3);
    }

    #[test]
    fn test_rotate() {
        let mut rb = SliceRingBuffer::from(vec![1, 2, 3, 4, 5]);
        rb.rotate(2);
        assert_eq!(rb.as_slice(), &[3, 4, 5, 1, 2]);

        rb.rotate(-1);
        assert_eq!(rb.as_slice(), &[2, 3, 4, 5, 1]);

        rb.rotate(5);
        assert_eq!(rb.as_slice(), &[2, 3, 4, 5, 1]);
        rb.rotate(0);
        assert_eq!(rb.as_slice(), &[2, 3, 4, 5, 1]);
        rb.rotate(-5);
        assert_eq!(rb.as_slice(), &[2, 3, 4, 5, 1]);

        let mut rb = iter::repeat_n(false, allocation_granularity()).collect::<SliceRingBuffer<_>>();
        *rb.last_mut().unwrap() = true;
        rb.rotate(allocation_granularity().cast_signed());
        assert_eq!(rb.last(), Some(&true));
        rb.rotate(allocation_granularity().cast_signed().neg());
        assert_eq!(rb.last(), Some(&true));
        rb.rotate(-1);
        assert_eq!(rb.first(), Some(&true));
    }

    #[test]
    fn test_rotate_zst() {
        let mut rb = SliceRingBuffer::from([(); 5]);
        rb.rotate(1);
        assert_eq!(rb.first(), Some(&()));
        rb.rotate(-1);
        assert_eq!(rb.first(), Some(&()));
    }

    #[test]
    fn test_split_off() {
        let mut rb = SliceRingBuffer::from(vec![1, 2, 3, 4, 5]);
        let rb2 = rb.split_off(3);

        assert_eq!(rb.as_slice(), &[1, 2, 3]);
        assert_eq!(rb2.as_slice(), &[4, 5]);

        let ag = allocation_granularity();
        let mut rb = std::iter::repeat_n(true, ag - 8).collect::<SliceRingBuffer<_>>();
        for _ in 0..8 {
            rb.push_back(false);
        }
        let right = rb.split_off(ag - 8);
        assert_eq!(right.as_slice(), &[false; 8]);
        assert_eq!(rb.len(), ag - 8);
        for item in rb.as_slice() {
            assert!(*item);
        }
    }

    #[test]
    fn test_split_off_zst() {
        let mut zst_rb = SliceRingBuffer::from([(); 8]);
        let right = zst_rb.split_off(4);
        assert_eq!(right.len(), 4);
        assert_eq!(zst_rb.len(), 4);
    }

    #[test]
    fn test_append() {
        let mut rb1 = iter::repeat_n(true, allocation_granularity()).collect::<SliceRingBuffer<_>>();
        let mut rb2 = iter::repeat_n(false, allocation_granularity()).collect::<SliceRingBuffer<_>>();

        rb1.append(&mut rb2);

        for _ in 0..allocation_granularity() {
            assert!(rb1.pop_front().unwrap());
        }
        for _ in 0..allocation_granularity() {
            assert!(!rb1.pop_back().unwrap());
        }
        assert!(rb2.is_empty());
    }

    #[test]
    fn test_append_zst() {
        let mut zst_rb = SliceRingBuffer::from([(); 8]);
        zst_rb.append(&mut zst_rb.clone());
        assert!(!zst_rb.is_empty());
        assert_eq!(zst_rb.len(), 16);
    }

    #[test]
    fn test_truncate_back() {
        let mut rb = SliceRingBuffer::from(vec![1, 2, 3, 4, 5]);
        rb.truncate_back(3);
        assert_eq!(rb.as_slice(), &[1, 2, 3]);
        rb.truncate_back(5);
        assert_eq!(rb.as_slice(), &[1, 2, 3]);

        let mut rb = (0usize..allocation_granularity()).collect::<SliceRingBuffer<_>>();
        rb.extend(0..allocation_granularity());
        for i in 0..allocation_granularity() {
            assert_eq!(i, rb[i]);
        }

        for (idx, val) in (allocation_granularity()..allocation_granularity() * 2).enumerate() {
            assert_eq!(idx, rb[val]);
        }
    }

    #[test]
    fn test_truncate_back_zst() {
        let mut zst_rb = SliceRingBuffer::from([(); 96]);
        zst_rb.truncate_back(3);
        assert_eq!(zst_rb.as_slice(), &[(); 3]);
    }

    #[test]
    fn test_remove() {
        let mut rb = SliceRingBuffer::from(vec![1, 2, 3, 4, 5]);
        assert_eq!(rb.remove(2), 3);
        assert_eq!(rb.as_slice(), &[1, 2, 4, 5]);
    }

    #[test]
    fn test_remove_zst() {
        let mut zst_rb = SliceRingBuffer::from([(); 3]);
        assert_eq!(size_of_val(&zst_rb.remove(0)), 0);
        assert_eq!(zst_rb.as_slice(), &[(), ()]);
        zst_rb.remove(1);
        assert_eq!(zst_rb.len(), 1);
    }

    #[test]
    #[should_panic = "out of bounds"]
    fn test_remove_out_of_bounds() {
        let mut rb = SliceRingBuffer::from(vec![1, 2, 3]);
        rb.remove(3);
    }

    #[test]
    #[should_panic = "out of bounds"]
    fn test_remove_out_of_bounds_zst() {
        let mut rb = SliceRingBuffer::from([(); 8]);
        rb.remove(10);
    }

    #[test]
    fn test_swap_remove() {
        let mut rb = SliceRingBuffer::from(vec![1, 2, 3, 4, 5]);
        assert_eq!(rb.swap_remove(1), Some(2));
        assert_eq!(rb.as_slice(), &[1, 5, 3, 4]);
    }

    #[test]
    fn test_swap_remove_zst() {
        let mut zst_rb = SliceRingBuffer::from([(); 8]);
        zst_rb.swap_remove(3);
        assert_eq!(zst_rb.len(), 7);
    }

    #[test]
    fn test_try_insert() {
        let mut rb = SliceRingBuffer::with_capacity(5);
        rb.push_back(1);
        rb.push_back(2);
        rb.push_back(4);
        rb.push_back(5);

        assert!(rb.try_insert(2, 3).is_some());
        assert_eq!(rb.as_slice(), &[1, 2, 3, 4, 5]);
    }

    #[test]
    fn test_try_insert_zst() {
        let mut rb = SliceRingBuffer::from([(); 5]);
        assert!(rb.try_insert(2, ()).is_none());
        assert_eq!(rb.len(), 5);
    }

    #[test]
    fn test_insert() {
        let ag = allocation_granularity();
        let mut rb = SliceRingBuffer::from(vec![false; ag]);
        rb.insert(2, true);
        assert!(rb.is_full().not());
        assert_eq!(rb.swap_remove(2), Some(true));
        assert_eq!(rb.swap_remove(0), Some(false));
        assert_eq!(rb.swap_remove(rb.len() - 1), Some(false));
        assert_eq!(rb.capacity(), 2 * ag);
    }

    #[test]
    fn test_insert_zst() {
        let ag = allocation_granularity();
        let mut rb = SliceRingBuffer::from(vec![(); ag]);
        rb.insert(2, ());
        assert!(rb.is_full());
        assert_eq!(rb.swap_remove(2), Some(()));
        assert_eq!(rb.capacity(), ag);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut rb = SliceRingBuffer::with_capacity(20);
        rb.extend(0..5);
        rb.shrink_to_fit();
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());
        assert_eq!(rb.as_slice(), &[0, 1, 2, 3, 4]);

        let mut rb = SliceRingBuffer::with_capacity(2 * allocation_granularity());
        rb.push_back(1);
        rb.shrink_to_fit();
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());
    }

    #[test]
    fn test_shrink_to_fit_zst() {
        let mut rb = SliceRingBuffer::with_capacity(20);
        rb.extend([(); 5]);
        assert_eq!(rb.len(), 5);
        assert_eq!(rb.capacity(), 20);
        rb.shrink_to_fit();
        assert_eq!(rb.capacity(), 5);
    }

    #[test]
    fn test_sliceable_ring_buffer_empty() {
        let rb: SliceRingBuffer<i32> = SliceRingBuffer::new();
        assert!(rb.is_empty());
        assert_eq!(rb.len(), 0);
        assert_eq!(rb.capacity(), 0);
        assert_eq!(rb.as_slice(), &[] as &[i32]);
    }

    #[test]
    fn test_sliceable_ring_buffer_empty_zst() {
        let rb: SliceRingBuffer<()> = SliceRingBuffer::new();
        assert!(rb.is_empty());
        assert_eq!(rb.len(), 0);
        assert_eq!(rb.capacity(), MAX_PHYSICAL_BUF_SIZE);
        assert_eq!(rb.as_slice(), &[]);
    }

    #[test]
    fn test_index_access() {
        let mut rb = SliceRingBuffer::from(vec![1, 2, 3]);
        assert_eq!(rb[0], 1);
        assert_eq!(rb[1], 2);
        assert_eq!(rb[2], 3);

        rb[1] = 5;
        assert_eq!(rb[1], 5);
        assert_eq!(rb.as_slice(), &[1, 5, 3]);
    }

    #[test]
    fn test_reserve_and_shrink() {
        let mut rb = SliceRingBuffer::with_capacity(5);
        rb.extend(0..5);
        assert_eq!(rb.len(), 5);
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());

        rb.reserve(10);
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());

        rb.shrink_to_fit();
        assert_eq!(rb.capacity(), allocation_granularity() / size_of::<i32>());
    }

    #[test]
    fn test_reserve_and_shrink_zst() {
        let mut rb: SliceRingBuffer<()> = SliceRingBuffer::new();
        assert_eq!(rb.capacity(), MAX_PHYSICAL_BUF_SIZE);

        rb.reserve(10);
        assert_eq!(rb.capacity(), MAX_PHYSICAL_BUF_SIZE);

        rb.shrink_to_fit();
        assert_eq!(rb.capacity(), 0);
    }

    #[test]
    fn test_reserve() {
        let mut rb = SliceRingBuffer::from([false; 10]);
        rb.reserve(allocation_granularity() * 2);
        assert_eq!(rb.capacity(), allocation_granularity() * 3);
        rb.shrink_to_fit();
        assert_eq!(rb.capacity(), allocation_granularity());

        let mut rb = SliceRingBuffer::from(vec![false; allocation_granularity()]);
        rb.reserve(allocation_granularity() / 2);
        assert_eq!(rb.capacity(), allocation_granularity() * 2);
    }

    #[test]
    fn test_reserve_zst() {
        let mut rb = SliceRingBuffer::from([(); 10]);
        rb.reserve(32);
        assert_eq!(rb.capacity(), 42);
        rb.shrink_to_fit();
        assert_eq!(rb.capacity(), 10);
        rb.reserve(5);
        assert_eq!(rb.capacity(), 15);
    }

    #[test]
    fn test_get_and_get_mut() {
        let mut rb = SliceRingBuffer::new();
        rb.push_back(1);
        rb.push_back(2);
        rb.push_back(3);

        assert_eq!(rb.get(0), Some(&1));
        assert_eq!(rb.get(1), Some(&2));
        assert_eq!(rb.get(2), Some(&3));
        assert_eq!(rb.get(3), None);

        if let Some(elem) = rb.get_mut(1) {
            *elem = 7;
        }
        assert_eq!(rb.as_slice(), &[1, 7, 3]);

        assert_eq!(rb.get_mut(3), None);
    }

    #[test]
    fn test_get_and_get_mut_zst() {
        let rb = SliceRingBuffer::from([(); 10]);
        assert!(rb.get(3).is_some());
        assert!(rb.get(0).is_some());
        assert!(rb.get(10).is_none());
    }

    #[test]
    fn test_drain_middle() {
        let mut buf = (0..6).collect::<SliceRingBuffer<_>>();
        let drained: Vec<_> = buf.drain(2..4).collect();

        assert_eq!(drained, vec![2, 3]);
        assert_eq!(buf.as_slice(), &[0, 1, 4, 5]);
        assert_eq!(buf.len(), 4);
    }

    #[test]
    fn test_drain_all() {
        let mut buf = (0..5).collect::<SliceRingBuffer<_>>();
        let drained: Vec<_> = buf.drain(..).collect();

        assert_eq!(drained, vec![0, 1, 2, 3, 4]);
        assert!(buf.is_empty());
    }

    #[test]
    fn test_drain_from_start() {
        let mut buf = (0..5).collect::<SliceRingBuffer<_>>();
        let drained: Vec<_> = buf.drain(..2).collect();

        assert_eq!(drained, vec![0, 1]);
        assert_eq!(buf.as_slice(), &[2, 3, 4]);
    }

    #[test]
    fn test_drain_to_end() {
        let mut buf = (0..5).collect::<SliceRingBuffer<_>>();
        let drained: Vec<_> = buf.drain(3..).collect();

        assert_eq!(drained, vec![3, 4]);
        assert_eq!(buf.as_slice(), &[0, 1, 2]);
    }

    #[test]
    fn test_drain_empty_range() {
        let mut buf = (0..3).collect::<SliceRingBuffer<_>>();

        assert!(buf.drain(1..1).next().is_none());
        assert_eq!(buf.as_slice(), &[0, 1, 2]);
    }

    #[test]
    fn test_drain_drop_incomplete_move_back() {
        let mut buf = (0..10).collect::<SliceRingBuffer<_>>();
        {
            let mut drainer = buf.drain(2..5);
            assert_eq!(drainer.next(), Some(2));
        }

        assert_eq!(buf.as_slice(), &[0, 1, 5, 6, 7, 8, 9]);
        assert_eq!(buf.len(), 7);
    }

    #[test]
    fn test_drain_drop_incomplete_move_front() {
        let mut buf = (0..10).collect::<SliceRingBuffer<_>>();
        {
            let mut drainer = buf.drain(7..9);
            assert_eq!(drainer.next(), Some(7));
        }

        assert_eq!(buf.as_slice(), &[0, 1, 2, 3, 4, 5, 6, 9]);
        assert_eq!(buf.len(), 8);
    }

    #[test]
    fn test_drain_zst() {
        let mut buf = SliceRingBuffer::<()>::new();
        buf.push_back(());
        buf.push_back(());
        buf.push_back(());
        buf.push_back(());
        buf.push_back(());
        assert_eq!(buf.len(), 5);

        let mut drained = buf.drain(1..4);

        assert_eq!(drained.len(), 3);
        assert_eq!(drained.next(), Some(()));
        assert_eq!(drained.next(), Some(()));
        assert_eq!(drained.next(), Some(()));
        assert_eq!(drained.next(), None);

        drop(drained);

        assert_eq!(buf.len(), 2);
    }

    #[test]
    fn test_drain_zst_drop_incomplete() {
        let mut buf = SliceRingBuffer::<()>::new();
        buf.extend(iter::repeat_n((), 10));
        assert_eq!(buf.len(), 10);

        {
            let mut drainer = buf.drain(2..8);
            assert_eq!(drainer.next(), Some(()));
            assert_eq!(drainer.next(), Some(()));
        }

        assert_eq!(buf.len(), 4);
    }

    #[test]
    #[should_panic = "drain upper bound was too large"]
    fn test_drain_panic_end_out_of_bounds() {
        let mut buf = (0..5).collect::<SliceRingBuffer<_>>();
        let _ = buf.drain(..6);
    }

    #[test]
    #[should_panic = "drain lower bound was too large"]
    fn test_drain_panic_start_out_of_bounds() {
        let mut buf = (0..5).collect::<SliceRingBuffer<_>>();
        let _ = buf.drain(6..);
    }

    #[test]
    fn test_drain_next_back_only() {
        let mut buf = (0..10).collect::<SliceRingBuffer<_>>();
        let mut drainer = buf.drain(2..8);

        assert_eq!(drainer.next_back(), Some(7));
        assert_eq!(drainer.next_back(), Some(6));
        assert_eq!(drainer.next_back(), Some(5));
        assert_eq!(drainer.next_back(), Some(4));
        assert_eq!(drainer.next_back(), Some(3));
        assert_eq!(drainer.next_back(), Some(2));
        assert_eq!(drainer.next_back(), None);

        drop(drainer);
        assert_eq!(buf.as_slice(), &[0, 1, 8, 9]);
    }

    #[test]
    fn test_drain_mixed_next_and_next_back() {
        let mut buf = (0..10).collect::<SliceRingBuffer<_>>();
        let mut drainer = buf.drain(2..8);

        assert_eq!(drainer.next(), Some(2));
        assert_eq!(drainer.next_back(), Some(7));
        assert_eq!(drainer.len(), 4);

        assert_eq!(drainer.next(), Some(3));
        assert_eq!(drainer.next_back(), Some(6));
        assert_eq!(drainer.len(), 2);

        let remaining: Vec<_> = drainer.collect();
        assert_eq!(remaining, vec![4, 5]);

        assert_eq!(buf.as_slice(), &[0, 1, 8, 9]);
    }

    #[test]
    fn test_drain_next_back_until_empty() {
        let mut buf = (0..5).collect::<SliceRingBuffer<_>>();
        let mut drainer = buf.drain(1..4);

        assert_eq!(drainer.next(), Some(1));
        assert_eq!(drainer.next_back(), Some(3));
        assert_eq!(drainer.next(), Some(2));
        assert_eq!(drainer.next(), None);
        assert_eq!(drainer.next_back(), None);
    }

    #[test]
    fn test_drain_next_back_zst() {
        let mut buf = SliceRingBuffer::<()>::new();
        buf.extend(iter::repeat_n((), 10));

        let mut drainer = buf.drain(3..7);
        assert_eq!(drainer.len(), 4);

        assert_eq!(drainer.next_back(), Some(()));
        assert_eq!(drainer.len(), 3);

        assert_eq!(drainer.next(), Some(()));
        assert_eq!(drainer.len(), 2);

        assert_eq!(drainer.next_back(), Some(()));
        assert_eq!(drainer.len(), 1);

        assert_eq!(drainer.next_back(), Some(()));
        assert_eq!(drainer.len(), 0);

        assert_eq!(drainer.next_back(), None);

        drop(drainer);
        assert_eq!(buf.len(), 6);
    }

    #[test]
    fn test_split_to() {
        let mut rb = SliceRingBuffer::from(vec![1, 2, 3, 4, 5]);

        let front = rb.split_to(3);
        assert_eq!(front.as_slice(), &[1, 2, 3]);
        assert_eq!(rb.as_slice(), &[4, 5]);
        assert_eq!(rb.len(), 2);

        let mut rb2 = SliceRingBuffer::from(vec![10, 20]);
        let front2 = rb2.split_to(0);
        assert!(front2.is_empty());
        assert_eq!(rb2.as_slice(), &[10, 20]);

        let mut rb3 = SliceRingBuffer::from(vec![10, 20]);
        let front3 = rb3.split_to(2);
        assert_eq!(front3.as_slice(), &[10, 20]);
        assert!(rb3.is_empty());
    }

    #[test]
    fn test_split_to_zst() {
        let mut zst_rb = SliceRingBuffer::from([(); 8]);
        let front = zst_rb.split_to(5);
        assert_eq!(front.len(), 5);
        assert_eq!(zst_rb.len(), 3);
    }

    #[test]
    fn test_clone() {
        let mut rb1 = SliceRingBuffer::with_capacity(5);
        rb1.extend(0..5);

        assert_eq!(rb1.pop_front(), Some(0));
        assert_eq!(rb1.pop_front(), Some(1));
        rb1.push_back(5);

        let rb2 = rb1.clone();

        assert_eq!(rb1.as_slice(), rb2.as_slice());
        assert_eq!(rb1.len(), rb2.len());
        assert_eq!(rb1.head(), 2);
        assert_eq!(rb2.head(), 0);

        rb1.push_back(100);
        assert_ne!(rb1.as_slice(), rb2.as_slice());
        assert_eq!(rb2.as_slice(), &[2, 3, 4, 5]);
    }

    #[test]
    fn test_clone_zst() {
        let mut rb1: SliceRingBuffer<()> = SliceRingBuffer::with_capacity(10);
        rb1.extend(iter::repeat_n((), 5));
        let rb2 = rb1.clone();

        assert_eq!(rb1.len(), 5);
        assert_eq!(rb2.len(), 5);

        rb1.push_back(());
        assert_eq!(rb1.len(), 6);
        assert_eq!(rb2.len(), 5);
    }

    #[test]
    fn test_shrink_to() {
        let mut rb = SliceRingBuffer::with_capacity(allocation_granularity() * 2);
        rb.extend(0..10);

        rb.shrink_to(allocation_granularity());
        assert_eq!(rb.capacity(), allocation_granularity());
        assert_eq!(rb.as_slice(), &(0..10).collect::<Vec<_>>());

        rb.shrink_to(allocation_granularity() * 2);
        assert_eq!(rb.capacity(), allocation_granularity());
    }

    #[test]
    #[should_panic(expected = "min_capacity (5) cannot be less than current length (10)")]
    fn test_shrink_to_panic() {
        let mut rb = SliceRingBuffer::from(vec![0; 10]);
        rb.shrink_to(5);
    }

    #[test]
    fn test_try_push_full() {
        let mut rb = SliceRingBuffer::from(vec![false; allocation_granularity()]);
        assert_eq!(rb.len(), allocation_granularity());
        assert_eq!(rb.capacity(), allocation_granularity());
        assert!(rb.is_full());

        assert!(rb.try_push_back(true).is_none());
        assert!(rb.try_push_front(true).is_none());

        assert_eq!(rb.len(), allocation_granularity());
    }

    #[test]
    fn test_rotate_full() {
        let mut rb = SliceRingBuffer::from(vec![false; allocation_granularity()]);
        unsafe {
            *rb.get_unchecked_mut(0) = true;
        }
        assert_eq!(rb.get(0), Some(&true));
        assert_eq!(rb.head(), 0);
        rb.rotate(-2);
        assert_eq!(rb.head(), allocation_granularity() - 2);
        assert_eq!(rb.get(2), Some(&true));
    }

    #[test]
    fn test_eq_and_ord() {
        let rb1 = SliceRingBuffer::from(vec![1, 2, 3]);
        let mut rb2 = SliceRingBuffer::from(vec![1, 2, 3]);
        let rb3 = SliceRingBuffer::from(vec![1, 2, 4]);
        let rb4 = SliceRingBuffer::from(vec![1, 2]);

        assert_eq!(rb1, rb2);
        assert_ne!(rb1, rb3);
        assert_ne!(rb1, rb4);

        assert!(rb1 < rb3);
        assert!(rb4 < rb1);

        rb2.pop_front();
        rb2.push_back(4);

        let rb5 = SliceRingBuffer::from(vec![2, 3, 4]);
        assert_eq!(rb2, rb5);
    }

    #[test]
    fn test_append_front() {
        let mut rb1 = SliceRingBuffer::new();
        rb1.push_back(3);
        rb1.push_back(4);
        rb1.push_back(5);

        let mut rb2 = SliceRingBuffer::new();
        rb2.push_back(1);
        rb2.push_back(2);

        rb1.append_front(&mut rb2);

        assert_eq!(rb1.as_slice(), &[1, 2, 3, 4, 5]);
        assert!(rb2.is_empty());
        assert_eq!(rb1.len(), 5);
        assert_eq!(rb2.len(), 0);
    }

    #[test]
    fn test_append_front_zst() {
        let mut rb1 = SliceRingBuffer::new();
        rb1.push_back(());
        rb1.push_back(());

        let mut rb2 = SliceRingBuffer::new();
        rb2.push_back(());
        rb2.push_back(());
        rb2.push_back(());

        rb1.append_front(&mut rb2);

        assert_eq!(rb1.len(), 5);
        assert_eq!(rb2.len(), 0);
        assert!(rb2.is_empty());
    }

    #[test]
    fn test_append_front_empty_source() {
        let mut rb1 = SliceRingBuffer::new();
        rb1.push_back(1);
        rb1.push_back(2);

        let mut rb2 = SliceRingBuffer::new();

        rb1.append_front(&mut rb2);

        assert_eq!(rb1.as_slice(), &[1, 2]);
        assert_eq!(rb1.len(), 2);
        assert!(rb2.is_empty());
    }

    #[test]
    fn test_append_front_to_empty() {
        let mut rb1 = SliceRingBuffer::new();

        let mut rb2 = SliceRingBuffer::new();
        rb2.push_back(1);
        rb2.push_back(2);
        rb2.push_back(3);

        rb1.append_front(&mut rb2);

        assert_eq!(rb1.as_slice(), &[1, 2, 3]);
        assert_eq!(rb1.len(), 3);
        assert!(rb2.is_empty());
    }

    #[test]
    fn test_append_front_both_empty() {
        let mut rb1: SliceRingBuffer<i32> = SliceRingBuffer::new();
        let mut rb2: SliceRingBuffer<i32> = SliceRingBuffer::new();

        rb1.append_front(&mut rb2);

        assert!(rb1.is_empty());
        assert!(rb2.is_empty());
        assert_eq!(rb1.len(), 0);
        assert_eq!(rb2.len(), 0);
    }

    #[test]
    fn test_append_front_with_reallocation() {
        let mut rb1 = SliceRingBuffer::with_capacity(2);
        rb1.push_back(3);
        rb1.push_back(4);

        let mut rb2 = SliceRingBuffer::new();
        rb2.push_back(1);
        rb2.push_back(2);

        rb1.append_front(&mut rb2);

        assert_eq!(rb1.as_slice(), &[1, 2, 3, 4]);
        assert_eq!(rb1.len(), 4);
        assert!(rb2.is_empty());
    }

    #[test]
    fn test_append_front_wraparound() {
        let ag = allocation_granularity();
        let mut rb1 = SliceRingBuffer::with_capacity(ag + 2);

        for _ in 0..ag {
            rb1.push_back(false);
        }

        for _ in 0..3 {
            rb1.pop_front();
        }

        rb1.push_back(true);
        rb1.push_back(true);

        let mut rb2 = SliceRingBuffer::new();
        rb2.push_back(false);
        rb2.push_back(false);

        rb1.append_front(&mut rb2);

        assert!(!rb1.as_slice()[0]);
        assert!(!rb1.as_slice()[1]);
        assert!(!rb1.as_slice()[2]);
        assert!(rb1.as_slice()[rb1.len() - 2]);
        assert!(rb1.as_slice()[rb1.len() - 1]);

        assert_eq!(rb1.len(), ag - 3 + 2 + 2);
        assert!(rb2.is_empty());
    }
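
    // Added illustrative test (not from the original suite): a minimal
    // sketch of how `head()` and `tail()` track pushes and pops. It
    // assumes, like the tests above, that one allocation granule holds
    // more than a handful of `i32`s.
    #[test]
    fn test_head_and_tail_indices() {
        let mut rb = SliceRingBuffer::with_capacity(8);
        rb.push_back(1);
        rb.push_back(2);
        rb.push_back(3);
        // Nothing popped yet: the slice starts at physical index 0 and the
        // first free slot is index 3.
        assert_eq!(rb.head(), 0);
        assert_eq!(rb.tail(), 3);

        // Popping from the front advances `head`; `tail` stays put.
        assert_eq!(rb.pop_front(), Some(1));
        assert_eq!(rb.head(), 1);
        assert_eq!(rb.tail(), 3);
        assert_eq!(rb.as_slice(), &[2, 3]);
    }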
}

#[cfg(test)]
mod proptests {
    use super::*;
    use proptest::prelude::*;
    use std::collections::VecDeque;

    #[derive(Debug, Clone)]
    enum Op<T> {
        PushBack(T),
        PopBack,
        PushFront(T),
        PopFront,
        Insert(usize, T),
        Remove(usize),
        Clear,
    }

    fn arb_ops<T: Arbitrary + Clone + 'static>() -> impl Strategy<Value = Vec<Op<T>>> {
        prop::collection::vec(arb_op(), 0..100)
    }

    fn arb_op<T: Arbitrary + Clone + 'static>() -> impl Strategy<Value = Op<T>> {
        prop_oneof![
            any::<T>().prop_map(Op::PushBack),
            Just(Op::PopBack),
            any::<T>().prop_map(Op::PushFront),
            Just(Op::PopFront),
            (any::<usize>(), any::<T>()).prop_map(|(i, v)| Op::Insert(i, v)),
            any::<usize>().prop_map(Op::Remove),
            Just(Op::Clear),
        ]
    }

    proptest! {
        #[test]
        fn model_based_test(ops in arb_ops::<i32>()) {
            let mut srb = SliceRingBuffer::new();
            let mut model = VecDeque::new();

            for op in ops {
                match &op {
                    Op::PushBack(_) | Op::PushFront(_) | Op::Insert(_, _) => {
                        if srb.len() + 1 > SliceRingBuffer::<i32>::MAX_CAPACITY {
                            continue;
                        }
                    }
                    _ => {}
                }

                match op {
                    Op::PushBack(v) => {
                        srb.push_back(v);
                        model.push_back(v);
                    },
                    Op::PopBack => {
                        let res_srb = srb.pop_back();
                        let res_model = model.pop_back();
                        prop_assert_eq!(res_srb, res_model);
                    },
                    Op::PushFront(v) => {
                        srb.push_front(v);
                        model.push_front(v);
                    },
                    Op::PopFront => {
                        let res_srb = srb.pop_front();
                        let res_model = model.pop_front();
                        prop_assert_eq!(res_srb, res_model);
                    },
                    Op::Insert(idx, v) => {
                        let len = model.len();
                        let a_idx = if len == 0 { 0 } else { idx % (len + 1) };
                        srb.insert(a_idx, v);
                        model.insert(a_idx, v);
                    },
                    Op::Remove(idx) => {
                        if !model.is_empty() {
                            let len = model.len();
                            let a_idx = idx % len;
                            let res_srb = srb.remove(a_idx);
                            let res_model = model.remove(a_idx).unwrap();
                            prop_assert_eq!(res_srb, res_model);
                        }
                    },
                    Op::Clear => {
                        srb.clear();
                        model.clear();
                    }
                }

                prop_assert_eq!(srb.len(), model.len());
                prop_assert_eq!(srb.is_empty(), model.is_empty());
                prop_assert_eq!(srb.as_slice(), model.make_contiguous());
                prop_assert_eq!(srb.front(), model.front());
                prop_assert_eq!(srb.back(), model.back());
            }
        }
    }
}