#![cfg_attr(
    feature = "unstable",
    feature(
        core_intrinsics,
        exact_size_is_empty,
        dropck_eyepatch,
        trusted_len,
        ptr_wrapping_offset_from,
        specialization,
    )
)]
#![cfg_attr(all(test, feature = "unstable"), feature(box_syntax))]
#![allow(
    clippy::len_without_is_empty,
    clippy::shadow_reuse,
    clippy::cast_possible_wrap,
    clippy::cast_sign_loss,
    clippy::cast_possible_truncation,
    clippy::inline_always,
    clippy::indexing_slicing
)]
#![cfg_attr(not(any(feature = "use_std", test)), no_std)]

#[macro_use]
mod macros;

#[cfg(any(feature = "use_std", test))]
extern crate core;

#[cfg(all(
    any(target_os = "macos", target_os = "ios"),
    not(feature = "unix_sysv")
))]
extern crate mach2 as mach;

#[cfg(unix)]
extern crate libc;

#[cfg(target_os = "windows")]
extern crate winapi;

#[cfg(all(feature = "bytes_buf", feature = "use_std"))]
extern crate bytes;

mod mirrored;
pub use mirrored::{AllocError, Buffer};

#[cfg(all(feature = "bytes_buf", feature = "use_std"))]
use std::io;

use core::{cmp, convert, fmt, hash, iter, mem, ops, ptr, slice, str};

use core::ptr::NonNull;

#[cfg(feature = "unstable")]
use core::intrinsics;
177
#[cfg(not(feature = "unstable"))]
mod intrinsics {
    //! Stable stand-ins for the `core::intrinsics` functions used by this
    //! crate. On stable they are no-ops or plain pointer arithmetic.

    /// Branch-prediction hint; a no-op on stable.
    #[inline(always)]
    pub unsafe fn unlikely<T>(x: T) -> T {
        x
    }

    /// Optimizer hint; a no-op on stable (the value is returned unchanged).
    #[inline(always)]
    pub unsafe fn assume<T>(x: T) -> T {
        x
    }

    /// Wrapping pointer offset. Note that this offsets in *bytes*; within
    /// this crate it is only ever called with byte (`i8`) pointers, for
    /// which it agrees with `core::intrinsics::arith_offset`.
    #[inline(always)]
    pub unsafe fn arith_offset<T>(dst: *const T, offset: isize) -> *const T {
        let r = if offset >= 0 {
            (dst as usize).wrapping_add(offset as usize)
        } else {
            (dst as usize).wrapping_sub((-offset) as usize)
        };
        r as *const T
    }
}
205
206trait WrappingOffsetFrom {
208 fn wrapping_offset_from_(self, other: Self) -> Option<isize>;
210}
211
212#[cfg(not(feature = "unstable"))]
213impl<T: Sized> WrappingOffsetFrom for *const T {
214 #[inline(always)]
215 fn wrapping_offset_from_(self, other: Self) -> Option<isize>
216 where
217 T: Sized,
218 {
219 let size = mem::size_of::<T>();
220 if size == 0 {
221 None
222 } else {
223 let diff = (other as isize).wrapping_sub(self as isize);
224 Some(diff / size as isize)
225 }
226 }
227}
228
229#[cfg(feature = "unstable")]
230impl<T: Sized> WrappingOffsetFrom for *const T {
231 #[inline(always)]
232 fn wrapping_offset_from_(self, other: Self) -> Option<isize>
233 where
234 T: Sized,
235 {
236 let size = mem::size_of::<T>();
237 if size == 0 {
238 None
239 } else {
240 Some(other.wrapping_offset_from(self))
241 }
242 }
243}
244
245fn in_bounds<T>(s: &[T], p: *mut T) -> bool {
250 let p = p as usize;
251 let s_begin = s.as_ptr() as usize;
252 let s_end = s_begin + mem::size_of::<T>() * s.len();
253 s_begin <= p && p < s_end
254}
255
256unsafe fn nonnull_raw_slice<T>(ptr: *mut T, len: usize) -> NonNull<[T]> {
257 NonNull::new_unchecked(slice::from_raw_parts_mut(ptr, len))
258}
259
/// A double-ended queue that dereferences to a single contiguous slice,
/// backed by a mirrored memory buffer.
pub struct SliceRingBuffer<T> {
    /// Window of initialized elements within the mirrored buffer.
    elems_: NonNull<[T]>,
    /// The underlying mirrored memory buffer (two regions, each `capacity()`
    /// elements long).
    buf: Buffer<T>,
}
269
270unsafe impl<T> Send for SliceRingBuffer<T> where T: Send {}
272unsafe impl<T> Sync for SliceRingBuffer<T> where T: Sync {}
274
275#[doc(hidden)]
277pub use mem::forget as __mem_forget;
278
279#[macro_export]
333macro_rules! sdeq {
334 ($elem:expr; $n:expr) => (
335 {
336 let mut deq = $crate::SliceRingBuffer::with_capacity($n);
337 deq.resize($n, $elem);
338 deq
339 }
340 );
341 () => ( $crate::SliceRingBuffer::new() );
342 ($($x:expr),*) => (
343 {
344 unsafe {
345 let array = [$($x),*];
346 let deq = $crate::SliceRingBuffer::steal_from_slice(&array);
347 #[allow(clippy::forget_copy)]
348 $crate::__mem_forget(array);
349 deq
350 }
351 }
352 );
353 ($($x:expr,)*) => (sdeq![$($x),*])
354}
355
356impl<T> SliceRingBuffer<T> {
357 #[inline]
367 pub fn new() -> Self {
368 unsafe {
369 let buf = Buffer::new();
370 Self {
371 elems_: nonnull_raw_slice(buf.ptr(), 0),
372 buf,
373 }
374 }
375 }
376
377 #[inline]
383 pub unsafe fn from_raw_parts(
384 ptr: *mut T, capacity: usize, elems: &mut [T],
385 ) -> Self {
386 let begin = elems.as_mut_ptr();
387 debug_assert!(in_bounds(slice::from_raw_parts(ptr, capacity), begin));
388 debug_assert!(elems.len() < capacity);
389
390 Self {
391 elems_: NonNull::new_unchecked(elems),
392 buf: Buffer::from_raw_parts(ptr, capacity * 2),
393 }
394 }
395
396 #[inline]
406 pub fn with_capacity(n: usize) -> Self {
407 unsafe {
408 let buf = Buffer::uninitialized(2 * n).unwrap_or_else(|e| {
409 let s = tiny_str!(
410 "failed to allocate a buffer with capacity \"{}\" due to \"{:?}\"",
411 n, e
412 );
413 panic!("{}", s.as_str())
414 });
415 Self {
416 elems_: nonnull_raw_slice(buf.ptr(), 0),
417 buf,
418 }
419 }
420 }
421
422 #[inline]
434 pub fn capacity(&self) -> usize {
435 self.buf.len() / 2
438 }
439
440 #[inline]
452 pub fn len(&self) -> usize {
453 let l = self.as_slice().len();
454 debug_assert!(l <= self.capacity());
455 l
456 }
457
458 #[inline]
469 pub fn is_full(&self) -> bool {
470 self.len() == self.capacity()
471 }
472
473 #[inline]
475 pub fn as_slice(&self) -> &[T] {
476 unsafe { self.elems_.as_ref() }
477 }
478
479 #[inline]
481 pub fn as_mut_slice(&mut self) -> &mut [T] {
482 unsafe { self.elems_.as_mut() }
483 }
484
485 #[inline]
488 pub fn as_slices(&self) -> (&[T], &[T]) {
489 let left = self.as_slice();
490 let right = &[];
491 (left, right)
492 }
493
494 #[inline]
497 pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
498 let left = self.as_mut_slice();
499 let right = &mut [];
500 (left, right)
501 }
502
503 pub unsafe fn tail_head_slice(&mut self) -> &mut [mem::MaybeUninit<T>] {
531 let ptr = self.as_mut_slice().as_mut_ptr().add(self.len());
532 slice::from_raw_parts_mut(ptr as _, self.capacity() - self.len())
533 }
534
535 #[inline]
545 pub fn try_reserve(
546 &mut self, additional: usize,
547 ) -> Result<(), AllocError> {
548 let old_len = self.len();
549 let new_cap = self.grow_policy(additional);
550 self.reserve_capacity(new_cap)?;
551 debug_assert!(self.capacity() >= old_len + additional);
552 Ok(())
553 }
554
555 #[inline]
564 pub fn reserve(&mut self, additional: usize) {
565 self.try_reserve(additional).unwrap();
566 }
567
568 #[inline]
571 fn reserve_capacity(
572 &mut self, new_capacity: usize,
573 ) -> Result<(), AllocError> {
574 unsafe {
575 if new_capacity <= self.capacity() {
576 return Ok(());
577 }
578
579 let mut new_buffer = Buffer::uninitialized(2 * new_capacity)?;
580 debug_assert!(new_buffer.len() >= 2 * new_capacity);
581
582 let len = self.len();
583 {
586 let from_ptr = self.as_mut_ptr();
587 let to_ptr = new_buffer.as_mut_slice().as_mut_ptr();
588 crate::ptr::copy_nonoverlapping(from_ptr, to_ptr, len);
589 }
590
591 crate::mem::swap(&mut self.buf, &mut new_buffer);
593
594 self.elems_ = nonnull_raw_slice(self.buf.ptr(), len);
597 Ok(())
598 }
599 }
600
601 #[inline]
625 pub fn reserve_exact(&mut self, additional: usize) {
626 let old_len = self.len();
627 let new_cap = old_len.checked_add(additional).expect("overflow");
628 self.reserve_capacity(new_cap).unwrap();
629 debug_assert!(self.capacity() >= old_len + additional);
630 }
631
632 #[inline]
635 fn grow_policy(&self, additional: usize) -> usize {
636 let cur_cap = self.capacity();
637 let old_len = self.len();
638 let req_cap = old_len.checked_add(additional).expect("overflow");
639 if req_cap > cur_cap {
640 let dbl_cap = cur_cap.saturating_mul(2);
641 cmp::max(req_cap, dbl_cap)
642 } else {
643 req_cap
644 }
645 }
646
647 #[inline]
661 pub unsafe fn move_head_unchecked(&mut self, x: isize) {
662 let cap = self.capacity();
663 let len = self.len();
664 debug_assert!(x >= -((cap - len) as isize));
666 debug_assert!(x <= len as isize);
667
668 let mut new_begin = self.as_mut_ptr().offset(x) as usize;
670
671 let first_region_begin = self.buf.ptr() as usize;
673 let region_size = Buffer::<T>::size_in_bytes(self.buf.len()) / 2;
674 debug_assert!(cap * mem::size_of::<T>() <= region_size);
675 let second_region_begin = first_region_begin + region_size;
676
677 if new_begin < first_region_begin {
680 new_begin += region_size;
681 } else if new_begin >= second_region_begin {
682 debug_assert!(new_begin < second_region_begin + region_size);
684 new_begin -= region_size;
685 }
686 debug_assert!(new_begin >= first_region_begin);
687 debug_assert!(new_begin < second_region_begin);
688
689 let new_begin = new_begin as *mut T;
691 debug_assert!(in_bounds(
692 slice::from_raw_parts(self.buf.ptr() as *mut u8, region_size),
693 new_begin as *mut u8
694 ));
695
696 let new_len = len as isize - x;
697 debug_assert!(
698 new_len >= 0,
699 "len = {}, x = {}, new_len = {}",
700 len,
701 x,
702 new_len
703 );
704 debug_assert!(new_len <= cap as isize);
705 self.elems_ = nonnull_raw_slice(new_begin, new_len as usize);
706 }
707
708 #[inline]
720 pub unsafe fn move_head(&mut self, x: isize) {
721 assert!(
722 x >= -((self.capacity() - self.len()) as isize)
723 && x <= self.len() as isize
724 );
725 self.move_head_unchecked(x)
726 }
727
728 #[inline]
742 pub unsafe fn move_tail_unchecked(&mut self, x: isize) {
743 let len = self.len();
745 let cap = self.capacity();
746 debug_assert!(x >= -(len as isize));
747 debug_assert!(x <= (cap - len) as isize);
748
749 let new_len = len as isize + x;
750 debug_assert!(new_len >= 0);
751 debug_assert!(new_len <= cap as isize);
752
753 self.elems_ = nonnull_raw_slice(self.as_mut_ptr(), new_len as usize);
754 }
755
756 #[inline]
768 pub unsafe fn move_tail(&mut self, x: isize) {
769 assert!(
770 x >= -(self.len() as isize)
771 && x <= (self.capacity() - self.len()) as isize
772 );
773 self.move_tail_unchecked(x);
774 }
775
776 #[inline]
778 unsafe fn append_elements(&mut self, other: *const [T]) {
779 let count = (*other).len();
780 if count > 0 {
782 self.reserve(count);
783 let len = self.len();
784 self.move_tail_unchecked(count as isize);
785 ptr::copy_nonoverlapping(
786 other as *const T,
787 self.get_unchecked_mut(len),
788 count,
789 );
790 }
791 }
792
793 pub unsafe fn steal_from_slice(s: &[T]) -> Self {
796 let mut deq = Self::new();
797 deq.append_elements(s as *const _);
798 deq
799 }
800
801 #[inline]
821 pub fn append(&mut self, other: &mut Self) {
822 unsafe {
823 self.append_elements(other.as_slice() as _);
824 other.elems_ = nonnull_raw_slice(other.buf.ptr(), 0);
825 }
826 }
827
828 #[inline]
845 pub fn front(&self) -> Option<&T> {
846 self.get(0)
847 }
848
849 #[inline]
866 pub fn front_mut(&mut self) -> Option<&mut T> {
867 self.get_mut(0)
868 }
869
870 #[inline]
887 pub fn back(&self) -> Option<&T> {
888 let last_idx = self.len().wrapping_sub(1);
889 self.get(last_idx)
890 }
891
892 #[inline]
909 pub fn back_mut(&mut self) -> Option<&mut T> {
910 let last_idx = self.len().wrapping_sub(1);
911 self.get_mut(last_idx)
912 }
913
914 #[inline]
926 pub fn try_push_front(&mut self, value: T) -> Result<(), (T, AllocError)> {
927 unsafe {
928 if intrinsics::unlikely(self.is_full()) {
929 if let Err(e) = self.try_reserve(1) {
930 return Err((value, e));
931 }
932 }
933
934 self.move_head_unchecked(-1);
935 ptr::write(self.get_mut(0).unwrap(), value);
936 Ok(())
937 }
938 }
939
940 #[inline]
956 pub fn push_front(&mut self, value: T) {
957 if let Err(e) = self.try_push_front(value) {
958 panic!("{:?}", e.1);
959 }
960 }
961
962 #[inline]
974 pub fn try_push_back(&mut self, value: T) -> Result<(), (T, AllocError)> {
975 unsafe {
976 if intrinsics::unlikely(self.is_full()) {
977 if let Err(e) = self.try_reserve(1) {
978 return Err((value, e));
979 }
980 }
981 self.move_tail_unchecked(1);
982 let len = self.len();
983 ptr::write(self.get_mut(len - 1).unwrap(), value);
984 Ok(())
985 }
986 }
987
988 #[inline]
1004 pub fn push_back(&mut self, value: T) {
1005 if let Err(e) = self.try_push_back(value) {
1006 panic!("{:?}", e.1);
1007 }
1008 }
1009
1010 #[inline]
1028 pub fn pop_front(&mut self) -> Option<T> {
1029 unsafe {
1030 let v = match self.get_mut(0) {
1031 None => return None,
1032 Some(v) => ptr::read(v),
1033 };
1034 self.move_head_unchecked(1);
1035 Some(v)
1036 }
1037 }
1038
1039 #[inline]
1057 pub fn pop_back(&mut self) -> Option<T> {
1058 unsafe {
1059 let len = self.len();
1060 let v = match self.get_mut(len.wrapping_sub(1)) {
1061 None => return None,
1062 Some(v) => ptr::read(v),
1063 };
1064 self.move_tail_unchecked(-1);
1065 Some(v)
1066 }
1067 }
1068
1069 #[inline]
1088 pub fn shrink_to_fit(&mut self) {
1089 if unsafe { intrinsics::unlikely(self.is_empty()) } {
1090 return;
1091 }
1092
1093 let mut new_sdeq = Self::with_capacity(self.len());
1096 if new_sdeq.capacity() < self.capacity() {
1097 unsafe {
1098 crate::ptr::copy_nonoverlapping(
1099 self.as_mut_ptr(),
1100 new_sdeq.as_mut_ptr(),
1101 self.len(),
1102 );
1103 new_sdeq.elems_ =
1104 nonnull_raw_slice(new_sdeq.buf.ptr(), self.len());
1105 mem::swap(self, &mut new_sdeq);
1106 }
1107 }
1108 }
1109
1110 #[inline]
1128 pub fn truncate_back(&mut self, len: usize) {
1129 unsafe {
1130 if len >= self.len() {
1131 return;
1132 }
1133
1134 let diff = self.len() - len;
1135 let s = &mut self[len..] as *mut [_];
1136 self.move_tail(-(diff as isize));
1139 ptr::drop_in_place(&mut *s);
1140 debug_assert_eq!(self.len(), len);
1141 }
1142 }
1143
1144 #[inline]
1149 pub fn truncate(&mut self, len: usize) {
1150 self.truncate_back(len);
1151 }
1152
1153 #[inline]
1171 pub fn truncate_front(&mut self, len: usize) {
1172 unsafe {
1173 if len >= self.len() {
1174 return;
1175 }
1176
1177 let diff = self.len() - len;
1178 let s = &mut self[..diff] as *mut [_];
1179 self.move_head(diff as isize);
1182 ptr::drop_in_place(&mut *s);
1183 debug_assert_eq!(self.len(), len);
1184 }
1185 }
1186
1187 #[inline]
1218 #[allow(clippy::needless_pass_by_value)]
1219 pub fn drain<R>(&mut self, range: R) -> Drain<T>
1220 where
1221 R: ops::RangeBounds<usize>,
1222 {
1223 use ops::Bound::{Excluded, Included, Unbounded};
1224 let len = self.len();
1236 let start = match range.start_bound() {
1237 Included(&n) => n,
1238 Excluded(&n) => n + 1,
1239 Unbounded => 0,
1240 };
1241 let end = match range.end_bound() {
1242 Included(&n) => n + 1,
1243 Excluded(&n) => n,
1244 Unbounded => len,
1245 };
1246 assert!(start <= end);
1247 assert!(end <= len);
1248
1249 unsafe {
1250 self.elems_ = nonnull_raw_slice(self.as_mut_ptr(), start);
1253 let range_slice = slice::from_raw_parts_mut(
1256 if mem::size_of::<T>() == 0 {
1257 intrinsics::arith_offset(
1258 self.as_mut_ptr() as *mut i8,
1259 start as _,
1260 ) as *mut _
1261 } else {
1262 self.as_mut_ptr().add(start)
1263 },
1264 end - start,
1265 );
1266 Drain {
1267 tail_start: end,
1268 tail_len: len - end,
1269 iter: range_slice.iter(),
1270 deq: NonNull::from(self),
1271 }
1272 }
1273 }
1274
1275 #[inline]
1290 pub fn clear(&mut self) {
1291 self.truncate(0);
1292 }
1293
1294 #[inline]
1310 pub fn swap_remove_back(&mut self, index: usize) -> Option<T> {
1311 let len = self.len();
1312 if self.is_empty() {
1313 None
1314 } else {
1315 self.swap(index, len - 1);
1316 self.pop_back()
1317 }
1318 }
1319
1320 #[inline]
1336 pub fn swap_remove_front(&mut self, index: usize) -> Option<T> {
1337 if self.is_empty() {
1338 None
1339 } else {
1340 self.swap(index, 0);
1341 self.pop_front()
1342 }
1343 }
1344
1345 #[inline]
1368 pub fn insert(&mut self, index: usize, element: T) {
1369 unsafe {
1370 let len = self.len();
1371 assert!(index <= len);
1372
1373 if intrinsics::unlikely(self.is_full()) {
1374 self.reserve(1);
1375 }
1380
            let p = if index > self.len() / 2 {
                // Closer to the back: shift `[index, len)` one slot towards
                // the tail.
                let p = self.as_mut_ptr().add(index);
                ptr::copy(p, p.add(1), len - index);
                self.move_tail_unchecked(1);
                p
            } else {
                // Closer to the front: grow one slot towards the head and
                // shift `[0, index)` into it.
                self.move_head_unchecked(-1);
                let p = self.as_mut_ptr();
                ptr::copy(p.add(1), p, index);
                p.add(index)
            };
            ptr::write(p, element);
        }
    }
1397
1398 #[inline]
    #[allow(clippy::shadow_unrelated)]
    pub fn remove(&mut self, index: usize) -> T {
1418 let len = self.len();
1419 assert!(index < len);
1420 unsafe {
1421 let ptr = self.as_mut_ptr().add(index);
1423 let ret = ptr::read(ptr);
1424 if index > self.len() / 2 {
1425 ptr::copy(ptr.add(1), ptr, len - index - 1);
1428 self.move_tail_unchecked(-1);
1429 } else {
1430 let ptr = self.as_mut_ptr();
1433 ptr::copy(ptr, ptr.add(1), index);
1434 self.move_head_unchecked(1);
1435 }
1436
1437 ret
1438 }
1439 }
1440
1441 #[inline]
1465 pub fn split_off(&mut self, at: usize) -> Self {
1466 assert!(at <= self.len(), "`at` out of bounds");
1467
1468 let other_len = self.len() - at;
1469 let mut other = Self::with_capacity(other_len);
1470
1471 unsafe {
1472 self.move_tail_unchecked(-(other_len as isize));
1473 other.move_tail_unchecked(other_len as isize);
1474
1475 ptr::copy_nonoverlapping(
1476 self.as_ptr().add(at),
1477 other.as_mut_ptr(),
1478 other.len(),
1479 );
1480 }
1481 other
1482 }
1483
1484 #[inline]
1502 pub fn retain<F>(&mut self, mut f: F)
1503 where
1504 F: FnMut(&T) -> bool,
1505 {
1506 let len = self.len();
1507 let mut del = 0;
1508 {
1509 let v = &mut **self;
1510
1511 for i in 0..len {
1512 if !f(&v[i]) {
1513 del += 1;
1514 } else if del > 0 {
1515 v.swap(i - del, i);
1516 }
1517 }
1518 }
1519 if del > 0 {
1520 self.truncate(len - del);
1521 }
1522 }
1523
1524 #[inline]
1542 pub fn dedup_by_key<F, K>(&mut self, mut key: F)
1543 where
1544 F: FnMut(&mut T) -> K,
1545 K: PartialEq,
1546 {
1547 self.dedup_by(|a, b| key(a) == key(b))
1548 }
1549
1550 #[inline]
1575 pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1576 where
1577 F: FnMut(&mut T, &mut T) -> bool,
1578 {
1579 unsafe {
1580 let ln = self.len();
1638 if intrinsics::unlikely(ln <= 1) {
1639 return;
1640 }
1641
1642 let p = self.as_mut_ptr();
1644 let mut r: usize = 1;
1645 let mut w: usize = 1;
1646
1647 while r < ln {
1648 let p_r = p.add(r);
1649 let p_wm1 = p.add(w - 1);
1650 if !same_bucket(&mut *p_r, &mut *p_wm1) {
1651 if r != w {
1652 let p_w = p_wm1.add(1);
1653 mem::swap(&mut *p_r, &mut *p_w);
1654 }
1655 w += 1;
1656 }
1657 r += 1;
1658 }
1659
1660 self.truncate(w);
1661 }
1662 }
1663
1664 #[inline]
1666 fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, value: E) {
1667 self.reserve(n);
1668
1669 unsafe {
1670 let mut ptr = self.as_mut_ptr().add(self.len());
1671
1672 for _ in 1..n {
1674 ptr::write(ptr, value.next());
1675 ptr = ptr.add(1);
1676 self.move_tail_unchecked(1);
1678 }
1679
1680 if n > 0 {
1681 ptr::write(ptr, value.last());
1684 self.move_tail_unchecked(1);
1685 }
1686
1687 }
1689 }
1690
1691 #[inline]
1699 fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
1700 #[allow(clippy::while_let_on_iterator)]
1701 while let Some(element) = iterator.next() {
1702 let len = self.len();
1703 let cap = self.capacity();
1704 if len == cap {
1705 let (lower, upper) = iterator.size_hint();
1706 let additional_cap = if let Some(upper) = upper {
1707 upper
1708 } else {
1709 lower
1710 }
1711 .checked_add(1)
1712 .expect("overflow");
1713 self.reserve(additional_cap);
1714 }
1715 debug_assert!(self.len() < self.capacity());
1716 unsafe {
1717 self.move_tail_unchecked(1);
1720 ptr::write(self.get_unchecked_mut(len), element);
1721 }
1722 }
1723 }
1724
1725 #[inline]
1766 pub fn splice<R, I>(
1767 &mut self, range: R, replace_with: I,
1768 ) -> Splice<I::IntoIter>
1769 where
1770 R: ops::RangeBounds<usize>,
1771 I: IntoIterator<Item = T>,
1772 {
1773 Splice {
1774 drain: self.drain(range),
1775 replace_with: replace_with.into_iter(),
1776 }
1777 }
1778
1779 #[inline]
1839 pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<T, F>
1840 where
1841 F: FnMut(&mut T) -> bool,
1842 {
1843 let old_len = self.len();
1844
1845 unsafe {
1847 self.move_tail_unchecked(-(old_len as isize));
1848 }
1849
1850 DrainFilter {
1851 deq: self,
1852 idx: 0,
1853 del: 0,
1854 old_len,
1855 pred: filter,
1856 }
1857 }
1858}
1859
1860impl<T> SliceRingBuffer<T>
1861where
1862 T: Clone,
1863{
1864 #[inline]
1884 pub fn extend_from_slice(&mut self, other: &[T]) {
1885 #[cfg(feature = "unstable")]
1886 {
1887 self.spec_extend(other.iter())
1888 }
1889 #[cfg(not(feature = "unstable"))]
1890 {
1891 self.reserve(other.len());
1892 unsafe {
1893 let len = self.len();
1894 self.move_tail_unchecked(other.len() as isize);
1895 self.get_unchecked_mut(len..).clone_from_slice(other);
1896 }
1897 }
1898 }
1899
1900 #[inline]
1921 pub fn resize(&mut self, new_len: usize, value: T) {
1922 let len = self.len();
1923
1924 if new_len > len {
1925 self.reserve(new_len - len);
1926 while self.len() < new_len {
1927 self.push_back(value.clone());
1928 }
1929 } else {
1930 self.truncate(new_len);
1931 }
1932 debug_assert!(self.len() == new_len);
1933 }
1934}
1935
1936impl<T: Default> SliceRingBuffer<T> {
1937 #[inline]
1964 pub fn resize_default(&mut self, new_len: usize) {
1965 let len = self.len();
1966
1967 if new_len > len {
1968 self.extend_with(new_len - len, ExtendDefault);
1969 } else {
1970 self.truncate(new_len);
1971 }
1972 }
1973}
1974
1975impl<T: PartialEq> SliceRingBuffer<T> {
1976 #[inline]
1999 pub fn dedup(&mut self) {
2000 self.dedup_by(|a, b| a == b)
2001 }
2002
2003 #[inline]
2020 pub fn remove_item(&mut self, item: &T) -> Option<T> {
2021 let pos = match self.iter().position(|x| *x == *item) {
2022 Some(x) => x,
2023 None => return None,
2024 };
2025 Some(self.remove(pos))
2026 }
2027}
2028
2029impl<T: fmt::Debug> fmt::Debug for SliceRingBuffer<T> {
2030 fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
2031 write!(f, "{:?}", self.as_slice())
2032 }
2045}
2046
2047impl<T> Drop for SliceRingBuffer<T> {
2048 #[inline]
2049 fn drop(&mut self) {
2050 unsafe {
2054 ptr::drop_in_place(&mut self[..]);
2056 }
2057 }
2059}
2060
2061impl<T> ops::Deref for SliceRingBuffer<T> {
2062 type Target = [T];
2063 #[inline]
2064 fn deref(&self) -> &Self::Target {
2065 self.as_slice()
2066 }
2067}
2068
2069impl<T> ops::DerefMut for SliceRingBuffer<T> {
2070 #[inline]
2071 fn deref_mut(&mut self) -> &mut Self::Target {
2072 self.as_mut_slice()
2073 }
2074}
2075
2076impl<T> Default for SliceRingBuffer<T> {
2077 #[inline]
2078 fn default() -> Self {
2079 Self::new()
2080 }
2081}
2082
2083impl<T: Clone> Clone for SliceRingBuffer<T> {
2084 #[inline]
2085 fn clone(&self) -> Self {
2086 let mut new = Self::with_capacity(self.len());
2087 for i in self.iter() {
2088 new.push_back(i.clone());
2089 }
2090 new
2091 }
2092 #[inline]
2093 fn clone_from(&mut self, other: &Self) {
2094 self.clear();
2095 for i in other.iter() {
2096 self.push_back(i.clone());
2097 }
2098 }
2099}
2100
2101impl<'a, T: Clone> From<&'a [T]> for SliceRingBuffer<T> {
2102 #[inline]
2103 fn from(s: &'a [T]) -> Self {
2104 let mut new = Self::with_capacity(s.len());
2105 for i in s {
2106 new.push_back(i.clone());
2107 }
2108 new
2109 }
2110}
2111
2112impl<'a, T: Clone> From<&'a mut [T]> for SliceRingBuffer<T> {
2113 #[inline]
2114 fn from(s: &'a mut [T]) -> Self {
2115 let mut new = Self::with_capacity(s.len());
2116 for i in s {
2117 new.push_back(i.clone());
2118 }
2119 new
2120 }
2121}
2122
2123impl<T: hash::Hash> hash::Hash for SliceRingBuffer<T> {
2124 #[inline]
2125 fn hash<H: hash::Hasher>(&self, state: &mut H) {
2126 hash::Hash::hash(&**self, state)
2127 }
2128}
2129
2130macro_rules! __impl_slice_eq1 {
2134 ($Lhs:ty, $Rhs:ty) => {
2135 __impl_slice_eq1! { $Lhs, $Rhs, Sized }
2136 };
2137 ($Lhs:ty, $Rhs:ty, $Bound:ident) => {
2138 impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs
2139 where
2140 A: PartialEq<B>,
2141 {
2142 #[inline]
2143 fn eq(&self, other: &$Rhs) -> bool {
2144 self[..] == other[..]
2145 }
2146 }
2147 };
2148}
2149
2150__impl_slice_eq1! { SliceRingBuffer<A>, SliceRingBuffer<B> }
2151__impl_slice_eq1! { SliceRingBuffer<A>, &'b [B] }
2152__impl_slice_eq1! { SliceRingBuffer<A>, &'b mut [B] }
2153
2154#[cfg(feature = "use_std")]
2155__impl_slice_eq1! { SliceRingBuffer<A>, Vec<B> }
2156
2157macro_rules! array_impls {
2158 ($($N: expr)+) => {
2159 $(
2160 __impl_slice_eq1! { SliceRingBuffer<A>, [B; $N] }
2162 __impl_slice_eq1! { SliceRingBuffer<A>, &'b [B; $N] }
2163 )+
2164 }
2165}
2166
2167array_impls! {
2168 0 1 2 3 4 5 6 7 8 9
2169 10 11 12 13 14 15 16 17 18 19
2170 20 21 22 23 24 25 26 27 28 29
2171 30 31 32
2172}
2173
2174impl<T: Eq> Eq for SliceRingBuffer<T> {}
2177
2178impl<T: PartialOrd> PartialOrd for SliceRingBuffer<T> {
2179 #[inline]
2180 fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
2181 PartialOrd::partial_cmp(&**self, &**other)
2182 }
2183}
2184
2185impl<'a, T: PartialOrd> PartialOrd<&'a [T]> for SliceRingBuffer<T> {
2186 #[inline]
2187 fn partial_cmp(&self, other: &&'a [T]) -> Option<cmp::Ordering> {
2188 PartialOrd::partial_cmp(&**self, other)
2189 }
2190}
2191
2192pub struct Drain<'a, T: 'a> {
2199 tail_start: usize,
2201 tail_len: usize,
2203 iter: slice::Iter<'a, T>,
2205 deq: NonNull<SliceRingBuffer<T>>,
2207}
2208
2209impl<'a, T: 'a + fmt::Debug> fmt::Debug for Drain<'a, T> {
2210 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2211 f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
2212 }
2213}
2214
2215unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {}
2216unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
2217
2218impl<'a, T> Iterator for Drain<'a, T> {
2219 type Item = T;
2220
2221 #[inline]
2222 fn next(&mut self) -> Option<T> {
2223 self.iter
2224 .next()
2225 .map(|elt| unsafe { ptr::read(elt as *const _) })
2226 }
2227 #[inline]
2228 fn size_hint(&self) -> (usize, Option<usize>) {
2229 self.iter.size_hint()
2230 }
2231}
2232
2233impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
2234 #[inline]
2235 fn next_back(&mut self) -> Option<T> {
2236 self.iter
2237 .next_back()
2238 .map(|elt| unsafe { ptr::read(elt as *const _) })
2239 }
2240}
2241
2242impl<'a, T> Drop for Drain<'a, T> {
2243 #[inline]
2244 fn drop(&mut self) {
2245 self.for_each(|_| {});
2247
2248 if self.tail_len > 0 {
2249 unsafe {
2250 let source_deq = self.deq.as_mut();
2251 let start = source_deq.len();
2253 let tail = self.tail_start;
2254 let src = source_deq.as_ptr().add(tail);
2255 let dst = source_deq.as_mut_ptr().add(start);
2256 ptr::copy(src, dst, self.tail_len);
2257 source_deq.move_tail_unchecked(self.tail_len as isize);
2258 }
2259 }
2260 }
2261}
2262
2263#[cfg(feature = "unstable")]
2264impl<'a, T> ExactSizeIterator for Drain<'a, T> {
2265 #[inline]
2266 fn is_empty(&self) -> bool {
2267 self.iter.is_empty()
2268 }
2269}
2270
2271#[cfg(feature = "unstable")]
2272impl<'a, T> iter::FusedIterator for Drain<'a, T> {}
2273
2274pub struct IntoIter<T> {
2282 buf: NonNull<T>,
2284 cap: usize,
2286 ptr: *const T,
2288 end: *const T,
2290}
2291
2292impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
2293 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2294 f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
2295 }
2296}
2297
2298impl<T> IntoIter<T> {
2299 #[cfg(feature = "unstable")]
2301 #[allow(clippy::option_unwrap_used)]
2302 #[inline]
2303 fn elems(&mut self) -> &mut [T] {
2304 unsafe {
2305 slice::from_raw_parts_mut(
2306 self.ptr as *mut _,
2307 (self.end as usize - self.ptr as usize) / mem::size_of::<T>(),
2308 )
2309 }
2310 }
2311
2312 #[inline]
2328 pub fn as_slice(&self) -> &[T] {
2329 unsafe { slice::from_raw_parts(self.ptr, self.size_hint().0) }
2330 }
2331
2332 #[inline]
2350 pub fn as_mut_slice(&mut self) -> &mut [T] {
2351 unsafe {
2352 slice::from_raw_parts_mut(self.ptr as *mut T, self.size_hint().0)
2353 }
2354 }
2355}
2356
2357unsafe impl<T: Send> Send for IntoIter<T> {}
2358unsafe impl<T: Sync> Sync for IntoIter<T> {}
2359
2360impl<T> Iterator for IntoIter<T> {
2361 type Item = T;
2362
2363 #[inline]
2364 fn next(&mut self) -> Option<T> {
2365 unsafe {
2366 if self.ptr as *const _ == self.end {
2367 None
            } else if mem::size_of::<T>() == 0 {
                // Zero-sized types: advance the pointer by one byte so that
                // `size_hint` keeps counting down, and conjure the value by
                // reading from a dangling but non-null address (reading a
                // ZST never touches memory).
                self.ptr = intrinsics::arith_offset(self.ptr as *const i8, 1)
                    as *mut T;

                Some(ptr::read(1 as *mut T))
2378 } else {
2379 let old = self.ptr;
2380 self.ptr = self.ptr.add(1);
2381
2382 Some(ptr::read(old))
2383 }
2384 }
2385 }
2386
2387 #[inline]
2388 fn size_hint(&self) -> (usize, Option<usize>) {
2389 let exact = match self.ptr.wrapping_offset_from_(self.end) {
2390 Some(x) => x as usize,
2391 None => (self.end as usize).wrapping_sub(self.ptr as usize),
2392 };
2393 (exact, Some(exact))
2394 }
2395
2396 #[inline]
2397 fn count(self) -> usize {
2398 self.size_hint().0
2399 }
2400}
2401
2402impl<T> DoubleEndedIterator for IntoIter<T> {
2403 #[inline]
2404 fn next_back(&mut self) -> Option<T> {
2405 unsafe {
2406 if self.end == self.ptr {
2407 None
            } else if mem::size_of::<T>() == 0 {
                // Zero-sized types: shrink the byte-counted range and return
                // a value read from a dangling but non-null address.
                self.end = intrinsics::arith_offset(self.end as *const i8, -1)
                    as *mut T;

                Some(ptr::read(1 as *mut T))
2416 } else {
2417 self.end = self.end.offset(-1);
2418
2419 Some(ptr::read(self.end))
2420 }
2421 }
2422 }
2423}
2424
2425#[cfg(feature = "unstable")]
2426impl<T> ExactSizeIterator for IntoIter<T> {
2427 #[inline]
2428 fn is_empty(&self) -> bool {
2429 self.ptr == self.end
2430 }
2431}
2432
2433#[cfg(feature = "unstable")]
2434impl<T> iter::FusedIterator for IntoIter<T> {}
2435
2436#[cfg(feature = "unstable")]
2437unsafe impl<T> iter::TrustedLen for IntoIter<T> {}
2438
2439impl<T: Clone> Clone for IntoIter<T> {
2440 #[inline]
2441 fn clone(&self) -> Self {
2442 let mut deq = SliceRingBuffer::<T>::with_capacity(self.size_hint().0);
2443 unsafe {
2444 deq.append_elements(self.as_slice());
2445 }
2446 deq.into_iter()
2447 }
2448}
2449
2450#[cfg(feature = "unstable")]
2451unsafe impl<#[may_dangle] T> Drop for IntoIter<T> {
2452 #[inline]
2453 fn drop(&mut self) {
2454 for _x in self.by_ref() {}
2456
2457 let _ =
2459 unsafe { Buffer::from_raw_parts(self.buf.as_ptr(), 2 * self.cap) };
2460 }
2461}
2462
2463#[cfg(not(feature = "unstable"))]
2464impl<T> Drop for IntoIter<T> {
2465 #[inline]
2466 fn drop(&mut self) {
2467 for _x in self.by_ref() {}
2469
2470 let _ =
2472 unsafe { Buffer::from_raw_parts(self.buf.as_ptr(), 2 * self.cap) };
2473 }
2474}
2475
2476impl<T> IntoIterator for SliceRingBuffer<T> {
2477 type Item = T;
2478 type IntoIter = IntoIter<T>;
2479
2480 #[inline]
2500 fn into_iter(self) -> IntoIter<T> {
2501 unsafe {
2502 let buf_ptr = self.buf.ptr();
2503 intrinsics::assume(!buf_ptr.is_null());
2504 let begin = self.as_ptr();
2505 let end = if mem::size_of::<T>() == 0 {
2506 intrinsics::arith_offset(begin as *const i8, self.len() as _)
2507 as *const _
2508 } else {
2509 begin.add(self.len())
2510 };
2511 assert!(begin as usize <= end as usize);
2512 let it = IntoIter {
2513 buf: NonNull::new_unchecked(buf_ptr),
2514 cap: self.capacity(),
2515 ptr: begin,
2516 end,
2517 };
2518 debug_assert_eq!(self.len(), it.size_hint().0);
2519 #[allow(clippy::mem_forget)]
2520 mem::forget(self);
2521 it
2522 }
2523 }
2524}
2525
2526impl<'a, T> IntoIterator for &'a SliceRingBuffer<T> {
2527 type Item = &'a T;
2528 type IntoIter = slice::Iter<'a, T>;
2529 #[inline]
2530 fn into_iter(self) -> slice::Iter<'a, T> {
2531 self.iter()
2532 }
2533}
2534
2535impl<'a, T> IntoIterator for &'a mut SliceRingBuffer<T> {
2536 type Item = &'a mut T;
2537 type IntoIter = slice::IterMut<'a, T>;
2538 #[inline]
2539 fn into_iter(self) -> slice::IterMut<'a, T> {
2540 self.iter_mut()
2541 }
2542}
2543
2544impl<T> Extend<T> for SliceRingBuffer<T> {
2545 #[inline]
2546 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
2547 <Self as SpecExtend<T, I::IntoIter>>::spec_extend(
2548 self,
2549 iter.into_iter(),
2550 )
2551 }
2552}
2553
2554trait SpecExtend<T, I> {
2557 fn from_iter(iter: I) -> Self;
2559 fn spec_extend(&mut self, iter: I);
2561}
2562
2563#[inline(always)]
2565fn from_iter_default<T, I: Iterator<Item = T>>(
2566 mut iterator: I,
2567) -> SliceRingBuffer<T> {
2568 let mut deque = match iterator.next() {
2574 None => return SliceRingBuffer::<T>::new(),
2575 Some(element) => {
2576 let (lower, _) = iterator.size_hint();
2577 let mut deque =
2578 SliceRingBuffer::<T>::with_capacity(lower.saturating_add(1));
2579 unsafe {
2580 deque.move_tail_unchecked(1);
2581 ptr::write(deque.get_unchecked_mut(0), element);
2582 }
2583 deque
2584 }
2585 };
2586 <SliceRingBuffer<T> as SpecExtend<T, I>>::spec_extend(
2587 &mut deque, iterator,
2588 );
2589 deque
2590}
2591
2592impl<T, I> SpecExtend<T, I> for SliceRingBuffer<T>
2593where
2594 I: Iterator<Item = T>,
2595{
2596 #[cfg(feature = "unstable")]
2597 default fn from_iter(iterator: I) -> Self {
2598 from_iter_default(iterator)
2599 }
2600
2601 #[cfg(feature = "unstable")]
2602 default fn spec_extend(&mut self, iter: I) {
2603 self.extend_desugared(iter)
2604 }
2605
2606 #[cfg(not(feature = "unstable"))]
2607 fn from_iter(iterator: I) -> Self {
2608 from_iter_default(iterator)
2609 }
2610
2611 #[cfg(not(feature = "unstable"))]
2612 fn spec_extend(&mut self, iter: I) {
2613 self.extend_desugared(iter)
2614 }
2615}
2616
2617#[cfg(feature = "unstable")]
2618impl<T, I> SpecExtend<T, I> for SliceRingBuffer<T>
2619where
2620 I: iter::TrustedLen<Item = T>,
2621{
2622 default fn from_iter(iterator: I) -> Self {
2623 let mut deque = Self::new();
2624 <Self as SpecExtend<T, I>>::spec_extend(&mut deque, iterator);
2625 deque
2626 }
2627
2628 #[allow(clippy::use_debug)]
2629 default fn spec_extend(&mut self, iterator: I) {
2630 let (low, high) = iterator.size_hint();
2632 if let Some(high_value) = high {
2633 debug_assert_eq!(
2634 low,
2635 high_value,
2636 "TrustedLen iterator's size hint is not exact: {:?}",
2637 (low, high)
2638 );
2639 }
2640 if let Some(additional) = high {
2641 self.reserve(additional);
2642 unsafe {
2643 let mut ptr = self.as_mut_ptr().add(self.len());
2644 for element in iterator {
2645 ptr::write(ptr, element);
2646 ptr = ptr.add(1);
2647 self.move_tail_unchecked(1);
2650 }
2651 }
2652 } else {
2653 self.extend_desugared(iterator)
2654 }
2655 }
2656}
2657
2658#[cfg(feature = "unstable")]
2659impl<T> SpecExtend<T, IntoIter<T>> for SliceRingBuffer<T> {
2660 fn from_iter(mut iterator: IntoIter<T>) -> Self {
2661 if iterator.buf.as_ptr() as *const _ == iterator.ptr {
2665 unsafe {
2666 let deq = Self::from_raw_parts(
2667 iterator.buf.as_ptr(),
2668 iterator.cap,
2669 iterator.elems(),
2670 );
2671 #[allow(clippy::mem_forget)]
2672 mem::forget(iterator);
2673 deq
2674 }
2675 } else {
2676 let mut deque = Self::new();
2677 deque.spec_extend(iterator);
2678 deque
2679 }
2680 }
2681
2682 fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
2683 unsafe {
2684 self.append_elements(iterator.as_slice() as _);
2685 }
2686 iterator.ptr = iterator.end;
2687 }
2688}
2689
2690#[cfg(not(feature = "unstable"))]
2691impl<'a, T: 'a, I> SpecExtend<&'a T, I> for SliceRingBuffer<T>
2692where
2693 I: Iterator<Item = &'a T>,
2694 T: Clone,
2695{
2696 fn from_iter(iterator: I) -> Self {
2697 SpecExtend::from_iter(iterator.cloned())
2698 }
2699
2700 fn spec_extend(&mut self, iterator: I) {
2701 self.spec_extend(iterator.cloned())
2702 }
2703}
2704
2705#[cfg(feature = "unstable")]
2706impl<'a, T: 'a, I> SpecExtend<&'a T, I> for SliceRingBuffer<T>
2707where
2708 I: Iterator<Item = &'a T>,
2709 T: Clone,
2710{
2711 default fn from_iter(iterator: I) -> Self {
2712 SpecExtend::from_iter(iterator.cloned())
2713 }
2714
2715 default fn spec_extend(&mut self, iterator: I) {
2716 self.spec_extend(iterator.cloned())
2717 }
2718}
2719
2720#[cfg(feature = "unstable")]
2721impl<'a, T: 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for SliceRingBuffer<T>
2722where
2723 T: Copy,
2724{
2725 fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
2726 let slice = iterator.as_slice();
2727 self.reserve(slice.len());
2728 unsafe {
2729 let len = self.len();
2730 self.move_tail_unchecked(slice.len() as isize);
2731 self.get_unchecked_mut(len..).copy_from_slice(slice);
2732 }
2733 }
2734}
2735
2736impl<T> iter::FromIterator<T> for SliceRingBuffer<T> {
2737 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
2738 <Self as SpecExtend<T, I::IntoIter>>::from_iter(iter.into_iter())
2739 }
2740}
2741
2742trait ExtendWith<T> {
2744 fn next(&self) -> T;
2746 fn last(self) -> T;
2748}
2749
2750struct ExtendElement<T>(T);
2752impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
2753 fn next(&self) -> T {
2754 self.0.clone()
2755 }
2756 fn last(self) -> T {
2757 self.0
2758 }
2759}
2760
2761struct ExtendDefault;
2763impl<T: Default> ExtendWith<T> for ExtendDefault {
2764 fn next(&self) -> T {
2765 Default::default()
2766 }
2767 fn last(self) -> T {
2768 Default::default()
2769 }
2770}
2771
2772#[doc(hidden)]
2775pub fn from_elem<T: Clone>(elem: T, n: usize) -> SliceRingBuffer<T> {
2776 <T as SpecFromElem>::from_elem(elem, n)
2777}
2778
2779trait SpecFromElem: Sized {
2781 fn from_elem(elem: Self, n: usize) -> SliceRingBuffer<Self>;
2783}
2784
2785impl<T: Clone> SpecFromElem for T {
2786 #[cfg(feature = "unstable")]
2787 default fn from_elem(elem: Self, n: usize) -> SliceRingBuffer<Self> {
2788 let mut v = SliceRingBuffer::with_capacity(n);
2789 v.extend_with(n, ExtendElement(elem));
2790 v
2791 }
2792
2793 #[cfg(not(feature = "unstable"))]
2794 fn from_elem(elem: Self, n: usize) -> SliceRingBuffer<Self> {
2795 let mut v = SliceRingBuffer::with_capacity(n);
2796 v.extend_with(n, ExtendElement(elem));
2797 v
2798 }
2799}
2800
2801#[cfg(feature = "unstable")]
2802impl SpecFromElem for u8 {
2803 #[inline]
2804 fn from_elem(elem: Self, n: usize) -> SliceRingBuffer<Self> {
2805 unsafe {
2806 let mut v = SliceRingBuffer::with_capacity(n);
2807 ptr::write_bytes(v.as_mut_ptr(), elem, n);
2808 v.move_tail_unchecked(n as isize);
2809 v
2810 }
2811 }
2812}
2813
2814macro_rules! impl_spec_from_elem {
2815 ($t:ty, $is_zero:expr) => {
2816 #[cfg(feature = "unstable")]
2817 impl SpecFromElem for $t {
2818 #[inline]
2819 fn from_elem(elem: Self, n: usize) -> SliceRingBuffer<Self> {
2820 let mut v = SliceRingBuffer::with_capacity(n);
2821 v.extend_with(n, ExtendElement(elem));
2822 v
2823 }
2824 }
2825 };
2826}
2827
2828impl_spec_from_elem!(i8, |x| x == 0);
2829impl_spec_from_elem!(i16, |x| x == 0);
2830impl_spec_from_elem!(i32, |x| x == 0);
2831impl_spec_from_elem!(i64, |x| x == 0);
2832#[cfg(feature = "unstable")]
2833impl_spec_from_elem!(i128, |x| x == 0);
2834impl_spec_from_elem!(isize, |x| x == 0);
2835
2836impl_spec_from_elem!(u16, |x| x == 0);
2837impl_spec_from_elem!(u32, |x| x == 0);
2838impl_spec_from_elem!(u64, |x| x == 0);
2839#[cfg(feature = "unstable")]
2840impl_spec_from_elem!(u128, |x| x == 0);
2841impl_spec_from_elem!(usize, |x| x == 0);
2842
2843impl_spec_from_elem!(f32, |x: f32| x == 0. && x.is_sign_positive());
2844impl_spec_from_elem!(f64, |x: f64| x == 0. && x.is_sign_positive());
2845
2846impl<'a, T: 'a + Copy> Extend<&'a T> for SliceRingBuffer<T> {
2854 fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
2855 self.spec_extend(iter.into_iter())
2856 }
2857}
2858
2859#[derive(Debug)]
2867pub struct Splice<'a, I: Iterator + 'a> {
2868 drain: Drain<'a, I::Item>,
2870 replace_with: I,
2872}
2873
2874impl<'a, I: Iterator> Iterator for Splice<'a, I> {
2875 type Item = I::Item;
2876 #[inline]
2877 fn next(&mut self) -> Option<Self::Item> {
2878 self.drain.next()
2879 }
2880 #[inline]
2881 fn size_hint(&self) -> (usize, Option<usize>) {
2882 self.drain.size_hint()
2883 }
2884}
2885
2886impl<'a, I: Iterator> DoubleEndedIterator for Splice<'a, I> {
2887 #[inline]
2888 fn next_back(&mut self) -> Option<Self::Item> {
2889 self.drain.next_back()
2890 }
2891}
2892
2893#[cfg(feature = "unstable")]
2894impl<'a, I: Iterator> ExactSizeIterator for Splice<'a, I> {}
2895
2896#[cfg(feature = "unstable")]
2898impl<'a, I: Iterator> iter::FusedIterator for Splice<'a, I> {}
2899
2900impl<'a, I: Iterator> Drop for Splice<'a, I> {
2901 fn drop(&mut self) {
2902 while let Some(_) = self.drain.next() {}
2904
2905 unsafe {
2906 if self.drain.tail_len == 0 {
2907 self.drain.deq.as_mut().extend(self.replace_with.by_ref());
2908 return;
2909 }
2910
2911 if !self.drain.fill(&mut self.replace_with) {
2913 return;
2914 }
2915
2916 let (lower_bound, _upper_bound) = self.replace_with.size_hint();
2919 if lower_bound > 0 {
2920 self.drain.move_tail_unchecked(lower_bound);
2921 if !self.drain.fill(&mut self.replace_with) {
2922 return;
2923 }
2924 }
2925
2926 let mut collected = self
2930 .replace_with
2931 .by_ref()
2932 .collect::<SliceRingBuffer<I::Item>>()
2933 .into_iter();
2934 if collected.size_hint().0 > 0 {
2936 self.drain.move_tail_unchecked(collected.size_hint().0);
2937 let filled = self.drain.fill(&mut collected);
2938 debug_assert!(filled);
2939 debug_assert_eq!(collected.size_hint().0, 0);
2940 }
2941 }
2942 }
2945}
2946
2947impl<'a, T> Drain<'a, T> {
2949 unsafe fn fill<I: Iterator<Item = T>>(
2955 &mut self, replace_with: &mut I,
2956 ) -> bool {
2957 let deq = self.deq.as_mut();
2958 let range_start = deq.len();
2959 let range_end = self.tail_start;
2960 let range_slice = slice::from_raw_parts_mut(
2961 deq.as_mut_ptr().add(range_start),
2962 range_end - range_start,
2963 );
2964
2965 for place in range_slice {
2966 if let Some(new_item) = replace_with.next() {
2967 ptr::write(place, new_item);
2968 deq.move_tail_unchecked(1);
2969 } else {
2970 return false;
2971 }
2972 }
2973 true
2974 }
2975
2976 unsafe fn move_tail_unchecked(&mut self, extra_capacity: usize) {
2978 let deq = self.deq.as_mut();
2979 let used_capacity = self.tail_start + self.tail_len;
2980 deq.reserve_capacity(used_capacity + extra_capacity)
2981 .expect("oom");
2982
2983 let new_tail_start = self.tail_start + extra_capacity;
2984 let src = deq.as_ptr().add(self.tail_start);
2985 let dst = deq.as_mut_ptr().add(new_tail_start);
2986 ptr::copy(src, dst, self.tail_len);
2987 self.tail_start = new_tail_start;
2988 }
2989}
2990
2991#[derive(Debug)]
2993pub struct DrainFilter<'a, T: 'a, F>
2994where
2995 F: FnMut(&mut T) -> bool,
2996{
2997 deq: &'a mut SliceRingBuffer<T>,
2999 idx: usize,
3001 del: usize,
3003 old_len: usize,
3005 pred: F,
3007}
3008
3009impl<'a, T, F> Iterator for DrainFilter<'a, T, F>
3010where
3011 F: FnMut(&mut T) -> bool,
3012{
3013 type Item = T;
3014
3015 fn next(&mut self) -> Option<T> {
3016 unsafe {
3017 while self.idx != self.old_len {
3018 let i = self.idx;
3019 let v = slice::from_raw_parts_mut(
3020 self.deq.as_mut_ptr(),
3021 self.old_len,
3022 );
3023 let result = (self.pred)(&mut v[i]);
3024 self.idx += 1;
3027 if result {
3028 self.del += 1;
3029 return Some(ptr::read(&v[i]));
3030 } else if self.del > 0 {
3031 let del = self.del;
3032 let src: *const T = &v[i];
3033 let dst: *mut T = &mut v[i - del];
3034 ptr::copy_nonoverlapping(src, dst, 1);
3038 }
3039 }
3040 None
3041 }
3042 }
3043
3044 fn size_hint(&self) -> (usize, Option<usize>) {
3045 (0, Some(self.old_len - self.idx))
3046 }
3047}
3048
3049impl<'a, T, F> Drop for DrainFilter<'a, T, F>
3050where
3051 F: FnMut(&mut T) -> bool,
3052{
3053 fn drop(&mut self) {
3054 for _ in self.by_ref() {}
3055
3056 unsafe {
3057 let new_len = self.old_len - self.del;
3058 self.deq.move_tail_unchecked(new_len as isize);
3059 }
3060 }
3061}
3062
3063impl<T> convert::AsRef<[T]> for SliceRingBuffer<T> {
3064 fn as_ref(&self) -> &[T] {
3065 &*self
3066 }
3067}
3068
3069impl<T> convert::AsMut<[T]> for SliceRingBuffer<T> {
3070 fn as_mut(&mut self) -> &mut [T] {
3071 &mut *self
3072 }
3073}
3074
3075#[cfg(all(feature = "bytes_buf", feature = "use_std"))]
3076impl ::bytes::BufMut for SliceRingBuffer<u8> {
3077 #[inline]
3078 fn remaining_mut(&self) -> usize {
3079 usize::max_value() - self.len()
3080 }
3081 #[inline]
3082 unsafe fn bytes_mut(&mut self) -> &mut [u8] {
        if self.capacity() == self.len() {
            self.reserve(64);
        }
3086
3087 let cap = self.capacity();
3088 let len = self.len();
3089
3090 let ptr = self.as_mut_ptr();
3091 &mut slice::from_raw_parts_mut(ptr, cap)[len..]
3092 }
3093 #[inline]
3094 unsafe fn advance_mut(&mut self, cnt: usize) {
3095 let len = self.len();
3096 let remaining = self.capacity() - len;
3097 if cnt > remaining {
3098 self.reserve(cnt);
3101 }
3102
3103 self.move_tail_unchecked(cnt as isize);
3104 }
3105}
3106
3107#[cfg(all(feature = "bytes_buf", feature = "use_std"))]
3108impl ::bytes::IntoBuf for SliceRingBuffer<u8> {
3109 type Buf = io::Cursor<SliceRingBuffer<u8>>;
3110
3111 fn into_buf(self) -> Self::Buf {
3112 io::Cursor::new(self)
3113 }
3114}
3115
3116#[cfg(all(feature = "bytes_buf", feature = "use_std"))]
3117impl<'a> ::bytes::IntoBuf for &'a SliceRingBuffer<u8> {
3118 type Buf = io::Cursor<&'a [u8]>;
3119
3120 fn into_buf(self) -> Self::Buf {
3121 io::Cursor::new(&self[..])
3122 }
3123}
3124
3125#[cfg(all(feature = "bytes_buf", feature = "use_std"))]
3126impl ::bytes::buf::FromBuf for SliceRingBuffer<u8> {
3127 fn from_buf<T>(buf: T) -> Self
3128 where
3129 T: ::bytes::IntoBuf,
3130 {
3131 use bytes::{Buf, BufMut};
3132 let buf = buf.into_buf();
3133 let mut ret = SliceRingBuffer::with_capacity(buf.remaining());
3134 ret.put(buf);
3135 ret
3136 }
3137}
3138
3139#[cfg(test)]
3140mod tests {
3141 use self::collections::HashMap;
3142 use super::SliceRingBuffer;
3143 use std::cell::RefCell;
3144 use std::rc::Rc;
3145 use std::{collections, fmt, hash, mem};
3146
3147 #[derive(Clone, Debug)]
3148 struct WithDrop {
3149 counter: Rc<RefCell<usize>>,
3150 }
3151
3152 impl Drop for WithDrop {
3153 fn drop(&mut self) {
3154 *self.counter.borrow_mut() += 1;
3155 }
3156 }
3157
    fn sizes_to_test() -> Vec<usize> {
        vec![2, 4, 8, 16, 32, 64, 128]
    }
3178
3179 fn linear_usize_deque(size: usize) -> SliceRingBuffer<usize> {
3180 let mut v: SliceRingBuffer<usize> = SliceRingBuffer::new();
3181 for i in 0..size {
3182 v.push_back(i);
3183 assert_eq!(v.len(), i + 1);
3184 for j in 0..v.len() {
3185 assert_eq!(*v.get(j).unwrap(), j);
3186 }
3187 }
3188 assert_eq!(v.len(), size);
3189 for i in 0..size {
3190 assert_eq!(*v.get(i).unwrap(), i);
3191 }
3192 v
3193 }
3194
3195 fn constant_deque<T: Clone + fmt::Debug>(
3196 size: usize, val: &T,
3197 ) -> SliceRingBuffer<T> {
3198 let mut v: SliceRingBuffer<T> = SliceRingBuffer::with_capacity(size);
3199 for i in 0..size {
3200 let copy = val.clone();
3201 v.push_back(copy);
3202 assert_eq!(v.len(), i + 1);
3203 }
3204 assert_eq!(v.len(), size);
3205 v
3206 }
3207
3208 #[test]
3209 fn get() {
3210 let mut deq = SliceRingBuffer::new();
3211 deq.push_back(3);
3212 deq.push_back(4);
3213 deq.push_back(5);
3214 assert_eq!(deq.get(1), Some(&4));
3215 }
3216
3217 #[test]
3218 fn get_mut() {
3219 let mut deq = SliceRingBuffer::new();
3220 deq.push_back(3);
3221 deq.push_back(4);
3222 deq.push_back(5);
3223 assert_eq!(deq.get(1), Some(&4));
3224 if let Some(elem) = deq.get_mut(1) {
3225 *elem = 7;
3226 }
3227 assert_eq!(deq[1], 7);
3228 }
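
    // Illustrative test added as a sketch: `front`/`back` (and their `_mut`
    // variants) return the first and last element, or `None` when empty.
    #[test]
    fn front_back() {
        let mut deq = SliceRingBuffer::new();
        assert_eq!(deq.front(), None);
        assert_eq!(deq.back(), None);
        deq.push_back(1);
        deq.push_back(2);
        deq.push_back(3);
        assert_eq!(deq.front(), Some(&1));
        assert_eq!(deq.back(), Some(&3));
        if let Some(x) = deq.front_mut() {
            *x = 9;
        }
        assert_eq!(deq.front(), Some(&9));
    }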
3229
3230 #[test]
3231 fn is_empty() {
3232 let mut deq = SliceRingBuffer::new();
3233 assert!(deq.is_empty());
3234 deq.push_back(4);
3235 assert!(!deq.is_empty());
3236 deq.pop_front();
3237 assert!(deq.is_empty());
3238 }
3239
3240 #[test]
3241 fn push_pop_front() {
3242 for size in sizes_to_test() {
3243 let mut v: SliceRingBuffer<usize> = SliceRingBuffer::new();
3244 for i in 0..size {
3245 v.push_front(i);
3246 assert_eq!(v.len(), i + 1);
3247 for j in 0..v.len() {
3248 assert_eq!(*v.get(v.len() - j - 1).unwrap(), j);
3249 }
3250 }
3251 assert_eq!(v.len(), size);
3252 for i in 0..size {
3253 assert_eq!(*v.get(i).unwrap(), size - i - 1);
3254 }
3255 for i in 0..size {
3256 assert_eq!(v.len(), size - i);
3257 v.pop_front();
3258 for j in 0..v.len() {
3259 assert_eq!(*v.get(v.len() - j - 1).unwrap(), j);
3260 }
3261 }
3262 assert_eq!(v.len(), 0);
3263 }
3264 }
3265
3266 #[test]
3267 fn push_pop_back() {
3268 for size in sizes_to_test() {
3269 let mut v = linear_usize_deque(size);
3270 for i in 0..size {
3271 assert_eq!(v.len(), size - i);
3272 v.pop_back();
3273 for j in 0..v.len() {
3274 assert_eq!(*v.get(j).unwrap(), j);
3275 }
3276 }
3277 assert_eq!(v.len(), 0);
3278 }
3279 }
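
    // Illustrative test added as a sketch: `append` moves all elements of
    // `other` to the back of `self`, leaving `other` empty.
    #[test]
    fn append() {
        let mut a = sdeq![1, 2, 3];
        let mut b = sdeq![4, 5];
        a.append(&mut b);
        assert_eq!(a, [1, 2, 3, 4, 5]);
        assert!(b.is_empty());
    }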
3280
3281 #[test]
3282 fn all_head_tails() {
3283 for size in sizes_to_test() {
3284 let mut v = linear_usize_deque(size);
3285 let permutations = 6 * v.capacity();
3286
3287 for _ in 0..permutations {
3289 v.push_back(0);
3290 for j in (0..v.len() - 1).rev() {
3291 *v.get_mut(j + 1).unwrap() = *v.get(j).unwrap();
3292 }
3293 v.pop_front();
3294 assert_eq!(v.len(), size);
3295 for k in 0..size {
3296 assert_eq!(*v.get(k).unwrap(), k);
3297 }
3298 }
3299
3300 for _ in 0..permutations {
3302 v.push_front(0);
3303 for j in 0..v.len() - 1 {
3304 *v.get_mut(j).unwrap() = *v.get(j + 1).unwrap()
3305 }
3306 v.pop_back();
3307 assert_eq!(v.len(), size);
3308 for k in 0..size {
3309 assert_eq!(*v.get(k).unwrap(), k);
3310 }
3311 }
3312 }
3313 }
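
    // Illustrative test added as a sketch: `insert` shifts elements to make
    // room at `index`; `remove` returns the element at `index`.
    #[test]
    fn insert_remove() {
        let mut deq = sdeq![10, 20, 40, 50];
        deq.insert(2, 30);
        assert_eq!(deq, [10, 20, 30, 40, 50]);
        deq.insert(0, 5);
        assert_eq!(deq, [5, 10, 20, 30, 40, 50]);
        assert_eq!(deq.remove(1), 10);
        assert_eq!(deq, [5, 20, 30, 40, 50]);
    }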
3314
3315 #[test]
3316 fn drop() {
3317 for size in sizes_to_test() {
3318 let counter = Rc::new(RefCell::new(0));
3319 let val = WithDrop {
3320 counter: counter.clone(),
3321 };
3322 {
3323 let _v = constant_deque(size, &val);
3324 }
3325 assert_eq!(*counter.borrow(), size);
3326 }
3327 }
3328
3329 #[test]
3330 fn clear() {
3331 for size in sizes_to_test() {
3332 let counter = Rc::new(RefCell::new(0));
3333 let val = WithDrop {
3334 counter: counter.clone(),
3335 };
3336 assert_eq!(*counter.borrow(), 0);
3337 let mut v = constant_deque(size, &val);
3338 assert_eq!(*counter.borrow(), 0);
3339 v.clear();
3340 assert_eq!(*counter.borrow(), size);
3341 assert_eq!(v.len(), 0);
3342 }
3343 }
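
    // Illustrative test added as a sketch: `truncate_back` keeps the first
    // `len` elements, while `truncate_front` keeps the last `len` elements.
    #[test]
    fn truncate_front_back() {
        let mut deq = sdeq![1, 2, 3, 4, 5];
        deq.truncate_back(3);
        assert_eq!(deq, [1, 2, 3]);
        deq.truncate_front(2);
        assert_eq!(deq, [2, 3]);
    }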
3344
3345 #[test]
3346 fn reserve_no_cap_change() {
3347 let mut slice = SliceRingBuffer::<u8>::with_capacity(4096);
3348 let cap = slice.capacity();
3349 assert!(cap >= 4096);
3350 slice.reserve(cap);
3351 assert_eq!(slice.capacity(), cap);
3354 }
3355
3356 #[test]
3357 fn resize() {
3358 for size in sizes_to_test() {
3359 let mut v = linear_usize_deque(size);
3360 let mut v_ref = linear_usize_deque(size / 2);
3361 v.resize(size / 2, 0);
3362 assert_eq!(v.len(), size / 2);
3363 assert_eq!(v.as_slice(), v_ref.as_slice());
3364 while v_ref.len() < size {
3365 v_ref.push_back(3);
3366 }
3367 v.resize(size, 3);
3368 assert_eq!(v.len(), size);
3369 assert_eq!(v_ref.len(), size);
3370 assert_eq!(v.as_slice(), v_ref.as_slice());
3371
3372 v.resize(0, 3);
3373 assert_eq!(v.len(), 0);
3374
3375 v.resize(size, 3);
3376 let v_ref = constant_deque(size, &3);
3377 assert_eq!(v.len(), size);
3378 assert_eq!(v_ref.len(), size);
3379 assert_eq!(v.as_slice(), v_ref.as_slice());
3380 }
3381 }
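
    // Illustrative test added as a sketch: `resize_default` grows the deque
    // with `T::default()` values and truncates when shrinking.
    #[test]
    fn resize_default() {
        let mut deq = sdeq![1, 2, 3];
        deq.resize_default(5);
        assert_eq!(deq, [1, 2, 3, 0, 0]);
        deq.resize_default(2);
        assert_eq!(deq, [1, 2]);
    }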
3382
3383 #[test]
3384 fn default() {
3385 let d = SliceRingBuffer::<u8>::default();
3386 let r = SliceRingBuffer::<u8>::new();
3387 assert_eq!(d.as_slice(), r.as_slice());
3388 }
3389
3390 #[test]
3391 fn shrink_to_fit() {
3392 let page_size = 4096;
3393 for size in sizes_to_test() {
            let mut deq = constant_deque(size, &3_u8);
3395 let old_cap = deq.capacity();
3396 deq.resize(size / 4, 3);
3397 deq.shrink_to_fit();
3398 if size <= page_size {
3399 assert_eq!(deq.capacity(), old_cap);
3400 } else {
3401 assert!(deq.capacity() < old_cap);
3402 }
3403 }
3404 }
3405
3406 #[test]
3407 fn iter() {
3408 let mut deq = SliceRingBuffer::new();
3409 deq.push_back(5);
3410 deq.push_back(3);
3411 deq.push_back(4);
3412 let b: &[_] = &[&5, &3, &4];
3413 let c: Vec<&i32> = deq.iter().collect();
3414 assert_eq!(&c[..], b);
3415 }
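
    // Illustrative test added as a sketch: a `SliceRingBuffer` can be built
    // from any iterator through `FromIterator`/`collect`.
    #[test]
    fn collect_from_iterator() {
        let deq: SliceRingBuffer<i32> = (0..5).collect();
        assert_eq!(deq, [0, 1, 2, 3, 4]);
    }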
3416
3417 #[test]
3418 fn iter_mut() {
3419 let mut deq = SliceRingBuffer::new();
3420 deq.push_back(5);
3421 deq.push_back(3);
3422 deq.push_back(4);
3423 for num in deq.iter_mut() {
            *num -= 2;
3425 }
3426 let b: &[_] = &[&mut 3, &mut 1, &mut 2];
3427 assert_eq!(&deq.iter_mut().collect::<Vec<&mut i32>>()[..], b);
3428 }
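
    // Illustrative test added as a sketch: the deque is always contiguous,
    // so `as_slices` returns everything in the first slice.
    #[test]
    fn as_slices_contiguous() {
        let mut deq = sdeq![1, 2, 3];
        deq.push_front(0);
        let (first, second) = deq.as_slices();
        assert_eq!(first, &[0, 1, 2, 3]);
        assert!(second.is_empty());
    }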
3429
3430 #[test]
3431 fn hash_map() {
3432 let mut hm: HashMap<SliceRingBuffer<u32>, u32> = HashMap::new();
3433 let mut a = SliceRingBuffer::new();
3434 a.push_back(1);
3435 a.push_back(2);
3436 hm.insert(a.clone(), 3);
3437 let b = SliceRingBuffer::new();
3438 assert_eq!(hm.get(&a), Some(&3));
3439 assert_eq!(hm.get(&b), None);
3440 }
3441
3442 #[test]
3443 fn partial_ord_eq() {
3444 let mut a = SliceRingBuffer::new();
3445 a.push_back(1);
3446 a.push_back(2);
3447 a.push_back(3);
3448 assert!(a == a);
3449 assert!(!(a != a));
3450
3451 let mut b = SliceRingBuffer::new();
3452 b.push_back(1);
3453 b.push_back(3);
3454 b.push_back(2);
3455 assert!(a < b);
3456 assert!(b > a);
3457 assert!(a != b);
3458
3459 let mut c = SliceRingBuffer::new();
3460 c.push_back(2);
3461 assert!(c > a);
3462 assert!(a < c);
3463 }
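
    // Illustrative test added as a sketch: `swap_remove_back`/`_front` swap
    // the element at `index` with an end and pop it; order is not preserved.
    #[test]
    fn swap_remove() {
        let mut deq = sdeq![1, 2, 3, 4];
        assert_eq!(deq.swap_remove_back(1), Some(2));
        assert_eq!(deq, [1, 4, 3]);
        assert_eq!(deq.swap_remove_front(2), Some(3));
        assert_eq!(deq, [4, 1]);
        let mut empty = SliceRingBuffer::<i32>::new();
        assert_eq!(empty.swap_remove_back(0), None);
    }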
3464
3465 struct DropCounter<'a> {
3466 count: &'a mut u32,
3467 }
3468
3469 impl<'a> Drop for DropCounter<'a> {
3470 fn drop(&mut self) {
3471 *self.count += 1;
3472 }
3473 }
3474
3475 #[test]
3476 fn vec_double_drop() {
3477 struct TwoSliceDeque<T> {
3478 x: SliceRingBuffer<T>,
3479 y: SliceRingBuffer<T>,
3480 }
3481
3482 let (mut count_x, mut count_y) = (0, 0);
3483 {
3484 let mut tv = TwoSliceDeque {
3485 x: SliceRingBuffer::new(),
3486 y: SliceRingBuffer::new(),
3487 };
3488 tv.x.push_back(DropCounter {
3489 count: &mut count_x,
3490 });
3491 tv.y.push_back(DropCounter {
3492 count: &mut count_y,
3493 });
3494
3495 mem::drop(tv.x);
3499
3500 }
3502
3503 assert_eq!(count_x, 1);
3504 assert_eq!(count_y, 1);
3505 }
3506
3507 #[test]
3508 fn vec_reserve() {
3509 let mut v = SliceRingBuffer::new();
3510 assert_eq!(v.capacity(), 0);
3511
3512 v.reserve(2);
3513 assert!(v.capacity() >= 2);
3514
3515 for i in 0..16 {
3516 v.push_back(i);
3517 }
3518
3519 assert!(v.capacity() >= 16);
3520 v.reserve(16);
3521 assert!(v.capacity() >= 32);
3522
3523 v.push_back(16);
3524
3525 v.reserve(16);
3526 assert!(v.capacity() >= 33)
3527 }
3528
3529 #[test]
3530 fn vec_extend() {
3531 let mut v = SliceRingBuffer::new();
3532 let mut w = SliceRingBuffer::new();
3533
3534 v.extend(w.clone());
3535 assert_eq!(v, &[]);
3536
3537 v.extend(0..3);
3538 for i in 0..3 {
3539 w.push_back(i)
3540 }
3541
3542 assert_eq!(v, w);
3543
3544 v.extend(3..10);
3545 for i in 3..10 {
3546 w.push_back(i)
3547 }
3548
3549 assert_eq!(v, w);
3550
        v.extend(w.clone());
        assert!(v.iter().eq(w.iter().chain(w.iter())));
3553
3554 let mut count_x = 0;
3556 {
3557 let mut x = SliceRingBuffer::new();
3558 let mut y = SliceRingBuffer::new();
3559 y.push_back(DropCounter {
3560 count: &mut count_x,
3561 });
3562 x.extend(y);
3563 }
3564 assert_eq!(count_x, 1);
3565 }
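
    // Illustrative test added as a sketch: `extend_from_slice` clones the
    // elements of a slice onto the back of the deque.
    #[test]
    fn extend_from_slice() {
        let mut deq = sdeq![1, 2];
        deq.extend_from_slice(&[3, 4, 5]);
        assert_eq!(deq, [1, 2, 3, 4, 5]);
    }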
3566
3567 #[test]
3568 fn vec_extend_zst() {
3569 #[derive(PartialEq, Debug)]
3571 struct Foo;
3572
3573 let mut a = SliceRingBuffer::new();
3574 let b = sdeq![Foo, Foo];
3575
3576 a.extend(b);
3577 assert_eq!(a, &[Foo, Foo]);
3578 }
3579
3580 #[test]
3581 fn vec_extend_ref() {
3582 let mut v = SliceRingBuffer::new();
3583 for &i in &[1, 2] {
3584 v.push_back(i);
3585 }
3586 v.extend(&[3, 4, 5]);
3587
3588 assert_eq!(v.len(), 5);
3589 assert_eq!(v, [1, 2, 3, 4, 5]);
3590
3591 let mut w = SliceRingBuffer::new();
3592 for &i in &[6, 7] {
3593 w.push_back(i);
3594 }
3595 v.extend(&w);
3596
3597 assert_eq!(v.len(), 7);
3598 assert_eq!(v, [1, 2, 3, 4, 5, 6, 7]);
3599 }
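
    // Illustrative test added as a sketch: the `sdeq![elem; n]` form of the
    // macro builds a deque containing `n` clones of `elem`.
    #[test]
    fn sdeq_macro_repeat() {
        let deq = sdeq![7; 4];
        assert_eq!(deq.len(), 4);
        assert_eq!(deq, [7, 7, 7, 7]);
    }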
3600
3601 #[test]
3602 fn vec_slice_from_mut() {
3603 let mut values = sdeq![1, 2, 3, 4, 5];
3604 {
3605 let slice = &mut values[2..];
3606 assert!(slice == [3, 4, 5]);
3607 for p in slice {
3608 *p += 2;
3609 }
3610 }
3611
3612 assert!(values == [1, 2, 5, 6, 7]);
3613 }
3614
3615 #[test]
3616 fn vec_slice_to_mut() {
3617 let mut values = sdeq![1, 2, 3, 4, 5];
3618 {
3619 let slice = &mut values[..2];
3620 assert!(slice == [1, 2]);
3621 for p in slice {
3622 *p += 1;
3623 }
3624 }
3625
3626 assert!(values == [2, 3, 3, 4, 5]);
3627 }
3628
3629 #[test]
3630 fn vec_split_at_mut() {
3631 let mut values = sdeq![1, 2, 3, 4, 5];
3632 {
3633 let (left, right) = values.split_at_mut(2);
3634 {
3635 let left: &[_] = left;
3636 assert!(&left[..left.len()] == &[1, 2]);
3637 }
3638 for p in left {
3639 *p += 1;
3640 }
3641
3642 {
3643 let right: &[_] = right;
3644 assert!(&right[..right.len()] == &[3, 4, 5]);
3645 }
3646 for p in right {
3647 *p += 2;
3648 }
3649 }
3650
3651 assert_eq!(values, [2, 3, 5, 6, 7]);
3652 }
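
    // Illustrative test added as a sketch: `split_off(at)` returns the tail
    // `[at, len)` and leaves `[0, at)` in place.
    #[test]
    fn split_off_tail() {
        let mut deq = sdeq![1, 2, 3, 4, 5];
        let tail = deq.split_off(3);
        assert_eq!(deq, [1, 2, 3]);
        assert_eq!(tail, [4, 5]);
    }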
3653
3654 #[test]
3655 fn vec_clone() {
3656 let v: SliceRingBuffer<i32> = sdeq![];
3657 let w = sdeq![1, 2, 3];
3658
3659 assert_eq!(v, v.clone());
3660
3661 let z = w.clone();
3662 assert_eq!(w, z);
3663 assert!(w.as_ptr() != z.as_ptr())
3665 }
3666
3667 #[cfg(feature = "unstable")]
3668 #[test]
3669 fn vec_clone_from() {
3670 let mut v = sdeq![];
3671 let three: SliceRingBuffer<Box<_>> = sdeq![box 1, box 2, box 3];
3672 let two: SliceRingBuffer<Box<_>> = sdeq![box 4, box 5];
3673
3674 v.clone_from(&three);
3676 assert_eq!(v, three);
3677
3678 v.clone_from(&three);
3680 assert_eq!(v, three);
3681
3682 v.clone_from(&two);
3684 assert_eq!(v, two);
3685
3686 v.clone_from(&three);
3688 assert_eq!(v, three)
3689 }
3690
3691 #[test]
3692 fn vec_retain() {
3693 let mut deq = sdeq![1, 2, 3, 4];
3694 deq.retain(|&x| x % 2 == 0);
3695 assert_eq!(deq, [2, 4]);
3696 }
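
    // Illustrative test added as a sketch: `drain_filter` yields the
    // elements matching the predicate and removes them from the deque.
    #[test]
    fn drain_filter_even() {
        let mut deq = sdeq![1, 2, 3, 4, 5, 6];
        let even: Vec<_> = deq.drain_filter(|x| *x % 2 == 0).collect();
        assert_eq!(even, [2, 4, 6]);
        assert_eq!(deq, [1, 3, 5]);
    }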
3697
3698 #[test]
3699 fn vec_dedup() {
3700 fn case(a: SliceRingBuffer<i32>, b: SliceRingBuffer<i32>) {
3701 let mut v = a;
3702 v.dedup();
3703 assert_eq!(v, b);
3704 }
3705 case(sdeq![], sdeq![]);
3706 case(sdeq![1], sdeq![1]);
3707 case(sdeq![1, 1], sdeq![1]);
3708 case(sdeq![1, 2, 3], sdeq![1, 2, 3]);
3709 case(sdeq![1, 1, 2, 3], sdeq![1, 2, 3]);
3710 case(sdeq![1, 2, 2, 3], sdeq![1, 2, 3]);
3711 case(sdeq![1, 2, 3, 3], sdeq![1, 2, 3]);
3712 case(sdeq![1, 1, 2, 2, 2, 3, 3], sdeq![1, 2, 3]);
3713 }
3714
3715 #[test]
3716 fn vec_dedup_by_key() {
3717 fn case(a: SliceRingBuffer<i32>, b: SliceRingBuffer<i32>) {
3718 let mut v = a;
3719 v.dedup_by_key(|i| *i / 10);
3720 assert_eq!(v, b);
3721 }
3722 case(sdeq![], sdeq![]);
3723 case(sdeq![10], sdeq![10]);
3724 case(sdeq![10, 11], sdeq![10]);
3725 case(sdeq![10, 20, 30], sdeq![10, 20, 30]);
3726 case(sdeq![10, 11, 20, 30], sdeq![10, 20, 30]);
3727 case(sdeq![10, 20, 21, 30], sdeq![10, 20, 30]);
3728 case(sdeq![10, 20, 30, 31], sdeq![10, 20, 30]);
3729 case(sdeq![10, 11, 20, 21, 22, 30, 31], sdeq![10, 20, 30]);
3730 }
3731
3732 #[test]
3733 fn vec_dedup_by() {
3734 let mut deq = sdeq!["foo", "bar", "Bar", "baz", "bar"];
3735 deq.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
3736
3737 assert_eq!(deq, ["foo", "bar", "baz", "bar"]);
3738
3739 let mut deq: SliceRingBuffer<(&'static str, i32)> =
3740 sdeq![("foo", 1), ("foo", 2), ("bar", 3), ("bar", 4), ("bar", 5)];
3741 deq.dedup_by(|a, b| {
3742 a.0 == b.0 && {
3743 b.1 += a.1;
3744 true
3745 }
3746 });
3747
3748 assert_eq!(deq, [("foo", 3), ("bar", 12)]);
3749 }
3750
3751 #[cfg(feature = "unstable")]
3752 #[test]
3753 fn vec_dedup_unique() {
3754 let mut v0: SliceRingBuffer<Box<_>> =
3755 sdeq![box 1, box 1, box 2, box 3];
3756 v0.dedup();
3757 let mut v1: SliceRingBuffer<Box<_>> =
3758 sdeq![box 1, box 2, box 2, box 3];
3759 v1.dedup();
3760 let mut v2: SliceRingBuffer<Box<_>> =
3761 sdeq![box 1, box 2, box 3, box 3];
3762 v2.dedup();
3763 }
3766
3767 #[test]
3768 fn zero_sized_values() {
3769 let mut v = SliceRingBuffer::new();
3770 assert_eq!(v.len(), 0);
3771 v.push_back(());
3772 assert_eq!(v.len(), 1);
3773 v.push_back(());
3774 assert_eq!(v.len(), 2);
3775 assert_eq!(v.pop_back(), Some(()));
3776 assert_eq!(v.pop_back(), Some(()));
3777 assert_eq!(v.pop_back(), None);
3778
3779 assert_eq!(v.iter().count(), 0);
3780 v.push_back(());
3781 assert_eq!(v.iter().count(), 1);
3782 v.push_back(());
3783 assert_eq!(v.iter().count(), 2);
3784
3785 for &() in &v {}
3786
3787 assert_eq!(v.iter_mut().count(), 2);
3788 v.push_back(());
3789 assert_eq!(v.iter_mut().count(), 3);
3790 v.push_back(());
3791 assert_eq!(v.iter_mut().count(), 4);
3792
3793 for &mut () in &mut v {}
3794 unsafe {
3795 let len = v.len() as isize;
3796 v.move_tail_unchecked(-len);
3797 }
3798 assert_eq!(v.iter_mut().count(), 0);
3799 }
3800
3801 #[test]
3802 fn vec_partition() {
3803 assert_eq!(
3804 sdeq![].into_iter().partition(|x: &i32| *x < 3),
3805 (sdeq![], sdeq![])
3806 );
3807 assert_eq!(
3808 sdeq![1, 2, 3].into_iter().partition(|x| *x < 4),
3809 (sdeq![1, 2, 3], sdeq![])
3810 );
3811 assert_eq!(
3812 sdeq![1, 2, 3].into_iter().partition(|x| *x < 2),
3813 (sdeq![1], sdeq![2, 3])
3814 );
3815 assert_eq!(
3816 sdeq![1, 2, 3].into_iter().partition(|x| *x < 0),
3817 (sdeq![], sdeq![1, 2, 3])
3818 );
3819 }
3820
3821 #[test]
3822 fn vec_zip_unzip() {
3823 let z1 = sdeq![(1, 4), (2, 5), (3, 6)];
3824
3825 let (left, right): (SliceRingBuffer<_>, SliceRingBuffer<_>) =
3826 z1.iter().cloned().unzip();
3827
3828 assert_eq!((1, 4), (left[0], right[0]));
3829 assert_eq!((2, 5), (left[1], right[1]));
3830 assert_eq!((3, 6), (left[2], right[2]));
3831 }
3832
3833 #[test]
3834 fn vec_vec_truncate_drop() {
3835 static mut DROPS: u32 = 0;
3836 struct Elem(i32);
3837 impl Drop for Elem {
3838 fn drop(&mut self) {
3839 unsafe {
3840 DROPS += 1;
3841 }
3842 }
3843 }
3844
3845 let mut v = sdeq![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
3846 assert_eq!(unsafe { DROPS }, 0);
3847 v.truncate(3);
3848 assert_eq!(unsafe { DROPS }, 2);
3849 v.truncate(0);
3850 assert_eq!(unsafe { DROPS }, 5);
3851 }
3852
3853 #[test]
3854 fn vec_vec_truncate_front_drop() {
3855 static mut DROPS: u32 = 0;
3856 struct Elem(i32);
3857 impl Drop for Elem {
3858 fn drop(&mut self) {
3859 unsafe {
3860 DROPS += 1;
3861 }
3862 }
3863 }
3864
3865 let mut v = sdeq![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
3866 assert_eq!(unsafe { DROPS }, 0);
3867 v.truncate_front(3);
3868 assert_eq!(unsafe { DROPS }, 2);
3869 v.truncate_front(0);
3870 assert_eq!(unsafe { DROPS }, 5);
3871 }
3872
3873 #[test]
3874 #[should_panic]
3875 fn vec_vec_truncate_fail() {
3876 struct BadElem(i32);
3877 impl Drop for BadElem {
3878 fn drop(&mut self) {
3879 let BadElem(ref mut x) = *self;
3880 if *x == 0xbadbeef {
3881 panic!("BadElem panic: 0xbadbeef")
3882 }
3883 }
3884 }
3885
3886 let mut v =
3887 sdeq![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];
3888 v.truncate(0);
3889 }
3890
3891 #[test]
3892 fn vec_index() {
3893 let deq = sdeq![1, 2, 3];
3894 assert!(deq[1] == 2);
3895 }
3896
3897 #[test]
3898 #[should_panic]
3899 fn vec_index_out_of_bounds() {
3900 let deq = sdeq![1, 2, 3];
3901 let _ = deq[3];
3902 }
3903
3904 #[test]
3905 #[should_panic]
3906 fn vec_slice_out_of_bounds_1() {
3907 let x = sdeq![1, 2, 3, 4, 5];
3908 let _ = &x[!0..];
3909 }
3910
3911 #[test]
3912 #[should_panic]
3913 fn vec_slice_out_of_bounds_2() {
3914 let x = sdeq![1, 2, 3, 4, 5];
3915 let _ = &x[..6];
3916 }
3917
3918 #[test]
3919 #[should_panic]
3920 fn vec_slice_out_of_bounds_3() {
3921 let x = sdeq![1, 2, 3, 4, 5];
3922 let _ = &x[!0..4];
3923 }
3924
3925 #[test]
3926 #[should_panic]
3927 fn vec_slice_out_of_bounds_4() {
3928 let x = sdeq![1, 2, 3, 4, 5];
3929 let _ = &x[1..6];
3930 }
3931
3932 #[test]
3933 #[should_panic]
3934 fn vec_slice_out_of_bounds_5() {
3935 let x = sdeq![1, 2, 3, 4, 5];
3936 let _ = &x[3..2];
3937 }
3938
3939 #[test]
3940 fn vec_swap_remove_empty() {
3941 let mut deq = SliceRingBuffer::<i32>::new();
3942 assert_eq!(deq.swap_remove_back(0), None);
3943 }
3944
3945 #[test]
3946 fn vec_move_items() {
3947 let deq = sdeq![1, 2, 3];
3948 let mut deq2 = sdeq![];
3949 for i in deq {
3950 deq2.push_back(i);
3951 }
3952 assert_eq!(deq2, [1, 2, 3]);
3953 }
3954
3955 #[test]
3956 fn vec_move_items_reverse() {
3957 let deq = sdeq![1, 2, 3];
3958 let mut deq2 = sdeq![];
3959 for i in deq.into_iter().rev() {
3960 deq2.push_back(i);
3961 }
3962 assert_eq!(deq2, [3, 2, 1]);
3963 }
3964
3965 #[test]
3966 fn vec_move_items_zero_sized() {
3967 let deq = sdeq![(), (), ()];
3968 let mut deq2 = sdeq![];
3969 for i in deq {
3970 deq2.push_back(i);
3971 }
3972 assert_eq!(deq2, [(), (), ()]);
3973 }
3974
3975 #[test]
3976 fn vec_drain_items() {
3977 let mut deq = sdeq![1, 2, 3];
3978 let mut deq2 = sdeq![];
3979 for i in deq.drain(..) {
3980 deq2.push_back(i);
3981 }
3982 assert_eq!(deq, []);
3983 assert_eq!(deq2, [1, 2, 3]);
3984 }
3985
3986 #[test]
3987 fn vec_drain_items_reverse() {
3988 let mut deq = sdeq![1, 2, 3];
3989 let mut deq2 = sdeq![];
3990 for i in deq.drain(..).rev() {
3991 deq2.push_back(i);
3992 }
3993 assert_eq!(deq, []);
3994 assert_eq!(deq2, [3, 2, 1]);
3995 }
3996
3997 #[test]
3998 fn vec_drain_items_zero_sized() {
3999 let mut deq = sdeq![(), (), ()];
4000 let mut deq2 = sdeq![];
4001 for i in deq.drain(..) {
4002 deq2.push_back(i);
4003 }
4004 assert_eq!(deq, []);
4005 assert_eq!(deq2, [(), (), ()]);
4006 }
4007
4008 #[test]
4009 #[should_panic]
4010 fn vec_drain_out_of_bounds() {
4011 let mut v = sdeq![1, 2, 3, 4, 5];
4012 v.drain(5..6);
4013 }
4014
4015 #[test]
4016 fn vec_drain_range() {
4017 let mut v = sdeq![1, 2, 3, 4, 5];
4018 for _ in v.drain(4..) {}
4019 assert_eq!(v, &[1, 2, 3, 4]);
4020
4021 let mut v: SliceRingBuffer<_> =
4022 (1..6).map(|x| x.to_string()).collect();
4023 for _ in v.drain(1..4) {}
4024 assert_eq!(v, &[1.to_string(), 5.to_string()]);
4025
4026 let mut v: SliceRingBuffer<_> =
4027 (1..6).map(|x| x.to_string()).collect();
4028 for _ in v.drain(1..4).rev() {}
4029 assert_eq!(v, &[1.to_string(), 5.to_string()]);
4030 }
4031
4032 #[test]
4033 fn vec_drain_range_zst() {
4034 let mut v: SliceRingBuffer<_> = sdeq![(); 5];
4035 for _ in v.drain(1..4).rev() {}
4036 assert_eq!(v, &[(), ()]);
4037 }
4038
4039 #[test]
4040 fn vec_drain_inclusive_range() {
4041 let mut v = sdeq!['a', 'b', 'c', 'd', 'e'];
4042 for _ in v.drain(1..=3) {}
4043 assert_eq!(v, &['a', 'e']);
4044
4045 let mut v: SliceRingBuffer<_> =
4046 (0..=5).map(|x| x.to_string()).collect();
4047 for _ in v.drain(1..=5) {}
4048 assert_eq!(v, &["0".to_string()]);
4049
4050 let mut v: SliceRingBuffer<String> =
4051 (0..=5).map(|x| x.to_string()).collect();
4052 for _ in v.drain(0..=5) {}
4053 assert_eq!(v, SliceRingBuffer::<String>::new());
4054
4055 let mut v: SliceRingBuffer<_> =
4056 (0..=5).map(|x| x.to_string()).collect();
4057 for _ in v.drain(0..=3) {}
4058 assert_eq!(v, &["4".to_string(), "5".to_string()]);
4059
4060 let mut v: SliceRingBuffer<_> =
4061 (0..=1).map(|x| x.to_string()).collect();
4062 for _ in v.drain(..=0) {}
4063 assert_eq!(v, &["1".to_string()]);
4064 }
4065
4066 #[test]
4067 fn vec_drain_max_vec_size() {
4068 const M: usize = isize::max_value() as usize;
4069 let mut v = SliceRingBuffer::<()>::with_capacity(M);
4070 unsafe { v.move_tail_unchecked(M as isize) };
4071 assert_eq!(v.len(), M as usize);
4072 for _ in v.drain(M - 1..) {}
4073 assert_eq!(v.len(), M - 1);
4074
4075 let mut v = SliceRingBuffer::<()>::with_capacity(M);
4076 unsafe { v.move_tail_unchecked(M as isize) };
4077 assert_eq!(v.len(), M as usize);
4078 for _ in v.drain(M - 1..=M - 1) {}
4079 assert_eq!(v.len(), M - 1);
4080 }
4081
4082 #[test]
4083 #[should_panic]
4084 fn vec_drain_inclusive_out_of_bounds() {
4085 let mut v = sdeq![1, 2, 3, 4, 5];
4086 v.drain(5..=5);
4087 }
4088
4089 #[test]
4090 fn vec_splice() {
4091 let mut v = sdeq![1, 2, 3, 4, 5];
4092 let a = [10, 11, 12];
4093 v.splice(2..4, a.iter().cloned());
4094 assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
4095 v.splice(1..3, Some(20));
4096 assert_eq!(v, &[1, 20, 11, 12, 5]);
4097 }
4098
4099 #[test]
4100 fn vec_splice_inclusive_range() {
4101 let mut v = sdeq![1, 2, 3, 4, 5];
4102 let a = [10, 11, 12];
4103 let t1: SliceRingBuffer<_> =
4104 v.splice(2..=3, a.iter().cloned()).collect();
4105 assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
4106 assert_eq!(t1, &[3, 4]);
4107 let t2: SliceRingBuffer<_> = v.splice(1..=2, Some(20)).collect();
4108 assert_eq!(v, &[1, 20, 11, 12, 5]);
4109 assert_eq!(t2, &[2, 10]);
4110 }
4111
4112 #[test]
4113 #[should_panic]
4114 fn vec_splice_out_of_bounds() {
4115 let mut v = sdeq![1, 2, 3, 4, 5];
4116 let a = [10, 11, 12];
4117 v.splice(5..6, a.iter().cloned());
4118 }
4119
4120 #[test]
4121 #[should_panic]
4122 fn vec_splice_inclusive_out_of_bounds() {
4123 let mut v = sdeq![1, 2, 3, 4, 5];
4124 let a = [10, 11, 12];
4125 v.splice(5..=5, a.iter().cloned());
4126 }
4127
4128 #[test]
4129 fn vec_splice_items_zero_sized() {
4130 let mut deq = sdeq![(), (), ()];
4131 let deq2 = sdeq![];
4132 let t: SliceRingBuffer<_> =
4133 deq.splice(1..2, deq2.iter().cloned()).collect();
4134 assert_eq!(deq, &[(), ()]);
4135 assert_eq!(t, &[()]);
4136 }
4137
4138 #[test]
4139 fn vec_splice_unbounded() {
4140 let mut deq = sdeq![1, 2, 3, 4, 5];
4141 let t: SliceRingBuffer<_> = deq.splice(.., None).collect();
4142 assert_eq!(deq, &[]);
4143 assert_eq!(t, &[1, 2, 3, 4, 5]);
4144 }
4145
4146 #[test]
4147 fn vec_splice_forget() {
4148 let mut v = sdeq![1, 2, 3, 4, 5];
4149 let a = [10, 11, 12];
4150 mem::forget(v.splice(2..4, a.iter().cloned()));
4151 assert_eq!(v, &[1, 2]);
4152 }
4153
4154 #[test]
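4155 // `append` moves every element to the back of `deq` and leaves `deq2` empty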
4164 fn vec_append() {
4165 let mut deq = sdeq![1, 2, 3];
4166 let mut deq2 = sdeq![4, 5, 6];
4167 deq.append(&mut deq2);
4168 assert_eq!(deq, [1, 2, 3, 4, 5, 6]);
4169 assert_eq!(deq2, []);
4170 }
4171
4172 #[test]
4173 fn vec_split_off() {
4174 let mut deq = sdeq![1, 2, 3, 4, 5, 6];
4175 let deq2 = deq.split_off(4);
4176 assert_eq!(deq, [1, 2, 3, 4]);
4177 assert_eq!(deq2, [5, 6]);
4178 }
4179
4180 #[test]
4181 fn vec_into_iter_as_slice() {
4182 let deq = sdeq!['a', 'b', 'c'];
4183 let mut into_iter = deq.into_iter();
4184 assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
4185 let _ = into_iter.next().unwrap();
4186 assert_eq!(into_iter.as_slice(), &['b', 'c']);
4187 let _ = into_iter.next().unwrap();
4188 let _ = into_iter.next().unwrap();
4189 assert_eq!(into_iter.as_slice(), &[]);
4190 }
4191
4192 #[test]
4193 fn vec_into_iter_as_mut_slice() {
4194 let deq = sdeq!['a', 'b', 'c'];
4195 let mut into_iter = deq.into_iter();
4196 assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
4197 into_iter.as_mut_slice()[0] = 'x';
4198 into_iter.as_mut_slice()[1] = 'y';
4199 assert_eq!(into_iter.next().unwrap(), 'x');
4200 assert_eq!(into_iter.as_slice(), &['y', 'c']);
4201 }
4202
4203 #[test]
4204 fn vec_into_iter_debug() {
4205 let deq = sdeq!['a', 'b', 'c'];
4206 let into_iter = deq.into_iter();
4207 let debug = format!("{:?}", into_iter);
4208 assert_eq!(debug, "IntoIter(['a', 'b', 'c'])");
4209 }
4210
4211 #[test]
4212 fn vec_into_iter_count() {
4213 assert_eq!(sdeq![1, 2, 3].into_iter().count(), 3);
4214 }
4215
4216 #[test]
4217 fn vec_into_iter_clone() {
4218 fn iter_equal<I: Iterator<Item = i32>>(it: I, slice: &[i32]) {
4219 let v: SliceRingBuffer<i32> = it.collect();
4220 assert_eq!(&v[..], slice);
4221 }
4222 let deq = sdeq![1, 2, 3];
4223 let mut it = deq.into_iter();
4224 let it_c = it.clone();
4225 iter_equal(it_c, &[1, 2, 3]);
4226 assert_eq!(it.next(), Some(1));
4227 let mut it = it.rev();
4228 iter_equal(it.clone(), &[3, 2]);
4229 assert_eq!(it.next(), Some(3));
4230 iter_equal(it.clone(), &[2]);
4231 assert_eq!(it.next(), Some(2));
4232 iter_equal(it.clone(), &[]);
4233 assert_eq!(it.next(), None);
4234 }
4235
4236 #[test]
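4237 // round-trip through `into_iter`: an unconsumed iterator may give back the original buffer (checked under the `unstable` feature); after consuming an element the collected deque must start at a different address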
4273 fn from_into_inner() {
4274 let deq = sdeq![1, 2, 3];
4275 #[allow(unused_variables)]
4276 let ptr = deq.as_ptr();
4277 let deq = deq.into_iter().collect::<SliceRingBuffer<_>>();
4278 assert_eq!(deq, [1, 2, 3]);
4279 #[cfg(feature = "unstable")]
4280 {
4281 assert_eq!(deq.as_ptr(), ptr);
4282 }
4283
4284 let ptr = &deq[1] as *const _;
4285 let mut it = deq.into_iter();
4286 it.next().unwrap();
4287 let deq = it.collect::<SliceRingBuffer<_>>();
4288 assert_eq!(deq, [2, 3]);
4289 assert!(ptr != deq.as_ptr());
4290 }
4291
4292 #[cfg(feature = "unstable")]
4293 #[test]
4294 fn overaligned_allocations() {
4295 #[repr(align(256))]
4296 struct Foo(usize);
4297 let mut v = sdeq![Foo(273)];
4298 for i in 0..0x1000 {
4299 v.reserve_exact(i);
4300 assert!(v[0].0 == 273);
4301 assert!(v.as_ptr() as usize & 0xff == 0);
4302 v.shrink_to_fit();
4303 assert!(v[0].0 == 273);
4304 assert!(v.as_ptr() as usize & 0xff == 0);
4305 }
4306 }
4307
4308 #[test]
4309 fn drain_filter_empty() {
4310 let mut deq: SliceRingBuffer<i32> = sdeq![];
4311
4312 {
4313 let mut iter = deq.drain_filter(|_| true);
4314 assert_eq!(iter.size_hint(), (0, Some(0)));
4315 assert_eq!(iter.next(), None);
4316 assert_eq!(iter.size_hint(), (0, Some(0)));
4317 assert_eq!(iter.next(), None);
4318 assert_eq!(iter.size_hint(), (0, Some(0)));
4319 }
4320 assert_eq!(deq.len(), 0);
4321 assert_eq!(deq, sdeq![]);
4322 }
4323
4324 #[test]
4325 fn drain_filter_zst() {
4326 let mut deq = sdeq![(), (), (), (), ()];
4327 let initial_len = deq.len();
4328 let mut count = 0;
4329 {
4330 let mut iter = deq.drain_filter(|_| true);
4331 assert_eq!(iter.size_hint(), (0, Some(initial_len)));
4332 while let Some(_) = iter.next() {
4333 count += 1;
4334 assert_eq!(iter.size_hint(), (0, Some(initial_len - count)));
4335 }
4336 assert_eq!(iter.size_hint(), (0, Some(0)));
4337 assert_eq!(iter.next(), None);
4338 assert_eq!(iter.size_hint(), (0, Some(0)));
4339 }
4340
4341 assert_eq!(count, initial_len);
4342 assert_eq!(deq.len(), 0);
4343 assert_eq!(deq, sdeq![]);
4344 }
4345
4346 #[test]
4347 fn drain_filter_false() {
4348 let mut deq = sdeq![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
4349
4350 let initial_len = deq.len();
4351 let mut count = 0;
4352 {
4353 let mut iter = deq.drain_filter(|_| false);
4354 assert_eq!(iter.size_hint(), (0, Some(initial_len)));
4355 for _ in iter.by_ref() {
4356 count += 1;
4357 }
4358 assert_eq!(iter.size_hint(), (0, Some(0)));
4359 assert_eq!(iter.next(), None);
4360 assert_eq!(iter.size_hint(), (0, Some(0)));
4361 }
4362
4363 assert_eq!(count, 0);
4364 assert_eq!(deq.len(), initial_len);
4365 assert_eq!(deq, sdeq![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
4366 }
4367
4368 #[test]
4369 fn drain_filter_true() {
4370 let mut deq = sdeq![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
4371
4372 let initial_len = deq.len();
4373 let mut count = 0;
4374 {
4375 let mut iter = deq.drain_filter(|_| true);
4376 assert_eq!(iter.size_hint(), (0, Some(initial_len)));
4377 while let Some(_) = iter.next() {
4378 count += 1;
4379 assert_eq!(iter.size_hint(), (0, Some(initial_len - count)));
4380 }
4381 assert_eq!(iter.size_hint(), (0, Some(0)));
4382 assert_eq!(iter.next(), None);
4383 assert_eq!(iter.size_hint(), (0, Some(0)));
4384 }
4385
4386 assert_eq!(count, initial_len);
4387 assert_eq!(deq.len(), 0);
4388 assert_eq!(deq, sdeq![]);
4389 }
4390
4391 #[test]
4392 fn drain_filter_complex() {
4393 {
4394 let mut deq = sdeq![
4396 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29,
4397 31, 33, 34, 35, 36, 37, 39,
4398 ];
4399
4400 let removed = deq
4401 .drain_filter(|x| *x % 2 == 0)
4402 .collect::<SliceRingBuffer<_>>();
4403 assert_eq!(removed.len(), 10);
4404 assert_eq!(removed, sdeq![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
4405
4406 assert_eq!(deq.len(), 14);
4407 assert_eq!(
4408 deq,
4409 sdeq![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]
4410 );
4411 }
4412
4413 {
4414 let mut deq = sdeq![
4416 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31,
4417 33, 34, 35, 36, 37, 39,
4418 ];
4419
4420 let removed = deq
4421 .drain_filter(|x| *x % 2 == 0)
4422 .collect::<SliceRingBuffer<_>>();
4423 assert_eq!(removed.len(), 10);
4424 assert_eq!(removed, sdeq![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
4425
4426 assert_eq!(deq.len(), 13);
4427 assert_eq!(
4428 deq,
4429 sdeq![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]
4430 );
4431 }
4432
4433 {
4434 let mut deq = sdeq![
4436 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31,
4437 33, 34, 35, 36,
4438 ];
4439
4440 let removed = deq
4441 .drain_filter(|x| *x % 2 == 0)
4442 .collect::<SliceRingBuffer<_>>();
4443 assert_eq!(removed.len(), 10);
4444 assert_eq!(removed, sdeq![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
4445
4446 assert_eq!(deq.len(), 11);
4447 assert_eq!(deq, sdeq![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]);
4448 }
4449
4450 {
4451 let mut deq = sdeq![
4453 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15,
4454 17, 19,
4455 ];
4456
4457 let removed = deq
4458 .drain_filter(|x| *x % 2 == 0)
4459 .collect::<SliceRingBuffer<_>>();
4460 assert_eq!(removed.len(), 10);
4461 assert_eq!(removed, sdeq![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
4462
4463 assert_eq!(deq.len(), 10);
4464 assert_eq!(deq, sdeq![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
4465 }
4466
4467 {
4468 let mut deq = sdeq![
4470 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16,
4471 18, 20,
4472 ];
4473
4474 let removed = deq
4475 .drain_filter(|x| *x % 2 == 0)
4476 .collect::<SliceRingBuffer<_>>();
4477 assert_eq!(removed.len(), 10);
4478 assert_eq!(removed, sdeq![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
4479
4480 assert_eq!(deq.len(), 10);
4481 assert_eq!(deq, sdeq![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
4482 }
4483 }
4484
4485 #[test]
4486 fn vecdeque_simple() {
4487 let mut d = SliceRingBuffer::new();
4488 assert_eq!(d.len(), 0);
4489 d.push_front(17);
4490 d.push_front(42);
4491 d.push_back(137);
4492 assert_eq!(d.len(), 3);
4493 d.push_back(137);
4494 assert_eq!(d.len(), 4);
4495 assert_eq!(*d.front().unwrap(), 42);
4496 assert_eq!(*d.back().unwrap(), 137);
4497 let mut i = d.pop_front();
4498 assert_eq!(i, Some(42));
4499 i = d.pop_back();
4500 assert_eq!(i, Some(137));
4501 i = d.pop_back();
4502 assert_eq!(i, Some(137));
4503 i = d.pop_back();
4504 assert_eq!(i, Some(17));
4505 assert_eq!(d.len(), 0);
4506 d.push_back(3);
4507 assert_eq!(d.len(), 1);
4508 d.push_front(2);
4509 assert_eq!(d.len(), 2);
4510 d.push_back(4);
4511 assert_eq!(d.len(), 3);
4512 d.push_front(1);
4513 assert_eq!(d.len(), 4);
4514 assert_eq!(d[0], 1);
4515 assert_eq!(d[1], 2);
4516 assert_eq!(d[2], 3);
4517 assert_eq!(d[3], 4);
4518 }
4519
4520 #[cfg(test)]
4521 fn vecdeque_parameterized<T: Clone + PartialEq + fmt::Debug>(
4522 a: T, b: T, c: T, d: T,
4523 ) {
4524 let mut deq = SliceRingBuffer::new();
4525 assert_eq!(deq.len(), 0);
4526 deq.push_front(a.clone());
4527 deq.push_front(b.clone());
4528 deq.push_back(c.clone());
4529 assert_eq!(deq.len(), 3);
4530 deq.push_back(d.clone());
4531 assert_eq!(deq.len(), 4);
4532 assert_eq!((*deq.front().unwrap()).clone(), b.clone());
4533 assert_eq!((*deq.back().unwrap()).clone(), d.clone());
4534 assert_eq!(deq.pop_front().unwrap(), b.clone());
4535 assert_eq!(deq.pop_back().unwrap(), d.clone());
4536 assert_eq!(deq.pop_back().unwrap(), c.clone());
4537 assert_eq!(deq.pop_back().unwrap(), a.clone());
4538 assert_eq!(deq.len(), 0);
4539 deq.push_back(c.clone());
4540 assert_eq!(deq.len(), 1);
4541 deq.push_front(b.clone());
4542 assert_eq!(deq.len(), 2);
4543 deq.push_back(d.clone());
4544 assert_eq!(deq.len(), 3);
4545 deq.push_front(a.clone());
4546 assert_eq!(deq.len(), 4);
4547 assert_eq!(deq[0].clone(), a.clone());
4548 assert_eq!(deq[1].clone(), b.clone());
4549 assert_eq!(deq[2].clone(), c.clone());
4550 assert_eq!(deq[3].clone(), d.clone());
4551 }
4552
4553 #[test]
4554 fn vecdeque_push_front_grow() {
4555 let mut deq = SliceRingBuffer::new();
4556 for i in 0..66 {
4557 deq.push_front(i);
4558 }
4559 assert_eq!(deq.len(), 66);
4560
4561 for i in 0..66 {
4562 assert_eq!(deq[i], 65 - i);
4563 }
4564
4565 let mut deq = SliceRingBuffer::new();
4566 for i in 0..66 {
4567 deq.push_back(i);
4568 }
4569
4570 for i in 0..66 {
4571 assert_eq!(deq[i], i);
4572 }
4573 }
4574
4575 #[test]
4576 fn vecdeque_index() {
4577 let mut deq = SliceRingBuffer::new();
4578 for i in 1..4 {
4579 deq.push_front(i);
4580 }
4581 assert_eq!(deq[1], 2);
4582 }
4583
4584 #[test]
4585 #[should_panic]
4586 fn vecdeque_index_out_of_bounds() {
4587 let mut deq = SliceRingBuffer::new();
4588 for i in 1..4 {
4589 deq.push_front(i);
4590 }
4591 deq[3];
4592 }
4593
4594 #[derive(Clone, PartialEq, Debug)]
4595 enum Taggy {
4596 One(i32),
4597 Two(i32, i32),
4598 Three(i32, i32, i32),
4599 }
4600
4601 #[derive(Clone, PartialEq, Debug)]
4602 enum Taggypar<T> {
4603 Onepar(T),
4604 Twopar(T, T),
4605 Threepar(T, T, T),
4606 }
4607
4608 #[derive(Clone, PartialEq, Debug)]
4609 struct RecCy {
4610 x: i32,
4611 y: i32,
4612 t: Taggy,
4613 }
4614
4615 use self::Taggy::*;
4616 use self::Taggypar::*;
4617
4618 fn hash<T: hash::Hash>(t: &T) -> u64 {
4619 let mut s = collections::hash_map::DefaultHasher::new();
4620 use hash::Hasher;
4621 t.hash(&mut s);
4622 s.finish()
4623 }
4624
4625 #[test]
4626 fn vecdeque_param_int() {
4627 vecdeque_parameterized::<i32>(5, 72, 64, 175);
4628 }
4629
4630 #[test]
4631 fn vecdeque_param_taggy() {
4632 vecdeque_parameterized::<Taggy>(
4633 One(1),
4634 Two(1, 2),
4635 Three(1, 2, 3),
4636 Two(17, 42),
4637 );
4638 }
4639
4640 #[test]
4641 fn vecdeque_param_taggypar() {
4642 vecdeque_parameterized::<Taggypar<i32>>(
4643 Onepar::<i32>(1),
4644 Twopar::<i32>(1, 2),
4645 Threepar::<i32>(1, 2, 3),
4646 Twopar::<i32>(17, 42),
4647 );
4648 }
4649
4650 #[test]
4651 fn vecdeque_param_reccy() {
4652 let reccy1 = RecCy {
4653 x: 1,
4654 y: 2,
4655 t: One(1),
4656 };
4657 let reccy2 = RecCy {
4658 x: 345,
4659 y: 2,
4660 t: Two(1, 2),
4661 };
4662 let reccy3 = RecCy {
4663 x: 1,
4664 y: 777,
4665 t: Three(1, 2, 3),
4666 };
4667 let reccy4 = RecCy {
4668 x: 19,
4669 y: 252,
4670 t: Two(17, 42),
4671 };
4672 vecdeque_parameterized::<RecCy>(reccy1, reccy2, reccy3, reccy4);
4673 }
4674
4675 #[test]
4676 fn vecdeque_with_capacity() {
4677 let mut d = SliceRingBuffer::with_capacity(0);
4678 d.push_back(1);
4679 assert_eq!(d.len(), 1);
4680 let mut d = SliceRingBuffer::with_capacity(50);
4681 d.push_back(1);
4682 assert_eq!(d.len(), 1);
4683 }
4684
4685 #[test]
4686 fn vecdeque_with_capacity_non_power_two() {
4687 let mut d3 = SliceRingBuffer::with_capacity(3);
4688 d3.push_back(1);
4689
4690 assert_eq!(d3.pop_front(), Some(1));
4693 assert_eq!(d3.front(), None);
4695
4696 d3.push_back(3);
4698 d3.push_back(6);
4700 assert_eq!(d3.pop_front(), Some(3));
4702
4703 d3.push_back(9);
4707 d3.push_back(12);
4709
4710 d3.push_back(15);
4711 assert_eq!(d3.pop_front(), Some(6));
4719
4720 assert_eq!(d3.front(), Some(&9));
4724 }
4725
4726 #[test]
4727 fn vecdeque_reserve_exact() {
4728 let mut d = SliceRingBuffer::new();
4729 d.push_back(0);
4730 d.reserve_exact(50);
4731 assert!(d.capacity() >= 51);
4732 }
4733
4734 #[test]
4735 fn vecdeque_reserve() {
4736 let mut d = SliceRingBuffer::new();
4737 d.push_back(0);
4738 d.reserve(50);
4739 assert!(d.capacity() >= 51);
4740 }
4741
4742 #[test]
4743 fn vecdeque_swap() {
4744 let mut d: SliceRingBuffer<_> = (0..5).collect();
4745 d.pop_front();
4746 d.swap(0, 3);
4747 assert_eq!(d.iter().cloned().collect::<Vec<_>>(), [4, 2, 3, 1]);
4748 }
4749
4750 #[test]
4751 fn vecdeque_iter() {
4752 let mut d = SliceRingBuffer::new();
4753 assert_eq!(d.iter().next(), None);
4754 assert_eq!(d.iter().size_hint(), (0, Some(0)));
4755
4756 for i in 0..5 {
4757 d.push_back(i);
4758 }
4759 {
4760 let b: &[_] = &[&0, &1, &2, &3, &4];
4761 assert_eq!(d.iter().collect::<Vec<_>>(), b);
4762 }
4763
4764 for i in 6..9 {
4765 d.push_front(i);
4766 }
4767 {
4768 let b: &[_] = &[&8, &7, &6, &0, &1, &2, &3, &4];
4769 assert_eq!(d.iter().collect::<Vec<_>>(), b);
4770 }
4771
4772 let mut it = d.iter();
4773 let mut len = d.len();
4774 loop {
4775 match it.next() {
4776 None => break,
4777 _ => {
4778 len -= 1;
4779 assert_eq!(it.size_hint(), (len, Some(len)))
4780 }
4781 }
4782 }
4783 }
4784
4785 #[test]
4786 fn vecdeque_rev_iter() {
4787 let mut d = SliceRingBuffer::new();
4788 assert_eq!(d.iter().rev().next(), None);
4789
4790 for i in 0..5 {
4791 d.push_back(i);
4792 }
4793 {
4794 let b: &[_] = &[&4, &3, &2, &1, &0];
4795 assert_eq!(d.iter().rev().collect::<Vec<_>>(), b);
4796 }
4797
4798 for i in 6..9 {
4799 d.push_front(i);
4800 }
4801 let b: &[_] = &[&4, &3, &2, &1, &0, &6, &7, &8];
4802 assert_eq!(d.iter().rev().collect::<Vec<_>>(), b);
4803 }
4804
4805 #[test]
4806 fn vecdeque_mut_rev_iter_wrap() {
4807 let mut d = SliceRingBuffer::with_capacity(3);
4808 assert!(d.iter_mut().rev().next().is_none());
4809
4810 d.push_back(1);
4811 d.push_back(2);
4812 d.push_back(3);
4813 assert_eq!(d.pop_front(), Some(1));
4814 d.push_back(4);
4815
4816 assert_eq!(
4817 d.iter_mut().rev().map(|x| *x).collect::<Vec<_>>(),
4818 vec![4, 3, 2]
4819 );
4820 }
4821
4822 #[test]
4823 fn vecdeque_mut_iter() {
4824 let mut d = SliceRingBuffer::new();
4825 assert!(d.iter_mut().next().is_none());
4826
4827 for i in 0..3 {
4828 d.push_front(i);
4829 }
4830
4831 for (i, elt) in d.iter_mut().enumerate() {
4832 assert_eq!(*elt, 2 - i);
4833 *elt = i;
4834 }
4835
4836 {
4837 let mut it = d.iter_mut();
4838 assert_eq!(*it.next().unwrap(), 0);
4839 assert_eq!(*it.next().unwrap(), 1);
4840 assert_eq!(*it.next().unwrap(), 2);
4841 assert!(it.next().is_none());
4842 }
4843 }
4844
4845 #[test]
4846 fn vecdeque_mut_rev_iter() {
4847 let mut d = SliceRingBuffer::new();
4848 assert!(d.iter_mut().rev().next().is_none());
4849
4850 for i in 0..3 {
4851 d.push_front(i);
4852 }
4853
4854 for (i, elt) in d.iter_mut().rev().enumerate() {
4855 assert_eq!(*elt, i);
4856 *elt = i;
4857 }
4858
4859 {
4860 let mut it = d.iter_mut().rev();
4861 assert_eq!(*it.next().unwrap(), 0);
4862 assert_eq!(*it.next().unwrap(), 1);
4863 assert_eq!(*it.next().unwrap(), 2);
4864 assert!(it.next().is_none());
4865 }
4866 }
4867
4868 #[test]
4869 fn vecdeque_into_iter() {
4870 {
4872 let d: SliceRingBuffer<i32> = SliceRingBuffer::new();
4873 let mut iter = d.into_iter();
4874
4875 assert_eq!(iter.size_hint(), (0, Some(0)));
4876 assert_eq!(iter.next(), None);
4877 assert_eq!(iter.size_hint(), (0, Some(0)));
4878 }
4879
4880 {
4882 let mut d = SliceRingBuffer::new();
4883 for i in 0..5 {
4884 d.push_back(i);
4885 }
4886
4887 let b = vec![0, 1, 2, 3, 4];
4888 assert_eq!(d.into_iter().collect::<Vec<_>>(), b);
4889 }
4890
4891 {
4893 let mut d = SliceRingBuffer::new();
4894 for i in 0..5 {
4895 d.push_back(i);
4896 }
4897 for i in 6..9 {
4898 d.push_front(i);
4899 }
4900
4901 let b = vec![8, 7, 6, 0, 1, 2, 3, 4];
4902 assert_eq!(d.into_iter().collect::<Vec<_>>(), b);
4903 }
4904
4905 {
4907 let mut d = SliceRingBuffer::new();
4908 for i in 0..5 {
4909 d.push_back(i);
4910 }
4911 for i in 6..9 {
4912 d.push_front(i);
4913 }
4914
4915 let mut it = d.into_iter();
4916 assert_eq!(it.size_hint(), (8, Some(8)));
4917 assert_eq!(it.next(), Some(8));
4918 assert_eq!(it.size_hint(), (7, Some(7)));
4919 assert_eq!(it.next_back(), Some(4));
4920 assert_eq!(it.size_hint(), (6, Some(6)));
4921 assert_eq!(it.next(), Some(7));
4922 assert_eq!(it.size_hint(), (5, Some(5)));
4923 }
4924 }
4925
4926 #[test]
4927 fn vecdeque_drain() {
4928 {
4930 let mut d: SliceRingBuffer<i32> = SliceRingBuffer::new();
4931
4932 {
4933 let mut iter = d.drain(..);
4934
4935 assert_eq!(iter.size_hint(), (0, Some(0)));
4936 assert_eq!(iter.next(), None);
4937 assert_eq!(iter.size_hint(), (0, Some(0)));
4938 }
4939
4940 assert!(d.is_empty());
4941 }
4942
4943 {
4945 let mut d = SliceRingBuffer::new();
4946 for i in 0..5 {
4947 d.push_back(i);
4948 }
4949
4950 assert_eq!(d.drain(..).collect::<Vec<_>>(), [0, 1, 2, 3, 4]);
4951 assert!(d.is_empty());
4952 }
4953
4954 {
4956 let mut d = SliceRingBuffer::new();
4957 for i in 0..5 {
4958 d.push_back(i);
4959 }
4960 for i in 6..9 {
4961 d.push_front(i);
4962 }
4963
4964 assert_eq!(
4965 d.drain(..).collect::<Vec<_>>(),
4966 [8, 7, 6, 0, 1, 2, 3, 4]
4967 );
4968 assert!(d.is_empty());
4969 }
4970
4971 {
4973 let mut d: SliceRingBuffer<_> = SliceRingBuffer::new();
4974 for i in 0..5 {
4975 d.push_back(i);
4976 }
4977 for i in 6..9 {
4978 d.push_front(i);
4979 }
4980
4981 {
4982 let mut it = d.drain(..);
4983 assert_eq!(it.size_hint(), (8, Some(8)));
4984 assert_eq!(it.next(), Some(8));
4985 assert_eq!(it.size_hint(), (7, Some(7)));
4986 assert_eq!(it.next_back(), Some(4));
4987 assert_eq!(it.size_hint(), (6, Some(6)));
4988 assert_eq!(it.next(), Some(7));
4989 assert_eq!(it.size_hint(), (5, Some(5)));
4990 }
4991 assert!(d.is_empty());
4992 }
4993 }
4994
4995 #[cfg(feature = "unstable")]
4996 #[test]
4997 fn vecdeque_from_iter() {
4998 let v = vec![1, 2, 3, 4, 5, 6, 7];
4999 let deq: SliceRingBuffer<_> = v.iter().cloned().collect();
5000 let u: Vec<_> = deq.iter().cloned().collect();
5001 assert_eq!(u, v);
5002
5003 let seq = (0..).step_by(2).take(256);
5004 let deq: SliceRingBuffer<_> = seq.collect();
5005 for (i, &x) in deq.iter().enumerate() {
5006 assert_eq!(2 * i, x);
5007 }
5008 assert_eq!(deq.len(), 256);
5009 }
5010
5011 #[test]
5012 fn vecdeque_clone() {
5013 let mut d = SliceRingBuffer::new();
5014 d.push_front(17);
5015 d.push_front(42);
5016 d.push_back(137);
5017 d.push_back(137);
5018 assert_eq!(d.len(), 4);
5019 let mut e = d.clone();
5020 assert_eq!(e.len(), 4);
5021 while !d.is_empty() {
5022 assert_eq!(d.pop_back(), e.pop_back());
5023 }
5024 assert_eq!(d.len(), 0);
5025 assert_eq!(e.len(), 0);
5026 }
5027
5028 #[test]
5029 fn vecdeque_eq() {
5030 let mut d = SliceRingBuffer::new();
5031 assert!(d == SliceRingBuffer::with_capacity(0));
5032 d.push_front(137);
5033 d.push_front(17);
5034 d.push_front(42);
5035 d.push_back(137);
5036 let mut e = SliceRingBuffer::with_capacity(0);
5037 e.push_back(42);
5038 e.push_back(17);
5039 e.push_back(137);
5040 e.push_back(137);
5041 assert!(&e == &d);
5042 e.pop_back();
5043 e.push_back(0);
5044 assert!(e != d);
5045 e.clear();
5046 assert!(e == SliceRingBuffer::new());
5047 }
5048
5049 #[test]
5050 fn vecdeque_partial_eq_array() {
5051 let d = SliceRingBuffer::<char>::new();
5052 assert!(d == []);
5053
5054 let mut d = SliceRingBuffer::new();
5055 d.push_front('a');
5056 assert!(d == ['a']);
5057
5058 let mut d = SliceRingBuffer::new();
5059 d.push_back('a');
5060 assert!(d == ['a']);
5061
5062 let mut d = SliceRingBuffer::new();
5063 d.push_back('a');
5064 d.push_back('b');
5065 assert!(d == ['a', 'b']);
5066 }
5067
5068 #[test]
5069 fn vecdeque_hash() {
5070 let mut x = SliceRingBuffer::new();
5071 let mut y = SliceRingBuffer::new();
5072
5073 x.push_back(1);
5074 x.push_back(2);
5075 x.push_back(3);
5076
5077 y.push_back(0);
5078 y.push_back(1);
5079 y.pop_front();
5080 y.push_back(2);
5081 y.push_back(3);
5082
5083 assert!(hash(&x) == hash(&y));
5084 }
5085
5086 #[test]
5087 fn vecdeque_hash_after_rotation() {
5088 let len = 28;
5091 let mut ring: SliceRingBuffer<i32> = (0..len as i32).collect();
5092 let orig = ring.clone();
5093 for _ in 0..ring.capacity() {
5094 ring.pop_front();
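5095 // shift every remaining value down by one and re-append the last value: the logical contents stay `0..len` while the physical head advances by one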
5096 for elt in &mut ring {
5097 *elt -= 1;
5098 }
5099 ring.push_back(len - 1);
5100 assert_eq!(hash(&orig), hash(&ring));
5101 assert_eq!(orig, ring);
5102 assert_eq!(ring, orig);
5103 }
5104 }
5105
5106 #[test]
5107 fn vecdeque_eq_after_rotation() {
5108 let len = 28;
5111 let mut ring: SliceRingBuffer<i32> = (0..len as i32).collect();
5112 let mut shifted = ring.clone();
5113 for _ in 0..10 {
5114 ring.pop_front();
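5115 // same shift-and-reappend rotation as in the hash test: the logical contents of `ring` never change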
5116 for elt in &mut ring {
5117 *elt -= 1;
5118 }
5119 ring.push_back(len - 1);
5120 }
5121
5122 for _ in 0..shifted.capacity() {
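5123 // rotate `shifted` one step per iteration; it must compare equal to `ring` after every step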
5124 shifted.pop_front();
5125 for elt in &mut shifted {
5126 *elt -= 1;
5127 }
5128 shifted.push_back(len - 1);
5129 assert_eq!(shifted, ring);
5130 assert_eq!(ring, shifted);
5131 }
5132 }
5133
5134 #[test]
5135 fn vecdeque_ord() {
5136 let x = SliceRingBuffer::new();
5137 let mut y = SliceRingBuffer::new();
5138 y.push_back(1);
5139 y.push_back(2);
5140 y.push_back(3);
5141 assert!(x < y);
5142 assert!(y > x);
5143 assert!(x <= x);
5144 assert!(x >= x);
5145 }
5146
5147 #[test]
5148 fn vecdeque_show() {
5149 let ringbuf: SliceRingBuffer<_> = (0..10).collect();
5150 assert_eq!(format!("{:?}", ringbuf), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
5151
5152 let ringbuf: SliceRingBuffer<_> = vec!["just", "one", "test", "more"]
5153 .iter()
5154 .cloned()
5155 .collect();
5156 assert_eq!(
5157 format!("{:?}", ringbuf),
5158 "[\"just\", \"one\", \"test\", \"more\"]"
5159 );
5160 }
5161
5162 #[test]
5163 fn vecdeque_drop_zst() {
5164 static mut DROPS: i32 = 0;
5165 struct Elem;
5166 impl Drop for Elem {
5167 fn drop(&mut self) {
5168 unsafe {
5169 DROPS += 1;
5170 }
5171 }
5172 }
5173
5174 let mut ring = SliceRingBuffer::new();
5175 ring.push_back(Elem);
5176 ring.push_front(Elem);
5177 ring.push_back(Elem);
5178 ring.push_front(Elem);
5179 mem::drop(ring);
5180
5181 assert_eq!(unsafe { DROPS }, 4);
5182 }
5183
5184 #[test]
5185 fn vecdeque_drop() {
5186 static mut DROPS: i32 = 0;
5187 #[derive(Clone)]
5188 struct Elem(i32);
5189 impl Drop for Elem {
5190 fn drop(&mut self) {
5191 unsafe {
5192 DROPS += 1;
5193 }
5194 }
5195 }
5196
5197 let mut ring = SliceRingBuffer::new();
5198 ring.push_back(Elem(0));
5199 ring.push_front(Elem(1));
5200 ring.push_back(Elem(2));
5201 ring.push_front(Elem(3));
5202 mem::drop(ring);
5203
5204 assert_eq!(unsafe { DROPS }, 4);
5205 }
5206
5207 #[test]
5208 fn vecdeque_drop_with_pop_zst() {
5209 static mut DROPS: i32 = 0;
5210 struct Elem;
5211 impl Drop for Elem {
5212 fn drop(&mut self) {
5213 unsafe {
5214 DROPS += 1;
5215 }
5216 }
5217 }
5218
5219 let mut ring = SliceRingBuffer::new();
5220 ring.push_back(Elem);
5221 ring.push_front(Elem);
5222 ring.push_back(Elem);
5223 ring.push_front(Elem);
5224
5225 mem::drop(ring.pop_back());
5226 mem::drop(ring.pop_front());
5227 assert_eq!(unsafe { DROPS }, 2);
5228
5229 mem::drop(ring);
5230 assert_eq!(unsafe { DROPS }, 4);
5231 }
5232
5233 #[test]
5234 fn vecdeque_drop_with_pop() {
5235 static mut DROPS: i32 = 0;
5236 struct Elem(i32);
5237 impl Drop for Elem {
5238 fn drop(&mut self) {
5239 unsafe {
5240 DROPS += 1;
5241 }
5242 }
5243 }
5244
5245 let mut ring = SliceRingBuffer::new();
5246 ring.push_back(Elem(0));
5247 ring.push_front(Elem(0));
5248 ring.push_back(Elem(0));
5249 ring.push_front(Elem(0));
5250
5251 mem::drop(ring.pop_back());
5252 mem::drop(ring.pop_front());
5253 assert_eq!(unsafe { DROPS }, 2);
5254
5255 mem::drop(ring);
5256 assert_eq!(unsafe { DROPS }, 4);
5257 }
5258
5259 #[test]
5260 fn vecdeque_drop_clear_zst() {
5261 static mut DROPS: i32 = 0;
5262 struct Elem;
5263 impl Drop for Elem {
5264 fn drop(&mut self) {
5265 unsafe {
5266 DROPS += 1;
5267 }
5268 }
5269 }
5270
5271 let mut ring = SliceRingBuffer::new();
5272 ring.push_back(Elem);
5273 ring.push_front(Elem);
5274 ring.push_back(Elem);
5275 ring.push_front(Elem);
5276 ring.clear();
5277 assert_eq!(unsafe { DROPS }, 4);
5278
5279 mem::drop(ring);
5280 assert_eq!(unsafe { DROPS }, 4);
5281 }
5282
5283 #[test]
5284 fn vecdeque_drop_clear() {
5285 static mut DROPS: i32 = 0;
5286 struct Elem(i32);
5287 impl Drop for Elem {
5288 fn drop(&mut self) {
5289 unsafe {
5290 DROPS += 1;
5291 }
5292 }
5293 }
5294
5295 let mut ring = SliceRingBuffer::new();
5296 ring.push_back(Elem(0));
5297 ring.push_front(Elem(0));
5298 ring.push_back(Elem(0));
5299 ring.push_front(Elem(0));
5300 ring.clear();
5301 assert_eq!(unsafe { DROPS }, 4);
5302
5303 mem::drop(ring);
5304 assert_eq!(unsafe { DROPS }, 4);
5305 }
5306
5307 #[test]
5308 fn vecdeque_reserve_grow() {
5309 let mut ring = SliceRingBuffer::with_capacity(4);
5312 for i in 0..3 {
5313 ring.push_back(i);
5314 }
5315 ring.reserve(7);
5316 for i in 0..3 {
5317 assert_eq!(ring.pop_front(), Some(i));
5318 }
5319
5320 let mut ring = SliceRingBuffer::with_capacity(4);
5323 for i in 0..1 {
5324 ring.push_back(i);
5325 assert_eq!(ring.pop_front(), Some(i));
5326 }
5327 for i in 0..3 {
5328 ring.push_back(i);
5329 }
5330 ring.reserve(7);
5331 for i in 0..3 {
5332 assert_eq!(ring.pop_front(), Some(i));
5333 }
5334
5335 let mut ring = SliceRingBuffer::with_capacity(4);
5338 for i in 0..3 {
5339 ring.push_back(i);
5340 assert_eq!(ring.pop_front(), Some(i));
5341 }
5342 for i in 0..3 {
5343 ring.push_back(i);
5344 }
5345 ring.reserve(7);
5346 for i in 0..3 {
5347 assert_eq!(ring.pop_front(), Some(i));
5348 }
5349 }
5350
5351 #[test]
5352 fn vecdeque_get() {
5353 let mut ring = SliceRingBuffer::new();
5354 ring.push_back(0);
5355 assert_eq!(ring.get(0), Some(&0));
5356 assert_eq!(ring.get(1), None);
5357
5358 ring.push_back(1);
5359 assert_eq!(ring.get(0), Some(&0));
5360 assert_eq!(ring.get(1), Some(&1));
5361 assert_eq!(ring.get(2), None);
5362
5363 ring.push_back(2);
5364 assert_eq!(ring.get(0), Some(&0));
5365 assert_eq!(ring.get(1), Some(&1));
5366 assert_eq!(ring.get(2), Some(&2));
5367 assert_eq!(ring.get(3), None);
5368
5369 assert_eq!(ring.pop_front(), Some(0));
5370 assert_eq!(ring.get(0), Some(&1));
5371 assert_eq!(ring.get(1), Some(&2));
5372 assert_eq!(ring.get(2), None);
5373
5374 assert_eq!(ring.pop_front(), Some(1));
5375 assert_eq!(ring.get(0), Some(&2));
5376 assert_eq!(ring.get(1), None);
5377
5378 assert_eq!(ring.pop_front(), Some(2));
5379 assert_eq!(ring.get(0), None);
5380 assert_eq!(ring.get(1), None);
5381 }
5382
5383 #[test]
5384 fn vecdeque_get_mut() {
5385 let mut ring = SliceRingBuffer::new();
5386 for i in 0..3 {
5387 ring.push_back(i);
5388 }
5389
5390 match ring.get_mut(1) {
5391 Some(x) => *x = -1,
5392 None => (),
5393 };
5394
5395 assert_eq!(ring.get_mut(0), Some(&mut 0));
5396 assert_eq!(ring.get_mut(1), Some(&mut -1));
5397 assert_eq!(ring.get_mut(2), Some(&mut 2));
5398 assert_eq!(ring.get_mut(3), None);
5399
5400 assert_eq!(ring.pop_front(), Some(0));
5401 assert_eq!(ring.get_mut(0), Some(&mut -1));
5402 assert_eq!(ring.get_mut(1), Some(&mut 2));
5403 assert_eq!(ring.get_mut(2), None);
5404 }
5405
5406 #[test]
5407 fn vecdeque_front() {
5408 let mut ring = SliceRingBuffer::new();
5409 ring.push_back(10);
5410 ring.push_back(20);
5411 assert_eq!(ring.front(), Some(&10));
5412 ring.pop_front();
5413 assert_eq!(ring.front(), Some(&20));
5414 ring.pop_front();
5415 assert_eq!(ring.front(), None);
5416 }
5417
5418 #[test]
5419 fn vecdeque_as_slices() {
5420 let mut ring: SliceRingBuffer<i32> =
5421 SliceRingBuffer::with_capacity(127);
5422 let cap = ring.capacity() as i32;
5423 let first = cap / 2;
5424 let last = cap - first;
5425 for i in 0..first {
5426 ring.push_back(i);
5427
5428 let (left, right) = ring.as_slices();
5429 let expected: Vec<_> = (0..i + 1).collect();
5430 assert_eq!(left, &expected[..]);
5431 assert_eq!(right, []);
5432 }
5433
5434 for j in -last..0 {
5435 ring.push_front(j);
5436 let (left, right) = ring.as_slices();
5437 let mut expected_left: Vec<_> = (-last..j + 1).rev().collect();
5438 expected_left.extend(0..first);
5439 assert_eq!(left, &expected_left[..]);
5440 assert_eq!(right, []);
5441 }
5442
5443 assert_eq!(ring.len() as i32, cap);
5444 assert_eq!(ring.capacity() as i32, cap);
5445 }
5446
5447 #[test]
5448 fn vecdeque_as_mut_slices() {
5449 let mut ring: SliceRingBuffer<i32> =
5450 SliceRingBuffer::with_capacity(127);
5451 let cap = ring.capacity() as i32;
5452 let first = cap / 2;
5453 let last = cap - first;
5454 for i in 0..first {
5455 ring.push_back(i);
5456
5457 let (left, right) = ring.as_mut_slices();
5458 let expected: Vec<_> = (0..i + 1).collect();
5459 assert_eq!(left, &expected[..]);
5460 assert_eq!(right, []);
5461 }
5462
5463 for j in -last..0 {
5464 ring.push_front(j);
5465 let (left, right) = ring.as_mut_slices();
5466 let mut expected_left: Vec<_> = (-last..j + 1).rev().collect();
5467 expected_left.extend(0..first);
5468 assert_eq!(left, &expected_left[..]);
5469 assert_eq!(right, []);
5470 }
5471
5472 assert_eq!(ring.len() as i32, cap);
5473 assert_eq!(ring.capacity() as i32, cap);
5474 }
5475
5476 #[test]
5477 fn vecdeque_append() {
5478 let mut a: SliceRingBuffer<_> = vec![1, 2, 3].into_iter().collect();
5479 let mut b: SliceRingBuffer<_> = vec![4, 5, 6].into_iter().collect();
5480
5481 a.append(&mut b);
5483 assert_eq!(a.iter().cloned().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]);
5484 assert_eq!(b.iter().cloned().collect::<Vec<_>>(), []);
5485
5486 a.append(&mut b);
5488 assert_eq!(a.iter().cloned().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]);
5489 assert_eq!(b.iter().cloned().collect::<Vec<_>>(), []);
5490
5491 b.append(&mut a);
5493 assert_eq!(b.iter().cloned().collect::<Vec<_>>(), [1, 2, 3, 4, 5, 6]);
5494 assert_eq!(a.iter().cloned().collect::<Vec<_>>(), []);
5495 }
5496
5497 #[test]
5498 fn vecdeque_retain() {
5499 let mut buf = SliceRingBuffer::new();
5500 buf.extend(1..5);
5501 buf.retain(|&x| x % 2 == 0);
5502 let v: Vec<_> = buf.into_iter().collect();
5503 assert_eq!(&v[..], &[2, 4]);
5504 }
5505
5506 #[test]
5507 fn vecdeque_extend_ref() {
5508 let mut v = SliceRingBuffer::new();
5509 v.push_back(1);
5510 v.extend(&[2, 3, 4]);
5511
5512 assert_eq!(v.len(), 4);
5513 assert_eq!(v[0], 1);
5514 assert_eq!(v[1], 2);
5515 assert_eq!(v[2], 3);
5516 assert_eq!(v[3], 4);
5517
5518 let mut w = SliceRingBuffer::new();
5519 w.push_back(5);
5520 w.push_back(6);
5521 v.extend(&w);
5522
5523 assert_eq!(v.len(), 6);
5524 assert_eq!(v[0], 1);
5525 assert_eq!(v[1], 2);
5526 assert_eq!(v[2], 3);
5527 assert_eq!(v[3], 4);
5528 assert_eq!(v[4], 5);
5529 assert_eq!(v[5], 6);
5530 }
5531
5532 #[test]
5533 fn vecdeque_contains() {
5534 let mut v = SliceRingBuffer::new();
5535 v.extend(&[2, 3, 4]);
5536
5537 assert!(v.contains(&3));
5538 assert!(!v.contains(&1));
5539
5540 v.clear();
5541
5542 assert!(!v.contains(&3));
5543 }
5544
5545 #[cfg(feature = "unstable")]
5555 #[test]
5556 fn vecdeque_is_empty() {
5557 let mut v = SliceRingBuffer::<i32>::new();
5558 assert!(v.is_empty());
5559 assert!(v.iter().is_empty());
5560 assert!(v.iter_mut().is_empty());
5561 v.extend(&[2, 3, 4]);
5562 assert!(!v.is_empty());
5563 assert!(!v.iter().is_empty());
5564 assert!(!v.iter_mut().is_empty());
5565 while let Some(_) = v.pop_front() {
5566 assert_eq!(v.is_empty(), v.len() == 0);
5567 assert_eq!(v.iter().is_empty(), v.iter().len() == 0);
5568 assert_eq!(v.iter_mut().is_empty(), v.iter_mut().len() == 0);
5569 }
5570 assert!(v.is_empty());
5571 assert!(v.iter().is_empty());
5572 assert!(v.iter_mut().is_empty());
5573 assert!(v.into_iter().is_empty());
5574 }
5575
5576 #[cfg(all(feature = "bytes_buf", feature = "use_std"))]
5577 #[test]
5578 fn bytes_bufmut() {
5579 use bytes::{BigEndian, BufMut};
5580 use std::io::Write;
5581
5582 {
5583 let mut buf = sdeq![];
5584
5585 buf.put("hello world");
5586
5587 assert_eq!(buf, b"hello world");
5588 }
5589 {
5590 let mut buf = SliceRingBuffer::with_capacity(16);
5591
5592 unsafe {
5593 buf.bytes_mut()[0] = b'h';
5594 buf.bytes_mut()[1] = b'e';
5595
5596 buf.advance_mut(2);
5597
5598 buf.bytes_mut()[0] = b'l';
5599 buf.bytes_mut()[1..3].copy_from_slice(b"lo");
5600
5601 buf.advance_mut(3);
5602 }
5603
5604 assert_eq!(5, buf.len());
5605 assert_eq!(buf, b"hello");
5606 }
5607 {
5608 let mut buf = SliceRingBuffer::with_capacity(16);
5609
5610 unsafe {
5611 buf.bytes_mut()[0] = b'h';
5612 buf.bytes_mut()[1] = b'e';
5613
5614 buf.advance_mut(2);
5615
5616 buf.bytes_mut()[0] = b'l';
5617 buf.bytes_mut()[1..3].copy_from_slice(b"lo");
5618
5619 buf.advance_mut(3);
5620 }
5621
5622 assert_eq!(5, buf.len());
5623 assert_eq!(buf, b"hello");
5624 }
5625 {
5626 let mut buf = sdeq![];
5627
5628 buf.put(b'h');
5629 buf.put(&b"ello"[..]);
5630 buf.put(" world");
5631
5632 assert_eq!(buf, b"hello world");
5633 }
5634 {
5635 let mut buf = sdeq![];
5636 buf.put_u8(0x01);
5637 assert_eq!(buf, b"\x01");
5638 }
5639 {
5640 let mut buf = sdeq![];
5641 buf.put_i8(0x01);
5642 assert_eq!(buf, b"\x01");
5643 }
5644 {
5645 let mut buf = sdeq![];
5646 buf.put_u16::<BigEndian>(0x0809);
5647 assert_eq!(buf, b"\x08\x09");
5648 }
5649 {
5650 let mut buf = sdeq![];
5651
5652 {
5653 let reference = buf.by_ref();
5654
5655 let mut writer = reference.writer();
5657
5658 Write::write(&mut writer, &b"hello world"[..]).unwrap();
5660 }
5661
5662 assert_eq!(buf, &b"hello world"[..]);
5663 }
5664 {
5665 let mut buf = sdeq![].writer();
5666
5667 let num = buf.write(&b"hello world"[..]).unwrap();
5668 assert_eq!(11, num);
5669
5670 let buf = buf.into_inner();
5671
5672 assert_eq!(*buf, b"hello world"[..]);
5673 }
5674 }
5675
5676 #[cfg(all(feature = "bytes_buf", feature = "use_std"))]
5677 #[test]
5678 fn bytes_buf() {
5679 {
5680 use bytes::{Buf, Bytes, IntoBuf};
5681
5682 let buf = Bytes::from(&b"hello world"[..]).into_buf();
5683 let vec: SliceRingBuffer<u8> = buf.collect();
5684
5685 assert_eq!(vec, &b"hello world"[..]);
5686 }
5687 {
5688 use bytes::{Buf, BufMut};
5689 use std::io::Cursor;
5690
5691 let mut buf = Cursor::new("hello world").take(5);
5692 let mut dst = sdeq![];
5693
5694 dst.put(&mut buf);
5695 assert_eq!(dst, b"hello");
5696
5697 let mut buf = buf.into_inner();
5698 dst.clear();
5699 dst.put(&mut buf);
5700 assert_eq!(dst, b" world");
5701 }
5702 {
5703 use bytes::{Buf, BufMut};
5704 use std::io::Cursor;
5705
5706 let mut buf = Cursor::new("hello world");
5707 let mut dst = sdeq![];
5708
5709 {
5710 let reference = buf.by_ref();
5711 dst.put(&mut reference.take(5));
5712 assert_eq!(dst, b"hello");
5713 }
5714
5715 dst.clear();
5716 dst.put(&mut buf);
5717 assert_eq!(dst, b" world");
5718 }
5719 }
5720
5721 #[test]
5722 fn issue_42() {
5723 let page_size = crate::mirrored::allocation_granularity();
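5724 // regression test: move the head and tail almost a full buffer-length forward and check that the tail/head slice is still writable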
5725 let mut deque = SliceRingBuffer::<u8>::with_capacity(page_size);
5726 let page_size = page_size as isize;
5727
5728 let slice = unsafe {
5729 deque.move_tail(page_size);
5730 deque.move_head(page_size / 100 * 99);
5731 deque.move_tail(page_size / 100 * 99);
5732 deque.move_head(page_size / 100 * 99);
5733 deque.tail_head_slice()
5734 };
5735
5736 for i in 0..slice.len() {
5737 slice[i] = mem::MaybeUninit::new(0);
5739 }
5740 }
5741
5742 #[test]
5743 fn issue_45() {
5744 fn refill(buf: &mut SliceRingBuffer<u8>) {
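5745 // top up the buffer with five frames' worth of zeroed bytes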
5746 let data = [0u8; MAX_SAMPLES_PER_FRAME * 5];
5747 buf.extend(data.iter());
5748 }
5749
5750 const MAX_SAMPLES_PER_FRAME: usize = 1152 * 2;
5751 const BUFFER_SIZE: usize = MAX_SAMPLES_PER_FRAME * 15;
5752 const REFILL_TRIGGER: usize = MAX_SAMPLES_PER_FRAME * 8;
5753
5754 let mut buf = SliceRingBuffer::with_capacity(BUFFER_SIZE);
5755 for _ in 0..10_000 {
5756 if buf.len() < REFILL_TRIGGER {
5757 refill(&mut buf);
5758 }
5759
5760 let cur_len = buf.len();
5761 buf.truncate_front(cur_len - 10);
5762 }
5763 }
5764
5765 #[test]
5766 fn issue_47() {
5767 let page_size = crate::mirrored::allocation_granularity();
5768 let mut sdq = SliceRingBuffer::<u8>::new();
5769 let vec = vec![0_u8; page_size + 1];
5770 sdq.extend(vec);
5771 }
5772
5773 #[test]
5774 fn issue_50() {
5775 use std::fs::File;
5776 use std::io::Write;
5777 use std::path::Path;
5778
5779 let out_buffer = SliceRingBuffer::new();
5780
5781 let p = if cfg!(target_os = "windows") {
5782 "slice_deque_test"
5783 } else {
5784 "/tmp/slice_deque_test"
5785 };
5786
5787 let mut out_file = File::create(Path::new(p)).unwrap();
5788 let res = out_file.write(&out_buffer[..]);
5789 println!("Result was {:?}", res);
5790 println!("Buffer size: {}", out_buffer.len());
5791 println!("Address of buffer was: {:?}", out_buffer.as_ptr());
5792 }
5793
5794 #[test]
5795 fn empty_ptr() {
5796 {
5797 let sdeq = SliceRingBuffer::<i8>::new();
5798 let v = Vec::<i8>::new();
5799 assert_eq!(sdeq.as_ptr() as usize, mem::align_of::<i8>());
5800 assert_eq!(v.as_ptr() as usize, mem::align_of::<i8>());
5801 }
5802 {
5803 let sdeq = SliceRingBuffer::<i16>::new();
5804 let v = Vec::<i16>::new();
5805 assert_eq!(sdeq.as_ptr() as usize, mem::align_of::<i16>());
5806 assert_eq!(v.as_ptr() as usize, mem::align_of::<i16>());
5807 }
5808 {
5809 let sdeq = SliceRingBuffer::<i32>::new();
5810 let v = Vec::<i32>::new();
5811 assert_eq!(sdeq.as_ptr() as usize, mem::align_of::<i32>());
5812 assert_eq!(v.as_ptr() as usize, mem::align_of::<i32>());
5813 }
5814 {
5815 let sdeq = SliceRingBuffer::<i64>::new();
5816 let v = Vec::<i64>::new();
5817 assert_eq!(sdeq.as_ptr() as usize, mem::align_of::<i64>());
5818 assert_eq!(v.as_ptr() as usize, mem::align_of::<i64>());
5819 }
5820 {
5821 #[repr(align(32))]
5822 struct Foo(i8);
5823 let sdeq = SliceRingBuffer::<Foo>::new();
5824 let v = Vec::<Foo>::new();
5825 assert_eq!(sdeq.as_ptr() as usize, mem::align_of::<Foo>());
5826 assert_eq!(v.as_ptr() as usize, mem::align_of::<Foo>());
5827 }
5828 }
5829
5830 #[test]
5831 fn issue_57() {
5832 const C: [i16; 3] = [42; 3];
5833
5834 let mut deque = SliceRingBuffer::new();
5835
5836 for _ in 0..918 {
5837 deque.push_front(C);
5838 }
5839
5840 for _ in 0..237 {
5841 assert_eq!(deque.pop_front(), Some(C));
5842 assert!(!deque.is_empty());
5843 assert_eq!(*deque.back().unwrap(), C);
5844 }
5845 }
5846
5847 #[test]
5848 fn drop_count() {
5849 #[repr(C)]
5850 struct Foo(*mut usize);
5851 impl Drop for Foo {
5852 fn drop(&mut self) {
5853 unsafe {
5854 *self.0 += 1;
5855 }
5856 }
5857 }
5858
5859 let mut count = 0_usize;
5860 {
5861 let mut deque = SliceRingBuffer::new();
5862 for _ in 0..10 {
5863 deque.push_back(Foo(&mut count as *mut _));
5864 deque.pop_front();
5865 }
5866 }
5867 assert_eq!(count, 10);
5868 }
5869
5870 #[test]
5871 fn non_fitting_elem_type() {
5872 #[repr(C)]
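5873 // a three-byte element type: the allocation granularity is not a multiple of its size, so a whole number of elements never fills the mirrored buffer exactly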
5876 struct NonFitting(u8, u8, u8);
5877 impl NonFitting {
5878 fn new() -> Self {
5879 Self(1, 2, 3)
5880 }
5881 fn valid(&self) -> bool {
5882 if self.0 == 1 && self.1 == 2 && self.2 == 3 {
5884 true
5885 } else {
5886 dbg!((self.0, self.1, self.2));
5887 false
5888 }
5889 }
5890 }
5891
5892 impl Drop for NonFitting {
5893 fn drop(&mut self) {
5894 unsafe {
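5895 // poison the bytes on drop so that reading an element after it has been dropped makes `valid()` fail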
5896 let ptr = self as *mut Self as *mut u8;
5897 ptr.write_volatile(4);
5898 ptr.add(1).write_volatile(5);
5899 ptr.add(2).write_volatile(6);
5900 }
5901 }
5902 }
5903
5904 let page_size = crate::mirrored::allocation_granularity();
5905 let elem_size = mem::size_of::<NonFitting>();
5906
5907 assert!(page_size % elem_size != 0);
5908 let no_elements_that_fit = page_size / elem_size;
5909
5910 let mut deque = SliceRingBuffer::new();
5916 deque.push_back(NonFitting::new());
5917
5918 for _ in 0..no_elements_that_fit {
5922 deque.push_back(NonFitting::new());
5924 deque.truncate_front(1);
5926 assert_eq!(deque.len(), 1);
5927 assert!(deque[0].valid());
5928 }
5929 }
5930
5931 #[test]
5932 fn issue_57_2() {
5933 let mut deque = SliceRingBuffer::new();
5934 for _ in 0..30_000 {
5935 deque.push_back(String::from("test"));
5936 if deque.len() == 8 {
5937 deque.pop_front();
5938 }
5939 }
5940 }
5941
5942 #[test]
5943 fn zst() {
5944 struct A;
5945 let mut s = SliceRingBuffer::<A>::new();
5946 assert_eq!(s.len(), 0);
5947 assert_eq!(s.capacity(), isize::max_value() as usize);
5948
5949 for _ in 0..10 {
5950 s.push_back(A);
5951 }
5952 assert_eq!(s.len(), 10);
5953 }
5954
5955 #[test]
5956 fn sync() {
5957 fn assert_sync<T: Sync>(_: T) {}
5958
5959 struct S(*mut u8);
5960 unsafe impl Sync for S {}
5961 let x = SliceRingBuffer::<S>::new();
5962 assert_sync(x);
5963 }
5964
5965 #[test]
5966 fn send() {
5967 fn assert_send<T: Send>(_: T) {}
5968
5969 struct S(*mut u8);
5970 unsafe impl Send for S {}
5971 let x = SliceRingBuffer::<S>::new();
5972 assert_send(x);
5973 }
5974}