1use core::fmt::{self, Debug, Formatter};
7use core::hash::{Hash, Hasher};
8use core::iter::FusedIterator;
9use core::marker::PhantomData;
10use core::ops::{Index, IndexMut, Range};
11
12use crate::storage::{
13 buffer_too_large_for_index_type, mut_ptr_at_index, normalize_range, ptr_at_index, ArrayLayout, Capacity, Storage,
14};
15use crate::collections::vec::Vec;
16
/// A double-ended queue backed by a generic storage `S`, indexed by a compact
/// capacity type `I`, and laid out as a ring buffer.
pub struct Deque<T, S: Storage<ArrayLayout<T>>, I: Capacity> {
    // Physical index of the first (front) element in the ring buffer.
    front: I,
    // Number of initialized elements currently stored.
    len: I,
    // Backing storage; slots outside the live range are uninitialized.
    buf: S,
    // Marks logical ownership of `T` for drop-check/variance purposes.
    elem: PhantomData<T>,
}
40
41impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> From<S> for Deque<T, S, I> {
42 fn from(buf: S) -> Self {
47 if buf.capacity() > I::MAX_REPRESENTABLE {
48 buffer_too_large_for_index_type::<I>();
49 }
50
51 Deque {
52 front: I::from_usize(0),
53 len: I::from_usize(0),
54 buf,
55 elem: PhantomData,
56 }
57 }
58}
59
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> From<Vec<T, S, I>> for Deque<T, S, I> {
    /// Converts a vector into a deque, reusing its buffer without copying.
    fn from(vec: Vec<T, S, I>) -> Self {
        // A vector's elements start at physical index 0, so `front` is 0.
        let (buf, len) = vec.into_raw_parts();
        Deque {
            front: I::from_usize(0),
            len,
            buf,
            elem: PhantomData,
        }
    }
}
71
72impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Deque<T, S, I> {
    /// Decomposes the deque into its backing buffer, front index, and length
    /// without dropping any elements.
    pub fn into_raw_parts(self) -> (S, I, I) {
        // `Deque` implements `Drop`, so fields cannot be moved out directly;
        // read the buffer out through a raw pointer instead.
        let ptr = core::ptr::addr_of!(self.buf);
        // SAFETY: `self` is forgotten immediately below, so the buffer is
        // neither read twice nor dropped.
        let result = (unsafe { ptr.read() }, self.front, self.len);
        core::mem::forget(self);
        result
    }
98
    /// Reassembles a deque from parts previously produced by
    /// [`into_raw_parts`](Deque::into_raw_parts).
    ///
    /// # Safety
    /// `buf`, `front`, and `length` must describe a valid ring buffer:
    /// `length` initialized elements starting at physical slot `front`,
    /// wrapping at the buffer's capacity.
    pub unsafe fn from_raw_parts(buf: S, front: I, length: I) -> Self {
        Deque {
            front,
            len: length,
            buf,
            elem: PhantomData,
        }
    }
125
    /// Returns the total number of elements the deque can hold.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.buf.capacity()
    }
131
    /// Returns the number of elements currently in the deque.
    #[inline]
    pub fn len(&self) -> usize {
        self.len.as_usize()
    }
137
138 #[inline]
140 pub fn is_empty(&self) -> bool {
141 self.len.as_usize() == 0
142 }
143
144 #[inline]
146 pub fn is_full(&self) -> bool {
147 self.len.as_usize() == self.buf.capacity()
148 }
149
150 pub fn contains(&self, x: &T) -> bool
162 where
163 T: PartialEq,
164 {
165 let (a, b) = self.as_slices();
166 a.contains(x) || b.contains(x)
167 }
168
    /// Maps a logical index to a physical buffer slot without bounds checking.
    /// The result is only meaningful for `index <= len`.
    #[inline(always)]
    fn physical_index_unchecked(&self, index: I) -> usize {
        let index = index.as_usize();
        (self.front.as_usize() + index) % self.capacity()
    }
174
175 #[inline(always)]
176 fn physical_index(&self, index: I) -> Option<usize> {
177 let index = index.as_usize();
178 if index >= self.len() {
179 return None;
180 }
181
182 Some((self.front.as_usize() + index) % self.capacity())
183 }
184
    /// Views the *entire* backing buffer as a mutable slice, including slots
    /// that are not currently occupied. Callers must not read unoccupied
    /// slots as initialized values.
    fn storage_mut(&mut self) -> &mut [T] {
        unsafe {
            core::slice::from_raw_parts_mut(self.buf.get_mut_ptr().cast::<T>(), self.capacity())
        }
    }
190
    /// Returns a reference to the element at logical `index`, or `None` if
    /// the index is out of bounds.
    #[inline]
    pub fn get(&self, index: I) -> Option<&T> {
        let index = self.physical_index(index)?;
        // SAFETY: `physical_index` only returns occupied slots.
        unsafe { Some(&*ptr_at_index(&self.buf, index)) }
    }
200
    /// Returns a mutable reference to the element at logical `index`, or
    /// `None` if the index is out of bounds.
    #[inline]
    pub fn get_mut(&mut self, index: I) -> Option<&mut T> {
        let index = self.physical_index(index)?;
        // SAFETY: `physical_index` only returns occupied slots.
        unsafe { Some(&mut *mut_ptr_at_index(&mut self.buf, index)) }
    }
210
    /// Returns a reference to the first element, or `None` if empty.
    #[inline]
    pub fn front(&self) -> Option<&T> {
        self.get(I::from_usize(0))
    }
216
    /// Returns a mutable reference to the first element, or `None` if empty.
    #[inline]
    pub fn front_mut(&mut self) -> Option<&mut T> {
        self.get_mut(I::from_usize(0))
    }
222
223 #[inline]
225 pub fn back(&self) -> Option<&T> {
226 self.get(I::from_usize(self.len() - 1))
227 }
228
229 #[inline]
231 pub fn back_mut(&mut self) -> Option<&mut T> {
232 self.get_mut(I::from_usize(self.len() - 1))
233 }
234
235 pub fn pop_front(&mut self) -> Option<T> {
248 if self.is_empty() {
249 return None;
250 }
251
252 let front = self.front.as_usize();
253 let result = unsafe { ptr_at_index(&self.buf, front).read() };
254 self.front = I::from_usize(front + 1 % self.capacity());
255 self.len = I::from_usize(self.len() - 1);
256
257 Some(result)
258 }
259
260 pub fn pop_back(&mut self) -> Option<T> {
273 if self.is_empty() {
274 return None;
275 }
276
277 let idx = (self.front.as_usize() + self.len() - 1) % self.capacity();
278 let result = unsafe { ptr_at_index(&self.buf, idx).read() };
279 self.len = I::from_usize(self.len() - 1);
280
281 Some(result)
282 }
283
284 pub fn try_push_front(&mut self, value: T) -> Result<(), T> {
297 if self.is_full() {
298 return Err(value);
299 }
300
301 let idx = (self.front.as_usize() + self.capacity() - 1) % self.capacity();
302 let ptr = mut_ptr_at_index(&mut self.buf, idx);
303 unsafe {
304 ptr.write(value);
305 }
306
307 self.front = I::from_usize(idx);
308 self.len = I::from_usize(self.len() + 1);
309
310 Ok(())
311 }
312
313 pub fn push_front(&mut self, value: T) {
319 if self.try_push_front(value).is_err() {
320 panic!("deque is already at capacity")
321 }
322 }
323
324 pub fn force_push_front(&mut self, value: T) -> Option<T> {
339 let result = self.is_full().then(|| self.pop_back()).flatten();
340 self.push_front(value);
341 result
342 }
343
344 pub fn try_push_back(&mut self, value: T) -> Result<(), T> {
357 if self.is_full() {
358 return Err(value);
359 }
360
361 let end = self.physical_index_unchecked(self.len);
362 let ptr = mut_ptr_at_index(&mut self.buf, end);
363 unsafe {
364 ptr.write(value);
365 }
366
367 self.len = I::from_usize(self.len() + 1);
368
369 Ok(())
370 }
371
372 pub fn push_back(&mut self, value: T) {
378 if self.try_push_back(value).is_err() {
379 panic!("deque is already at capacity")
380 }
381 }
382
383 pub fn force_push_back(&mut self, value: T) -> Option<T> {
398 let result = self.is_full().then(|| self.pop_front()).flatten();
399 self.push_back(value);
400 result
401 }
402
403 pub fn truncate(&mut self, len: I) {
419 let new_len = len.as_usize();
420 let old_len = self.len();
421
422 if new_len >= old_len {
423 return;
424 }
425
426 for i in new_len..old_len {
427 let idx = i % self.capacity();
428 let ptr = self.buf.get_mut_ptr().cast::<T>();
429 unsafe {
430 ptr.add(idx).drop_in_place();
431 }
432 }
433
434 self.len = len;
435 }
436
    /// Removes (and drops) every element in the deque.
    pub fn clear(&mut self) {
        self.truncate(I::from_usize(0));
        // Resetting `front` makes future pushes contiguous from slot 0.
        self.front = I::from_usize(0);
    }
442
443 pub fn swap(&mut self, i: I, j: I) {
463 let i = self
464 .physical_index(i)
465 .expect("index `i` is out of bounds in `swap`");
466 let j = self
467 .physical_index(j)
468 .expect("index `j` is out of bounds in `swap`");
469 self.storage_mut().swap(i, j);
470 }
471
    /// Removes and returns the element at `index`, filling the hole with the
    /// *front* element (O(1); does not preserve order). Returns `None` if
    /// `index` is out of bounds.
    pub fn swap_remove_front(&mut self, index: I) -> Option<T> {
        let index = self.physical_index(index)?;
        let front = self.front.as_usize();

        unsafe {
            // Read the front element out, then overwrite the removed slot
            // with it; when `index == front` this degenerates to a plain
            // pop-front (replace returns the element just read).
            let last = ptr_at_index(&self.buf, front).read();
            let hole = mut_ptr_at_index(&mut self.buf, index);
            self.len = I::from_usize(self.len() - 1);
            self.front = I::from_usize((front + 1) % self.capacity());
            Some(hole.replace(last))
        }
    }
501
    /// Removes and returns the element at `index`, filling the hole with the
    /// *back* element (O(1); does not preserve order). Returns `None` if
    /// `index` is out of bounds.
    pub fn swap_remove_back(&mut self, index: I) -> Option<T> {
        let index = self.physical_index(index)?;
        let back = (self.front.as_usize() + self.len() - 1) % self.capacity();

        unsafe {
            // Read the back element out, then overwrite the removed slot with
            // it; when `index` is the back slot this is a self-replacement.
            let last = ptr_at_index(&self.buf, back).read();
            let hole = mut_ptr_at_index(&mut self.buf, index);
            self.len = I::from_usize(self.len() - 1);
            Some(hole.replace(last))
        }
    }
530
    /// Inserts `value` at logical `index`, shifting whichever side of the
    /// deque is shorter. Returns `Err(value)` if the deque is full; panics
    /// if `index > len`.
    pub fn try_insert(&mut self, index: I, value: T) -> Result<(), T> {
        if self.is_full() {
            return Err(value);
        }

        let index = index.as_usize();
        if index > self.len() {
            panic!("index out of bounds in `try_insert`");
        }

        let cap = self.capacity();
        let front = self.front.as_usize();
        let back = front + self.len();

        // Deliberately strict `<` (unlike `remove`'s `<=`): the insertion
        // needs one free slot past `back`, so `back == cap` must take the
        // wrapped paths below.
        let contiguous = back < self.capacity();

        let distance_to_front = index;
        let distance_to_back = self.len() - index;
        // Shift whichever run of elements is shorter.
        let move_front = distance_to_front < distance_to_back;

        let new_front = if move_front {
            (front + self.capacity() - 1) % self.capacity()
        } else {
            front
        };

        // Does the insertion point lie past the physical end of the buffer?
        let index_wrapped = new_front + index >= self.capacity();

        match (contiguous, move_front, index_wrapped) {
            (true, true, _) => {
                // Contiguous, shifting the front run one slot toward the
                // (possibly wrapped) new front, then write the freed slot.
                if distance_to_front > 0 {
                    let dst = mut_ptr_at_index(&mut self.buf, new_front);
                    let src = ptr_at_index(&self.buf, front);
                    unsafe { core::ptr::copy(src, dst, 1); }

                    let dst = mut_ptr_at_index(&mut self.buf, front);
                    let src = ptr_at_index(&self.buf, front + 1);
                    unsafe { core::ptr::copy(src, dst, distance_to_front - 1); }
                }

                let ptr = mut_ptr_at_index(&mut self.buf, (new_front + index) % cap);
                unsafe { ptr.write(value); }
            }
            (true, false, _) => {
                // Contiguous, shifting the back run one slot to the right
                // (memmove handles the overlap), then write in place.
                let physical_index = front + index;
                let src = ptr_at_index(&self.buf, physical_index);
                let dst = mut_ptr_at_index(&mut self.buf, physical_index + 1);
                unsafe { core::ptr::copy(src, dst, distance_to_back); }
                let ptr = mut_ptr_at_index(&mut self.buf, physical_index);
                unsafe { ptr.write(value); }
            }
            (false, true, false) => {
                // Wrapped contents, but the shifted front run does not cross
                // the physical end: write at the new front, then rotate the
                // value into its logical position.
                let ptr = mut_ptr_at_index(&mut self.buf, new_front);
                unsafe {
                    ptr.write(value);
                }
                self.storage_mut()[new_front..].rotate_left(1);
            }
            (false, false, true) => {
                // Wrapped contents, back run sits entirely in the low part of
                // the buffer: write past it and rotate the value into place.
                let physical_index = (front + index) % self.capacity();
                let ptr = mut_ptr_at_index(&mut self.buf, physical_index + distance_to_back);
                unsafe {
                    ptr.write(value);
                }
                self.storage_mut()[physical_index..=physical_index + distance_to_back]
                    .rotate_right(1);
            }
            (false, true, true) => {
                // Front run crosses the physical end: place `value` in the
                // low part first (saving the displaced element), then shuffle
                // the high part so the displaced element lands at `new_front`.
                let physical_index = (new_front + index) % self.capacity();
                let ptr = mut_ptr_at_index(&mut self.buf, 0);
                let tmp = unsafe { ptr.replace(value) };
                self.storage_mut()[..=physical_index].rotate_left(1);

                let ptr = mut_ptr_at_index(&mut self.buf, new_front);
                unsafe {
                    ptr.write(tmp);
                }
                self.storage_mut()[new_front..].rotate_left(1);
            }
            (false, false, false) => {
                // Back run crosses the physical end: displace the element at
                // the buffer's last slot, rotate the high part, then re-seat
                // the displaced element in the low part.
                let physical_index = (front + index) % cap;
                let ptr = mut_ptr_at_index(&mut self.buf, cap - 1);
                let tmp = unsafe { ptr.replace(value) };
                self.storage_mut()[physical_index..].rotate_right(1);

                let back = back % cap;
                let ptr = mut_ptr_at_index(&mut self.buf, back);
                unsafe {
                    ptr.write(tmp);
                }
                self.storage_mut()[..=back].rotate_right(1);
            }
        }

        self.len = I::from_usize(self.len() + 1);
        self.front = I::from_usize(new_front);
        Ok(())
    }
665
666 pub fn insert(&mut self, index: I, value: T) {
678 if self.try_insert(index, value).is_err() {
679 panic!("deque is already at capacity");
680 }
681 }
682
    /// Removes and returns the element at logical `index`, shifting whichever
    /// side of the deque is shorter to close the gap. Returns `None` if the
    /// index is out of bounds.
    pub fn remove(&mut self, index: I) -> Option<T> {
        let index = index.as_usize();
        if index >= self.len() {
            return None;
        }

        let cap = self.capacity();
        let front = self.front.as_usize();
        let back = front + self.len();
        // `<=` here (unlike `try_insert`): removal frees a slot, so a deque
        // ending exactly at the physical end is still contiguous.
        let contiguous = back <= cap;

        let distance_to_front = index;
        let distance_to_back = self.len() - index - 1;
        // Shift whichever run of elements is shorter.
        let move_front = distance_to_front < distance_to_back;

        let index_wrapped = front + index >= cap;
        let physical_index = (front + index) % cap;

        // Read the victim out before the surrounding elements are moved.
        let ptr = ptr_at_index(&self.buf, physical_index);
        let result = unsafe { ptr.read() };

        match (contiguous, move_front, index_wrapped) {
            (true, true, _) | (false, true, false) => {
                // Front run (no wrap crossing): shift it one slot to the
                // right and advance `front`.
                unsafe {
                    let src = ptr_at_index(&self.buf, front);
                    let dst = mut_ptr_at_index(&mut self.buf, front + 1);
                    core::ptr::copy(src, dst, distance_to_front);
                }

                self.front = I::from_usize((front + 1) % cap);
            }
            (true, false, _) | (false, false, true) => {
                // Back run (no wrap crossing): shift it one slot to the left.
                unsafe {
                    let src = ptr_at_index(&self.buf, physical_index + 1);
                    let dst = mut_ptr_at_index(&mut self.buf, physical_index);
                    core::ptr::copy(src, dst, distance_to_back);
                }
            }
            (false, true, true) => {
                // Front run crosses the physical end: shift the low part
                // right, carry the last physical slot's element down to 0,
                // then shift the high part right; finally advance `front`.
                unsafe {
                    let src = self.buf.get_ptr().cast::<T>();
                    let dst = mut_ptr_at_index(&mut self.buf, 1);
                    core::ptr::copy(src, dst, physical_index);

                    let src = ptr_at_index(&self.buf, cap - 1);
                    let dst = self.buf.get_mut_ptr().cast::<T>();
                    core::ptr::copy(src, dst, 1);

                    let src = ptr_at_index(&self.buf, front);
                    let dst = mut_ptr_at_index(&mut self.buf, front + 1);
                    core::ptr::copy(src, dst, cap - front - 1);
                }

                self.front = I::from_usize((front + 1) % cap);
            }
            (false, false, false) => {
                // Back run crosses the physical end: shift the high part
                // left, carry slot 0 up to the last physical slot, then shift
                // the low part left.
                unsafe {
                    let src = ptr_at_index(&self.buf, physical_index + 1);
                    let dst = mut_ptr_at_index(&mut self.buf, physical_index);
                    core::ptr::copy(src, dst, cap - physical_index - 1);

                    let src = self.buf.get_ptr().cast::<T>();
                    let dst = mut_ptr_at_index(&mut self.buf, cap - 1);
                    core::ptr::copy(src, dst, 1);

                    let src = ptr_at_index(&self.buf, 1);
                    let dst = self.buf.get_mut_ptr().cast::<T>();
                    core::ptr::copy(src, dst, back % cap - 1);
                }
            }
        }

        self.len = I::from_usize(self.len() - 1);
        Some(result)
    }
786
    /// Overwrites the element at logical `index` with `value`, returning the
    /// previous element. Panics if `index` is out of bounds.
    pub fn replace(&mut self, index: I, value: T) -> T {
        let index = self
            .physical_index(index)
            .expect("index out of bounds in `replace`");
        // SAFETY: `physical_index` returned an occupied slot.
        unsafe { mut_ptr_at_index(&mut self.buf, index).replace(value) }
    }
809
810 pub fn retain<F>(&mut self, mut f: F)
828 where
829 F: FnMut(&T) -> bool,
830 {
831 let capacity = self.capacity();
832 let old_len = self.len();
833 let mut new_len = 0;
834
835 for i in 0..old_len {
836 let idx = i % capacity;
837 let src = ptr_at_index(&self.buf, idx);
838 let retain = f(unsafe { &*src });
839
840 if retain {
841 let dst = mut_ptr_at_index(&mut self.buf, new_len % capacity);
842 unsafe { core::ptr::copy(src, dst, 1); }
843 new_len += 1;
844 } else {
845 let to_drop = mut_ptr_at_index(&mut self.buf, idx);
846 unsafe { core::ptr::drop_in_place(to_drop); }
847 }
848 }
849
850 self.len = I::from_usize(new_len);
851 }
852
    /// Rotates left by moving `mid` front elements to just past the back.
    /// Caller must ensure `mid` is the shorter side (`mid * 2 <= len`), so
    /// source and destination ranges cannot overlap.
    fn rotate_left_inner(&mut self, mid: usize) {
        debug_assert!(mid * 2 <= self.len());

        let cap = self.capacity();
        let front = self.front.as_usize();
        let back = front + self.len();
        let contiguous = back <= cap;

        // How many elements can be copied before either the destination
        // (contiguous case) or the source (wrapped case) hits the physical
        // end of the buffer.
        let first_count = if contiguous {
            usize::min(cap - back, mid)
        } else {
            usize::min(cap - front, mid)
        };

        let src = ptr_at_index(&self.buf, front);
        let dst = mut_ptr_at_index(&mut self.buf, back % cap);
        unsafe { core::ptr::copy(src, dst, first_count); }

        // Remainder after the wrap point, if any.
        let src = ptr_at_index(&self.buf, (front + first_count) % cap);
        let dst = mut_ptr_at_index(&mut self.buf, (back + first_count) % cap);
        unsafe { core::ptr::copy(src, dst, mid - first_count); }

        self.front = I::from_usize((front + mid) % cap);
    }
879
    /// Rotates right by moving `k` back elements to just before the front.
    /// Caller must ensure `k` is the shorter side (`k * 2 <= len`), so source
    /// and destination ranges cannot overlap.
    fn rotate_right_inner(&mut self, k: usize) {
        debug_assert!(k * 2 <= self.len());

        let cap = self.capacity();
        let front = self.front.as_usize();
        let back = front + self.len();
        let contiguous = back <= cap;

        // How many elements can be copied before either the destination
        // (contiguous case) or the source (wrapped case) crosses the
        // physical start of the buffer.
        let first_count = if contiguous {
            usize::min(front, k)
        } else {
            usize::min(back % cap, k)
        };

        let src = ptr_at_index(&self.buf, (back - first_count) % cap);
        let dst = mut_ptr_at_index(&mut self.buf, (front + cap - first_count) % cap);
        unsafe { core::ptr::copy(src, dst, first_count); }

        // Remainder on the other side of the wrap point, if any.
        let src = ptr_at_index(&self.buf, (back + cap - k) % cap);
        let dst = mut_ptr_at_index(&mut self.buf, (front + cap - k) % cap);
        unsafe { core::ptr::copy(src, dst, k - first_count); }

        self.front = I::from_usize((front + cap - k) % cap);
    }
906
907 pub fn rotate_left(&mut self, mid: I) {
932 let mid = mid.as_usize();
933 assert!(mid <= self.len());
934 let k = self.len() - mid;
935 if mid <= k {
936 self.rotate_left_inner(mid);
937 } else {
938 self.rotate_right_inner(k);
939 }
940 }
941
942 pub fn rotate_right(&mut self, k: I) {
967 let k = k.as_usize();
968 assert!(k <= self.len());
969 let mid = self.len() - k;
970 if k <= mid {
971 self.rotate_right_inner(k);
972 } else {
973 self.rotate_left_inner(mid);
974 }
975 }
976
    /// Returns the contents as a pair of slices in logical order; the second
    /// slice is empty unless the contents wrap around the physical end of
    /// the buffer.
    pub fn as_slices(&self) -> (&[T], &[T]) {
        let front = self.front.as_usize();
        let back = front + self.len();
        let ptr = self.buf.get_ptr().cast::<T>();
        if back <= self.capacity() {
            // Contiguous: a single slice covers everything.
            let slice = unsafe { core::slice::from_raw_parts(ptr.add(front), self.len()) };
            (slice, &[])
        } else {
            // Wrapped: `[front..cap)` followed by the spill-over at slot 0.
            let fst =
                unsafe { core::slice::from_raw_parts(ptr.add(front), self.capacity() - front) };
            let snd =
                unsafe { core::slice::from_raw_parts(ptr, self.len() - (self.capacity() - front)) };
            (fst, snd)
        }
    }
1007
    /// Returns the contents as a pair of mutable slices in logical order; the
    /// second slice is empty unless the contents wrap around the physical end
    /// of the buffer.
    pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
        let front = self.front.as_usize();
        let back = front + self.len();
        if back <= self.capacity() {
            // Contiguous: a single mutable slice covers everything.
            let ptr = self.buf.get_mut_ptr().cast::<T>();
            let slice = unsafe { core::slice::from_raw_parts_mut(ptr.add(front), self.len()) };
            (slice, &mut [])
        } else {
            let len = self.len();
            let cap = self.capacity();

            // Split the full storage into the high run (`fst`, from `front`)
            // and the wrapped low run (`snd`, from slot 0).
            let storage = self.storage_mut();
            let (rest, fst) = storage.split_at_mut(front);
            let (snd, _) = rest.split_at_mut(len - (cap - front));

            // Re-derive both slices from raw pointers to decouple their
            // lifetimes from the temporary `storage` borrow.
            let fst = unsafe {
                let ptr = fst.as_mut_ptr().cast::<T>();
                core::slice::from_raw_parts_mut(ptr, fst.len())
            };
            let snd = unsafe {
                let ptr = snd.as_mut_ptr().cast::<T>();
                core::slice::from_raw_parts_mut(ptr, snd.len())
            };

            (fst, snd)
        }
    }
1053
    /// Rearranges the contents so they occupy one contiguous run starting at
    /// physical slot 0 (when currently wrapped), and returns them as a single
    /// mutable slice.
    pub fn make_contiguous(&mut self) -> &mut [T] {
        let front = self.front.as_usize();
        let back = front + self.len();
        if back <= self.capacity() {
            // Already contiguous; no data movement needed.
            let ptr = self.buf.get_mut_ptr().cast::<T>();
            unsafe { core::slice::from_raw_parts_mut(ptr.add(front), self.len()) }
        } else {
            // Rotating the *entire* storage moves the wrapped contents to the
            // start. NOTE(review): this also shuffles unoccupied slots, which
            // only holds up if `T`'s bytes are movable regardless of
            // initialization — consistent with `storage_mut`'s usage elsewhere.
            self.storage_mut().rotate_left(front);
            self.front = I::from_usize(0);

            let ptr = self.buf.get_mut_ptr().cast::<T>();
            unsafe { core::slice::from_raw_parts_mut(ptr, self.len()) }
        }
    }
1089
    /// Returns an iterator over references to the elements in logical order.
    pub fn iter(&self) -> Iter<'_, T, S, I> {
        // The iterator carries its own front/len cursors over the shared buffer.
        Iter {
            front: self.front,
            len: self.len,
            buf: &self.buf,
            _ref: PhantomData,
        }
    }
1114
    /// Returns an iterator over mutable references to the elements in
    /// logical order.
    pub fn iter_mut(&mut self) -> IterMut<'_, T, S, I> {
        IterMut {
            front: self.front,
            len: self.len,
            buf: &mut self.buf,
            _ref: PhantomData,
        }
    }
1137
    /// Returns an iterator over the logical index `range` of the deque.
    pub fn range<R: core::ops::RangeBounds<I>>(&self, range: R) -> Iter<'_, T, S, I> {
        // `normalize_range` resolves the bounds against `len` (panicking on
        // invalid ranges, per its contract elsewhere in the crate).
        let Range { start, end } = normalize_range(range, self.len());
        Iter {
            front: I::from_usize((self.front.as_usize() + start) % self.capacity()),
            len: I::from_usize(end - start),
            buf: &self.buf,
            _ref: PhantomData,
        }
    }
1165
    /// Returns a mutable iterator over the logical index `range` of the deque.
    pub fn range_mut<R: core::ops::RangeBounds<I>>(&mut self, range: R) -> IterMut<'_, T, S, I> {
        let Range { start, end } = normalize_range(range, self.len());
        IterMut {
            front: I::from_usize((self.front.as_usize() + start) % self.capacity()),
            len: I::from_usize(end - start),
            buf: &mut self.buf,
            _ref: PhantomData,
        }
    }
1191
    /// Returns a draining iterator over the logical index `range`; the
    /// selected elements are removed from the deque even if the iterator is
    /// dropped before being fully consumed.
    pub fn drain<R: core::ops::RangeBounds<I>>(&mut self, range: R) -> Drain<'_, T, S, I> {
        let Range { start, end } = normalize_range(range, self.len());

        let original_len = self.len();
        // Shrink the visible length to the prefix before the drained range,
        // so a leaked `Drain` cannot expose moved-out elements; `Drain`'s
        // `Drop` restores the tail and the final length.
        self.len = I::from_usize(start);

        Drain {
            parent: self,
            original_len,
            target_start: start,
            front_index: start,
            back_index: end,
            target_end: end,
        }
    }
1237}
1238
1239impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Index<I> for Deque<T, S, I> {
1240 type Output = T;
1241
1242 #[inline]
1243 fn index(&self, index: I) -> &T {
1244 self.get(index).expect("out of bounds access")
1245 }
1246}
1247
1248impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> IndexMut<I> for Deque<T, S, I> {
1249 #[inline]
1250 fn index_mut(&mut self, index: I) -> &mut T {
1251 self.get_mut(index).expect("out of bounds access")
1252 }
1253}
1254
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Drop for Deque<T, S, I> {
    fn drop(&mut self) {
        // Drop both halves of the (possibly wrapped) contents in place; the
        // buffer itself is released by `S`'s own `Drop`, if any.
        let (front, back) = self.as_mut_slices();
        let front_ptr = front.as_mut_ptr();
        let back_ptr = back.as_mut_ptr();
        // SAFETY: both slices cover only initialized elements.
        unsafe {
            core::ptr::slice_from_raw_parts_mut(front_ptr, front.len()).drop_in_place();
            core::ptr::slice_from_raw_parts_mut(back_ptr, back.len()).drop_in_place();
        }
    }
}
1266
1267impl<T: Debug, S: Storage<ArrayLayout<T>>, I: Capacity> Debug for Deque<T, S, I> {
1268 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1269 let (front, back) = self.as_slices();
1270 f.debug_list().entries(front).entries(back).finish()
1271 }
1272}
1273
1274impl<T: Hash, S: Storage<ArrayLayout<T>>, I: Capacity> Hash for Deque<T, S, I> {
1275 fn hash<H: Hasher>(&self, state: &mut H) {
1276 self.len().hash(state);
1277 let (front, back) = self.as_slices();
1278 Hash::hash_slice(front, state);
1279 Hash::hash_slice(back, state);
1280 }
1281}
1282
// Element-wise equality between two deques whose wrap points may differ; the
// shorter front slice is compared against re-aligned sub-slices of the other.
impl<AT, AS, AI, BT, BS, BI> PartialEq<Deque<BT, BS, BI>> for Deque<AT, AS, AI>
where
    AT: PartialEq<BT>,
    AS: Storage<ArrayLayout<AT>>,
    BS: Storage<ArrayLayout<BT>>,
    AI: Capacity,
    BI: Capacity,
{
    fn eq(&self, other: &Deque<BT, BS, BI>) -> bool {
        if self.len() != other.len() {
            return false;
        }

        let (self_front, self_back) = self.as_slices();
        let (other_front, other_back) = other.as_slices();

        match self_front.len() {
            // Wrap points coincide: compare slice against slice directly.
            len if len == other_front.len() => self_front == other_front && self_back == other_back,
            // `self` wraps earlier: `other_front` spans `self_front` plus the
            // first `b` elements of `self_back`.
            len if len < other_front.len() => {
                let a = self_front.len();
                let b = other_front.len() - a;
                debug_assert_eq!(self_back[..b].len(), other_front[a..].len());
                debug_assert_eq!(self_back[b..].len(), other_back.len());
                self_front == &other_front[..a]
                    && self_back[..b] == other_front[a..]
                    && &self_back[b..] == other_back
            }
            // `other` wraps earlier: the mirror image of the case above.
            _ => {
                let a = other_front.len();
                let b = self_front.len() - a;
                debug_assert_eq!(self_front[a..].len(), other_back[..b].len());
                debug_assert_eq!(self_back.len(), other_back[b..].len());
                &self_front[..a] == other_front
                    && self_front[a..] == other_back[..b]
                    && self_back == &other_back[b..]
            }
        }
    }
}
1322
// The `PartialEq` impl above is a total equivalence whenever `T: Eq`.
impl<T: Eq, S: Storage<ArrayLayout<T>>, I: Capacity> Eq for Deque<T, S, I> {}
1324
1325impl<T: PartialEq, S: Storage<ArrayLayout<T>>, I: Capacity, R: AsRef<[T]>> PartialEq<R>
1326 for Deque<T, S, I>
1327{
1328 fn eq(&self, other: &R) -> bool {
1329 let other = other.as_ref();
1330 if self.len() != other.len() {
1331 return false;
1332 }
1333
1334 let (front, back) = self.as_slices();
1335 let mid = front.len();
1336 front == &other[..mid] && back == &other[mid..]
1337 }
1338}
1339
1340impl<T, AS, AI, BS, BI> PartialOrd<Deque<T, BS, BI>> for Deque<T, AS, AI>
1341where
1342 T: PartialOrd,
1343 AS: Storage<ArrayLayout<T>>,
1344 BS: Storage<ArrayLayout<T>>,
1345 AI: Capacity,
1346 BI: Capacity,
1347{
1348 fn partial_cmp(&self, other: &Deque<T, BS, BI>) -> Option<core::cmp::Ordering> {
1349 self.iter().partial_cmp(other.iter())
1350 }
1351}
1352
1353impl<T: Ord, S: Storage<ArrayLayout<T>>, I: Capacity> Ord for Deque<T, S, I> {
1354 fn cmp(&self, other: &Self) -> core::cmp::Ordering {
1355 self.iter().cmp(other.iter())
1356 }
1357}
1358
1359impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Extend<T> for Deque<T, S, I> {
1360 fn extend<It: IntoIterator<Item = T>>(&mut self, iter: It) {
1361 iter.into_iter().for_each(|item| self.push_back(item));
1362 }
1363}
1364
1365impl<'a, T: 'a + Clone, S: Storage<ArrayLayout<T>>, I: Capacity> Extend<&'a T> for Deque<T, S, I> {
1366 fn extend<It: IntoIterator<Item = &'a T>>(&mut self, iter: It) {
1367 iter.into_iter()
1368 .for_each(|item| self.push_back(item.clone()));
1369 }
1370}
1371
/// Immutable iterator over a [`Deque`], yielding `&T` in logical order.
pub struct Iter<'a, T: 'a, S: Storage<ArrayLayout<T>>, I: Capacity> {
    // Physical slot of the next element to yield from the front.
    front: I,
    // Number of elements still to be yielded.
    len: I,
    buf: &'a S,
    _ref: PhantomData<&'a T>,
}
1382
impl<'a, T: 'a + Debug, S: Storage<ArrayLayout<T>>, I: Capacity> Debug for Iter<'a, T, S, I> {
    /// Formats the remaining elements as two slices: the run up to the
    /// physical end of the buffer, and the wrapped run (empty if contiguous).
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let (front, back) = {
            let front = self.front.as_usize();
            let back = front + self.len.as_usize();

            if back <= self.buf.capacity() {
                // Remaining elements are contiguous.
                unsafe {
                    (
                        core::slice::from_raw_parts(
                            ptr_at_index(self.buf, front),
                            self.len.as_usize(),
                        ),
                        &[][..],
                    )
                }
            } else {
                // Remaining elements wrap past the end of the buffer.
                unsafe {
                    (
                        core::slice::from_raw_parts(
                            ptr_at_index(self.buf, front),
                            self.buf.capacity() - front,
                        ),
                        core::slice::from_raw_parts(
                            ptr_at_index(self.buf, 0),
                            back - self.buf.capacity(),
                        ),
                    )
                }
            }
        };

        f.debug_tuple("Iter").field(&front).field(&back).finish()
    }
}
1418
impl<'a, T: 'a, S: Storage<ArrayLayout<T>>, I: Capacity> Iterator for Iter<'a, T, S, I> {
    type Item = &'a T;

    /// Yields the next front element, advancing the cursor with wrap-around.
    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        let len = self.len.as_usize();
        if len == 0 {
            return None;
        }

        let front = self.front.as_usize();
        self.front = I::from_usize((front + 1) % self.buf.capacity());
        self.len = I::from_usize(len - 1);
        // SAFETY: `len > 0` guarantees slot `front` holds a live element.
        let result = unsafe { ptr_at_index(self.buf, front).as_ref() };
        debug_assert!(result.is_some());
        result
    }

    /// Exact: the cursor knows precisely how many elements remain.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len.as_usize();
        (len, Some(len))
    }
}
1443
impl<'a, T: 'a, S: Storage<ArrayLayout<T>>, I: Capacity> DoubleEndedIterator for Iter<'a, T, S, I> {
    /// Yields the last remaining element; only `len` shrinks, `front` stays.
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        let len = self.len.as_usize();
        if len == 0 {
            return None;
        }

        let front = self.front.as_usize();
        let idx = (front + len - 1) % self.buf.capacity();
        self.len = I::from_usize(len - 1);
        // SAFETY: `len > 0` guarantees slot `idx` holds a live element.
        let result = unsafe { ptr_at_index(self.buf, idx).as_ref() };
        debug_assert!(result.is_some());
        result
    }
}
1460
// `size_hint` is exact and `next` keeps returning `None` once exhausted.
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> ExactSizeIterator for Iter<'_, T, S, I> {}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> FusedIterator for Iter<'_, T, S, I> {}
1463
/// Mutable iterator over a [`Deque`], yielding `&mut T` in logical order.
pub struct IterMut<'a, T: 'a, S: Storage<ArrayLayout<T>>, I: Capacity> {
    // Physical slot of the next element to yield from the front.
    front: I,
    // Number of elements still to be yielded.
    len: I,
    buf: &'a mut S,
    _ref: PhantomData<&'a mut T>,
}
1474
1475impl<'a, T: 'a + Debug, S: Storage<ArrayLayout<T>>, I: Capacity> Debug for IterMut<'a, T, S, I> {
1476 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> fmt::Result {
1477 let (front, back) = {
1478 let front = self.front.as_usize();
1479 let back = front + self.len.as_usize();
1480
1481 if back <= self.buf.capacity() {
1482 unsafe {
1483 (
1484 core::slice::from_raw_parts(
1485 ptr_at_index(&self.buf, front),
1486 self.len.as_usize(),
1487 ),
1488 &[][..],
1489 )
1490 }
1491 } else {
1492 unsafe {
1493 (
1494 core::slice::from_raw_parts(
1495 ptr_at_index(&self.buf, front),
1496 self.buf.capacity() - front,
1497 ),
1498 core::slice::from_raw_parts(
1499 ptr_at_index(&self.buf, 0),
1500 back - self.buf.capacity(),
1501 ),
1502 )
1503 }
1504 }
1505 };
1506
1507 f.debug_tuple("Iter").field(&front).field(&back).finish()
1508 }
1509}
1510
impl<'a, T: 'a, S: Storage<ArrayLayout<T>>, I: Capacity> Iterator for IterMut<'a, T, S, I> {
    type Item = &'a mut T;

    /// Yields the next front element, advancing the cursor with wrap-around.
    #[inline]
    fn next(&mut self) -> Option<&'a mut T> {
        let len = self.len.as_usize();
        if len == 0 {
            return None;
        }

        let front = self.front.as_usize();
        self.front = I::from_usize((front + 1) % self.buf.capacity());
        self.len = I::from_usize(len - 1);
        // SAFETY: `len > 0` guarantees slot `front` holds a live element, and
        // the cursor never revisits a slot, so the `&'a mut` refs never alias.
        let result = unsafe { mut_ptr_at_index(&mut self.buf, front).as_mut() };
        debug_assert!(result.is_some());
        result
    }

    /// Exact: the cursor knows precisely how many elements remain.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len.as_usize();
        (len, Some(len))
    }
}
1535
impl<'a, T: 'a, S: Storage<ArrayLayout<T>>, I: Capacity> DoubleEndedIterator
    for IterMut<'a, T, S, I>
{
    /// Yields the last remaining element; only `len` shrinks, `front` stays.
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut T> {
        let len = self.len.as_usize();
        if len == 0 {
            return None;
        }

        let front = self.front.as_usize();
        let idx = (front + len - 1) % self.buf.capacity();
        self.len = I::from_usize(len - 1);
        // SAFETY: `len > 0` guarantees slot `idx` holds a live element, and
        // the cursor never revisits a slot, so the `&'a mut` refs never alias.
        let result = unsafe { mut_ptr_at_index(&mut self.buf, idx).as_mut() };
        debug_assert!(result.is_some());
        result
    }
}
1554
// `size_hint` is exact and `next` keeps returning `None` once exhausted.
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> ExactSizeIterator for IterMut<'_, T, S, I> {}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> FusedIterator for IterMut<'_, T, S, I> {}
1557
/// Owning iterator over a [`Deque`], yielding `T` in logical order.
pub struct IntoIter<T, S: Storage<ArrayLayout<T>>, I: Capacity> {
    // Remaining elements stay in the deque; `Deque::drop` cleans up leftovers.
    inner: Deque<T, S, I>,
}
1566
1567impl<T: Debug, S: Storage<ArrayLayout<T>>, I: Capacity> Debug for IntoIter<T, S, I> {
1568 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1569 f.debug_tuple("IntoIterator").field(&self.inner).finish()
1570 }
1571}
1572
1573impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Iterator for IntoIter<T, S, I> {
1574 type Item = T;
1575
1576 #[inline]
1577 fn next(&mut self) -> Option<T> {
1578 self.inner.pop_front()
1579 }
1580
1581 #[inline]
1582 fn size_hint(&self) -> (usize, Option<usize>) {
1583 let len = self.inner.len();
1584 (len, Some(len))
1585 }
1586}
1587
1588impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> DoubleEndedIterator for IntoIter<T, S, I> {
1589 #[inline]
1590 fn next_back(&mut self) -> Option<T> {
1591 self.inner.pop_back()
1592 }
1593}
1594
// `size_hint` is exact and `pop_front`/`pop_back` keep returning `None`.
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> ExactSizeIterator for IntoIter<T, S, I> {}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> FusedIterator for IntoIter<T, S, I> {}
1597
1598impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> IntoIterator for Deque<T, S, I> {
1599 type Item = T;
1600 type IntoIter = IntoIter<T, S, I>;
1601
1602 fn into_iter(self) -> IntoIter<T, S, I> {
1604 IntoIter { inner: self }
1605 }
1606}
1607
1608impl<'a, T, S: Storage<ArrayLayout<T>>, I: Capacity> IntoIterator for &'a Deque<T, S, I> {
1609 type Item = &'a T;
1610 type IntoIter = Iter<'a, T, S, I>;
1611
1612 fn into_iter(self) -> Iter<'a, T, S, I> {
1613 self.iter()
1614 }
1615}
1616
1617impl<'a, T, S: Storage<ArrayLayout<T>>, I: Capacity> IntoIterator for &'a mut Deque<T, S, I> {
1618 type Item = &'a mut T;
1619 type IntoIter = IterMut<'a, T, S, I>;
1620
1621 fn into_iter(self) -> IterMut<'a, T, S, I> {
1622 self.iter_mut()
1623 }
1624}
1625
/// Draining iterator returned by [`Deque::drain`]; the selected range is
/// removed from the parent deque when this is dropped.
pub struct Drain<'p, T, S: Storage<ArrayLayout<T>>, I: Capacity> {
    parent: &'p mut Deque<T, S, I>,
    // Length of the parent before draining began.
    original_len: usize,
    // Logical start of the drained range (fixed).
    target_start: usize,
    // Next logical index to yield from the front.
    front_index: usize,
    // One past the last logical index still to be yielded from the back.
    back_index: usize,
    // Logical end of the drained range (fixed).
    target_end: usize,
}
1638
impl<T: Debug, S: Storage<ArrayLayout<T>>, I: Capacity> Debug for Drain<'_, T, S, I> {
    /// Formats the drained range bounds and the not-yet-yielded elements.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain")
            .field(&self.target_start)
            .field(&self.target_end)
            .field(
                // `range` over the parent still works here because only
                // `parent.len` was shrunk, not the underlying slots.
                &self
                    .parent
                    .range(I::from_usize(self.front_index)..I::from_usize(self.back_index)),
            )
            .finish()
    }
}
1652
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Iterator for Drain<'_, T, S, I> {
    type Item = T;

    /// Moves out the next element of the drained range from the front.
    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.front_index == self.back_index {
            return None;
        }

        let idx = (self.parent.front.as_usize() + self.front_index) % self.parent.capacity();
        self.front_index += 1;

        // SAFETY: indices in `front_index..back_index` are occupied and are
        // yielded at most once; leftovers are dropped by `Drain::drop`.
        unsafe { Some(ptr_at_index(&self.parent.buf, idx).read()) }
    }

    /// Exact: the unyielded span of the drained range.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.back_index - self.front_index;
        (len, Some(len))
    }
}
1674
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> DoubleEndedIterator for Drain<'_, T, S, I> {
    /// Moves out the next element of the drained range from the back.
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.back_index == self.front_index {
            return None;
        }

        let idx = (self.parent.front.as_usize() + self.back_index - 1) % self.parent.capacity();
        self.back_index -= 1;

        // SAFETY: indices in `front_index..back_index` are occupied and are
        // yielded at most once; leftovers are dropped by `Drain::drop`.
        unsafe { Some(ptr_at_index(&self.parent.buf, idx).read()) }
    }
}
1688
// `size_hint` is exact and the front/back cursors never cross.
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> ExactSizeIterator for Drain<'_, T, S, I> {}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> FusedIterator for Drain<'_, T, S, I> {}
1691
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Drop for Drain<'_, T, S, I> {
    /// Drops any elements the iterator did not yield, then closes the gap left
    /// by the drained range and fixes up the parent's `front` and `len`.
    fn drop(&mut self) {
        let cap = self.parent.capacity();
        // Unwrapped positions (may be >= cap) of the first and one-past-last
        // elements not yet yielded; these still own values to be destroyed.
        let front = self.parent.front.as_usize() + self.front_index;
        let back = self.parent.front.as_usize() + self.back_index;

        if front >= cap || back <= cap {
            // The remaining range is physically contiguous (entirely after the
            // wrap-around point, or entirely before it): drop it as one slice.
            let ptr = mut_ptr_at_index(&mut self.parent.buf, front % cap);
            // SAFETY: `back - front` initialized, unconsumed elements start at
            // `ptr`; they are dropped exactly once here.
            unsafe { core::ptr::slice_from_raw_parts_mut(ptr, back - front).drop_in_place(); }
        } else {
            // The remaining range straddles the end of the buffer: first drop
            // the part that runs up to the physical end...
            let ptr = mut_ptr_at_index(&mut self.parent.buf, front);
            let len = cap - front;
            // SAFETY: as above, for the `front..cap` portion.
            unsafe { core::ptr::slice_from_raw_parts_mut(ptr, len).drop_in_place(); }

            // ...then the part that wrapped around to physical index 0.
            let ptr = mut_ptr_at_index(&mut self.parent.buf, 0);
            let len = (back - front) - len;
            // SAFETY: as above, for the wrapped-around portion.
            unsafe { core::ptr::slice_from_raw_parts_mut(ptr, len).drop_in_place(); }
        }

        // The drained range is now a gap of moved-out slots. Close it by
        // shifting whichever retained segment (in front of the gap, or behind
        // it) is shorter. All positions below are unwrapped (may be >= cap).
        let front = self.parent.front.as_usize();
        let back = front + self.original_len;
        let target_start = front + self.target_start;
        let target_end = front + self.target_end;
        // Does the gap itself cross the wrap-around point?
        let target_wrapped = target_start <= cap && cap <= target_end;

        let distance_to_front = self.target_start;
        let distance_to_back = self.original_len - self.target_end;
        // Shift the front segment if it is the shorter one, else the back one.
        let move_front = distance_to_front < distance_to_back;
        // Does the segment that will be moved cross the wrap-around point?
        let source_wrapped = if move_front {
            front < cap && cap < target_start
        } else {
            target_end < cap && cap < back
        };

        // Each arm relocates the chosen segment over the gap with potentially
        // overlapping `ptr::copy` calls, split into pieces wherever the source
        // or destination crosses the wrap-around point.
        match (move_front, target_wrapped, source_wrapped) {
            (false, false, false) => {
                // Back segment, nothing wraps: a single copy suffices.
                let src = ptr_at_index(&self.parent.buf, target_end % cap);
                let dst = mut_ptr_at_index(&mut self.parent.buf, target_start % cap);
                // SAFETY: `distance_to_back` initialized elements start at
                // `src`; `ptr::copy` handles the overlap with `dst`.
                unsafe { core::ptr::copy(src, dst, distance_to_back); }
            }
            (true, false, false) => {
                // Front segment, nothing wraps: shift it back and advance the
                // deque's front to the new start of the data.
                let new_front = target_end - distance_to_front;
                self.parent.front = I::from_usize(new_front);

                let src = ptr_at_index(&self.parent.buf, front);
                let dst = mut_ptr_at_index(&mut self.parent.buf, new_front);
                // SAFETY: `distance_to_front` initialized elements start at `src`.
                unsafe { core::ptr::copy(src, dst, distance_to_front); }
            }
            (false, true, false) => {
                // Back segment moved, but the gap crosses the wrap-around
                // point: fill up to the physical end first...
                let fst_count = usize::min(cap - target_start, distance_to_back);
                let src = ptr_at_index(&self.parent.buf, target_end % cap);
                let dst = mut_ptr_at_index(&mut self.parent.buf, target_start % cap);
                // SAFETY: both pieces stay within the initialized back segment.
                unsafe { core::ptr::copy(src, dst, fst_count); }

                // ...then continue past the wrap with the rest of the segment.
                let dst_idx = (target_start + fst_count) % cap;
                let src = ptr_at_index(&self.parent.buf, (target_end + fst_count) % cap);
                let dst = mut_ptr_at_index(&mut self.parent.buf, dst_idx);
                // SAFETY: as above.
                unsafe { core::ptr::copy(src, dst, distance_to_back - fst_count); }
            }
            (true, true, false) => {
                // Front segment moved, gap crosses the wrap-around point:
                // first fill the wrapped part of the gap from the segment's tail...
                let fst_count = usize::min(target_end - cap, distance_to_front);
                let src = ptr_at_index(&self.parent.buf, target_start - fst_count);
                let dst = mut_ptr_at_index(&mut self.parent.buf, (target_end - fst_count) % cap);
                // SAFETY: both pieces stay within the initialized front segment.
                unsafe { core::ptr::copy(src, dst, fst_count); }

                let new_front = (target_end - distance_to_front) % cap;
                self.parent.front = I::from_usize(new_front);

                // ...then move the remainder of the segment to the new front.
                let src = ptr_at_index(&self.parent.buf, front);
                let dst = mut_ptr_at_index(&mut self.parent.buf, new_front);
                // SAFETY: as above.
                unsafe { core::ptr::copy(src, dst, distance_to_front - fst_count); }
            }
            (false, false, true) => {
                // Back segment moved and it is the segment that wraps: up to
                // three copies. First the piece before the physical end...
                let fst_count = cap - target_end;
                let src = ptr_at_index(&self.parent.buf, target_end);
                let dst = mut_ptr_at_index(&mut self.parent.buf, target_start);
                // SAFETY: all three pieces stay within the initialized back segment.
                unsafe { core::ptr::copy(src, dst, fst_count); }

                // ...then as much of the wrapped piece as fits before the end...
                let remaining = distance_to_back - fst_count;
                let snd_count = usize::min(cap - (target_start + fst_count), remaining);
                let src = ptr_at_index(&self.parent.buf, 0);
                let dst = mut_ptr_at_index(&mut self.parent.buf, target_start + fst_count);
                // SAFETY: as above.
                unsafe { core::ptr::copy(src, dst, snd_count); }

                // ...and finally whatever still has to wrap around itself.
                let remaining = remaining - snd_count;
                let src = ptr_at_index(&self.parent.buf, snd_count);
                let dst = mut_ptr_at_index(&mut self.parent.buf, 0);
                // SAFETY: as above.
                unsafe { core::ptr::copy(src, dst, remaining); }
            }
            (true, false, true) => {
                // Front segment moved and it is the segment that wraps: up to
                // three copies, mirroring the previous arm. First the wrapped
                // piece at the start of the buffer...
                let fst_count = target_start - cap;
                let src = ptr_at_index(&self.parent.buf, 0);
                let dst = mut_ptr_at_index(&mut self.parent.buf, target_end - cap - fst_count);
                // SAFETY: all three pieces stay within the initialized front segment.
                unsafe { core::ptr::copy(src, dst, fst_count); }

                // ...then the tail of the unwrapped piece...
                let remaining = distance_to_front - fst_count;
                let snd_count = usize::min(target_end - cap - fst_count, remaining);
                let dst_idx = target_end - cap - (fst_count + snd_count);

                let src = ptr_at_index(&self.parent.buf, cap - snd_count);
                let dst = mut_ptr_at_index(&mut self.parent.buf, dst_idx);
                // SAFETY: as above.
                unsafe { core::ptr::copy(src, dst, snd_count); }

                let new_front = (front + (target_end - target_start)) % cap;
                self.parent.front = I::from_usize(new_front);

                // ...and finally the rest, starting at the new front position.
                let remaining = remaining - snd_count;
                let src = ptr_at_index(&self.parent.buf, front);
                let dst = mut_ptr_at_index(&mut self.parent.buf, new_front);
                // SAFETY: as above.
                unsafe { core::ptr::copy(src, dst, remaining); }
            }
            (_, true, true) => {
                // The single wrap-around point cannot lie strictly inside the
                // moved segment and inside the gap at the same time.
                unreachable!();
            }
        }

        // The deque now holds everything except the drained range.
        self.parent.len = I::from_usize(self.original_len - (target_end - target_start));
    }
}
1822
#[cfg(feature = "alloc")]
#[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
impl<T: Copy, I: Capacity> crate::collections::AllocDeque<T, I> {
    /// Constructs a new, empty heap-backed deque able to hold `capacity`
    /// elements without reallocating.
    ///
    /// Calls [`buffer_too_large_for_index_type`] if `capacity` does not
    /// survive the round trip through `usize` for the index type `I`.
    pub fn with_capacity(capacity: I) -> Self {
        let requested = capacity.as_usize();
        // Guard: the value must round-trip through `usize` unchanged.
        if I::from_usize(requested) != capacity {
            buffer_too_large_for_index_type::<I>();
        }

        let storage = crate::storage::AllocStorage::with_capacity(requested);
        Deque {
            front: I::from_usize(0),
            len: I::from_usize(0),
            buf: storage,
            elem: PhantomData,
        }
    }
}
1844
#[cfg(feature = "alloc")]
#[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
impl<T: Copy, I: Capacity> Clone for crate::collections::AllocDeque<T, I> {
    /// Returns a deque with the same capacity holding copies of all elements.
    fn clone(&self) -> Self {
        let capacity = I::from_usize(self.capacity());
        let mut duplicate = Self::with_capacity(capacity);
        // `T: Copy` implies `T: Clone`, so `cloned` performs plain copies here.
        duplicate.extend(self.iter().cloned());
        duplicate
    }
}
1854
impl<T, I: Capacity, const C: usize> Deque<T, [core::mem::MaybeUninit<T>; C], I> {
    /// Constructs a new, empty deque backed by an inline array of `C` slots.
    ///
    /// Calls [`buffer_too_large_for_index_type`] (presumably diverging — TODO
    /// confirm against `crate::storage`) if `C` exceeds what the index type
    /// `I` can represent.
    #[inline]
    pub fn new() -> Self {
        if C > I::MAX_REPRESENTABLE {
            buffer_too_large_for_index_type::<I>();
        }

        Deque {
            front: I::from_usize(0),
            len: I::from_usize(0),
            // SAFETY: the buffer is `[MaybeUninit<T>; C]`, and an array of
            // `MaybeUninit` requires no initialization, so `assume_init` on
            // the uninitialized outer `MaybeUninit` is sound.
            buf: unsafe { core::mem::MaybeUninit::uninit().assume_init() },
            elem: PhantomData,
        }
    }
}
1881
impl<T, I: Capacity, const C: usize> Default for Deque<T, [core::mem::MaybeUninit<T>; C], I> {
    /// Returns an empty deque with inline storage; equivalent to `Deque::new`.
    fn default() -> Self {
        Self::new()
    }
}
1887
1888impl<T: Clone, I: Capacity, const C: usize> Clone for Deque<T, [core::mem::MaybeUninit<T>; C], I> {
1889 fn clone(&self) -> Self {
1890 let mut ret = Self::new();
1891 ret.extend(self.iter().cloned());
1892 ret
1893 }
1894
1895 fn clone_from(&mut self, source: &Self) {
1896 self.clear();
1897 self.extend(source.iter().cloned());
1898 }
1899}
1900
#[cfg(test)]
mod tests {
    // Exercises `try_insert` in every front/back shift configuration,
    // including insertions that straddle the ring buffer's wrap-around point.
    #[test]
    fn all_insertion_cases() {
        let mut backing_region = [core::mem::MaybeUninit::<i32>::uninit(); 8];
        let mut deque = crate::collections::SliceDeque::<i32>::from(&mut backing_region[..]);

        // Insert into an empty deque.
        deque.try_insert(0, 1).unwrap();
        assert_eq!(deque.as_slices(), (&[1][..], &[][..]));

        // Insert at the very front.
        deque.try_insert(0, 2).unwrap();
        assert_eq!(deque.as_slices(), (&[2][..], &[1][..]));

        // Insert in the middle, near the front.
        deque.try_insert(1, 3).unwrap();
        assert_eq!(deque.as_slices(), (&[2][..], &[3, 1][..]));

        deque.try_insert(1, 4).unwrap();
        assert_eq!(deque.as_slices(), (&[2, 4][..], &[3, 1][..]));

        deque.push_back(5);
        deque.push_back(6);
        deque.push_back(7);
        assert_eq!(deque.as_slices(), (&[2, 4][..], &[3, 1, 5, 6, 7][..]));

        // Insert into a nearly full deque, shifting across the wrap point.
        deque.try_insert(3, 8).unwrap();
        assert_eq!(deque.as_slices(), (&[2, 4, 3][..], &[8, 1, 5, 6, 7][..]));

        // Rotate the contents so the front segment is the larger one.
        deque.pop_back();
        deque.pop_back();
        deque.pop_back();
        deque.push_front(5);
        deque.push_front(6);
        assert_eq!(deque.as_slices(), (&[6, 5, 2, 4, 3][..], &[8, 1][..]));

        // Insert near the middle of a wrapped deque.
        deque.try_insert(4, 7).unwrap();
        assert_eq!(deque.as_slices(), (&[6, 5, 2, 4, 7][..], &[3, 8, 1][..]));
    }

    // Exercises `remove` in every shift configuration, including removals
    // from wrapped layouts where either segment may need to move.
    #[test]
    fn all_removal_cases() {
        let mut backing_region = [core::mem::MaybeUninit::<i32>::uninit(); 8];
        let mut deque = crate::collections::SliceDeque::<i32>::from(&mut backing_region[..]);

        deque.push_back(1);
        deque.push_back(2);
        deque.push_back(3);
        deque.push_back(4);
        assert_eq!(deque.as_slices(), (&[1, 2, 3, 4][..], &[][..]));

        // Remove from the middle of an unwrapped deque (back shifts forward).
        assert_eq!(deque.remove(2), Some(3));
        assert_eq!(deque.as_slices(), (&[1, 2, 4][..], &[][..]));

        // Remove near the front (front shifts backward).
        deque.push_back(3);
        assert_eq!(deque.remove(1), Some(2));
        assert_eq!(deque.as_slices(), (&[1, 4, 3][..], &[][..]));

        // Wrap the deque, then remove from the wrapped back segment.
        deque.push_front(2);
        deque.push_front(5);
        deque.push_front(6);
        assert_eq!(deque.as_slices(), (&[6, 5][..], &[2, 1, 4, 3][..]));

        assert_eq!(deque.remove(3), Some(1));
        assert_eq!(deque.as_slices(), (&[6, 5][..], &[2, 4, 3][..]));

        // Removal that pulls an element across the wrap-around point.
        deque.push_back(1);
        assert_eq!(deque.remove(2), Some(2));
        assert_eq!(deque.as_slices(), (&[6][..], &[5, 4, 3, 1][..]));

        // Rotate so the front segment dominates.
        for _ in 0..3 {
            let x = deque.pop_back().unwrap();
            deque.push_front(x);
        }

        assert_eq!(deque.as_slices(), (&[4, 3, 1, 6][..], &[5][..]));

        // Remove the element just before the wrap point.
        assert_eq!(deque.remove(3), Some(6));
        assert_eq!(deque.as_slices(), (&[4, 3, 1, 5][..], &[][..]));

        // Remove from a re-wrapped layout, shifting the front segment.
        deque.push_back(2);
        deque.push_back(6);
        assert_eq!(deque.remove(1), Some(3));
        assert_eq!(deque.as_slices(), (&[4, 1, 5][..], &[2, 6][..]));
    }

    // Exhaustively drains every (length, wrap offset, range) combination on an
    // 8-slot deque, counting destructor calls to verify that exactly the
    // drained elements are dropped by `drain`, and the rest on deque drop.
    #[test]
    fn all_drain_cases() {
        use crate::test_utils::*;

        for len in 1..=8 {
            for offset in 0..=len {
                for end in 1..=len {
                    for start in 0..end {
                        let drop_count = DropCounter::new();
                        let mut backing_region = [
                            core::mem::MaybeUninit::<Droppable>::uninit(),
                            core::mem::MaybeUninit::<Droppable>::uninit(),
                            core::mem::MaybeUninit::<Droppable>::uninit(),
                            core::mem::MaybeUninit::<Droppable>::uninit(),
                            core::mem::MaybeUninit::<Droppable>::uninit(),
                            core::mem::MaybeUninit::<Droppable>::uninit(),
                            core::mem::MaybeUninit::<Droppable>::uninit(),
                            core::mem::MaybeUninit::<Droppable>::uninit(),
                        ];
                        let mut deque = crate::collections::SliceDeque::<Droppable>::from(&mut backing_region[..]);

                        // `offset` controls where the contents sit relative to
                        // the wrap-around point.
                        for _ in 0..offset {
                            deque.push_front(drop_count.new_droppable(()));
                        }
                        for _ in offset..len {
                            deque.push_back(drop_count.new_droppable(()));
                        }

                        // Drain must drop exactly the elements in `start..end`.
                        deque.drain(start..end);
                        assert_eq!(deque.len(), len - (end - start));
                        assert_eq!(drop_count.dropped(), end - start);

                        // Dropping the deque must drop everything that remains.
                        drop(deque);
                        assert_eq!(drop_count.dropped(), len);
                    }
                }
            }
        }
    }
}