#![allow(clippy::missing_safety_doc)]

use std::marker::PhantomData;
use std::mem::{size_of, MaybeUninit};
use std::ops::{Bound, Range, RangeBounds};
use std::{ptr, slice};
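
// Note on the representation (added commentary): the backing `Vec` is kept with
// `len() == capacity()` at all times so that every physical slot can be written
// through raw pointers. Only the `len` slots starting at the physical index
// `head` (wrapping around the end of the allocation) hold initialized values;
// everything else is logically uninitialized and must never be read or dropped.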
/// A growable ring buffer backed by a `Vec<T>`.
pub struct RingBuf<T> {
    /// Backing storage. Its length is always kept equal to its capacity; only
    /// `len` slots starting at `head` (wrapping) are logically initialized.
    buf: Vec<T>,
    /// Physical index of the first logical element.
    head: usize,
    /// Number of logical elements currently stored.
    len: usize,
}

/// An immutable view of a logical sub-range of a [`RingBuf`].
pub struct RingBufSlice<'a, T> {
    buf: &'a RingBuf<T>,
    start: usize,
    end: usize,
}

/// A mutable view of a logical sub-range of a [`RingBuf`].
pub struct RingBufSliceMut<'a, T> {
    buf: *mut RingBuf<T>,
    start: usize,
    end: usize,
    _marker: PhantomData<&'a RingBuf<T>>,
}

/// A draining iterator over a range of a [`RingBuf`], created by [`RingBuf::drain`].
pub struct Drain<'a, T> {
    buf: &'a mut RingBuf<T>,
    /// Logical index (relative to `prev_head`) of the next front element.
    front: usize,
    /// Logical index one past the next back element.
    back: usize,
    /// Number of elements not yet yielded.
    remaining: usize,
    /// The buffer's head position before the drain began.
    prev_head: usize,
}

impl<T> RingBuf<T> {
    /// Asserts that `T` is not a zero-sized type, which this buffer does not support.
    const fn ensure_type_ok() {
        assert!(size_of::<T>() > 0, "cannot deal with zero-sized types");
    }

    /// Creates an empty `RingBuf` without allocating.
    pub fn new() -> RingBuf<T> {
        Self::ensure_type_ok();
        RingBuf {
            buf: Vec::new(),
            head: 0,
            len: 0,
        }
    }

    /// Creates a `RingBuf` with room for at least `capacity` elements.
    #[allow(clippy::uninit_vec)]
    pub fn with_capacity(capacity: usize) -> RingBuf<T> {
        Self::ensure_type_ok();
        let mut vec = Vec::with_capacity(capacity);
        // Expose the full capacity as length; the uninitialized slots are never
        // read until they have been written.
        unsafe { vec.set_len(vec.capacity()) };
        RingBuf {
            buf: vec,
            head: 0,
            len: 0,
        }
    }

    pub fn capacity(&self) -> usize {
        self.buf.len()
    }

    pub fn len(&self) -> usize {
        self.len
    }

    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    fn ptr(&self) -> *mut T {
        self.buf.as_ptr() as *mut _
    }

    unsafe fn ptr_at(&self, offset: usize) -> *mut T {
        self.ptr().add(offset)
    }

    unsafe fn copy(&mut self, src: usize, dst: usize, count: usize) {
        ptr::copy(self.ptr_at(src), self.ptr_at(dst), count);
    }

    unsafe fn copy_nonoverlapping(&mut self, src: usize, dst: usize, count: usize) {
        ptr::copy_nonoverlapping(self.ptr_at(src), self.ptr_at(dst), count);
    }

    unsafe fn buf_slice_at(&self, range: Range<usize>) -> &[T] {
        slice::from_raw_parts(self.ptr_at(range.start), range.end - range.start)
    }

    #[allow(clippy::mut_from_ref)]
    unsafe fn buf_slice_at_mut(&self, range: Range<usize>) -> &mut [T] {
        slice::from_raw_parts_mut(self.ptr_at(range.start), range.end - range.start)
    }

    /// Returns `true` if the live elements occupy a single physical run,
    /// i.e. the region starting at `head` does not wrap around the end.
    pub fn is_contiguous(&self) -> bool {
        self.head <= self.capacity() - self.len
    }

    /// Maps a logical index (relative to the current head) to a physical offset.
    #[inline]
    fn offset_of(&self, index: usize) -> usize {
        self.offset_of_explicit(self.head, index)
    }

    /// Maps a logical index relative to the given `head` to a physical offset,
    /// i.e. computes `(head + index) % capacity` without risking overflow.
    fn offset_of_explicit(&self, head: usize, index: usize) -> usize {
        debug_assert!(index < self.capacity(), "index cannot exceed capacity");
        let remaining = self.capacity() - index;
        if head < remaining {
            head + index
        } else {
            head - remaining
        }
    }

    /// Maps a backwards step count (elements before the current head) to a
    /// physical offset, wrapping around the start of the allocation.
    fn offset_of_reverse(&self, negative_index: usize) -> usize {
        debug_assert!(
            negative_index < self.capacity(),
            "index cannot exceed capacity"
        );
        if self.head >= negative_index {
            self.head - negative_index
        } else {
            self.head + (self.capacity() - negative_index)
        }
    }
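
    // Worked example for the offset arithmetic above (added commentary): with
    // capacity = 8, head = 6 and index = 3, `remaining` is 8 - 3 = 5, head is
    // not less than remaining, so the result is 6 - 5 = 1, which equals
    // (6 + 3) % 8 without ever computing a sum that could overflow. Likewise
    // `offset_of_reverse(2)` with head = 1 wraps back to 1 + (8 - 2) = 7.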

    /// Restores the ring invariants after the backing `Vec` has grown from
    /// `old_capacity` to the current capacity.
    unsafe fn handle_buf_expand(&mut self, old_capacity: usize) {
        let new_capacity = self.capacity();

        // If the live region did not wrap in the old buffer, nothing has moved
        // relative to it and there is nothing to fix up.
        if self.head > old_capacity - self.len {
            let head_segment_len = old_capacity - self.head;
            let tail_segment_len = self.len - head_segment_len;

            if head_segment_len > tail_segment_len
                && new_capacity - old_capacity >= tail_segment_len
            {
                // The freshly added space can hold the wrapped tail: append it
                // after the old end so the region becomes contiguous at `head`.
                self.copy_nonoverlapping(0, old_capacity, tail_segment_len);
            } else {
                // Otherwise move the head segment to the end of the new buffer.
                let new_head = new_capacity - head_segment_len;
                self.copy(self.head, new_head, head_segment_len);
                self.head = new_head;
            }
        }
    }
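
    // Illustration of the two relocation strategies above (added commentary).
    // Take a wrapped buffer of capacity 6 holding A B C D E with head = 3,
    // laid out physically as [D E . A B C]:
    //   - if the added space can absorb the wrapped tail (grow to 8):
    //       [D E . A B C _ _] -> [D E . A B C D E]   (contiguous at head = 3)
    //   - otherwise the head run moves to the new end (grow to 7):
    //       [D E . A B C _]   -> [D E . . A B C]     (head becomes 4)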

    /// Moves the contents so the first logical element sits at physical index 0,
    /// making the live region contiguous.
    pub fn realign(&mut self) {
        if self.head == 0 {
            return;
        }

        unsafe {
            if self.is_contiguous() {
                self.copy(self.head, 0, self.len);
            } else {
                // The region wraps: move the head run down so the buffer holds
                // [tail..., head...], then rotate it into logical order.
                let head_segment_len = self.capacity() - self.head;
                let tail_segment_len = self.len - head_segment_len;
                self.copy(self.head, tail_segment_len, head_segment_len);

                let slice = self.buf_slice_at_mut(0..self.len);
                slice.rotate_left(tail_segment_len);
            }

            self.head = 0;
        }
    }
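
    // Example of the realign rotation above (added commentary): with capacity 6,
    // head = 4 and contents A B C D laid out as [C D . . A B], the head run is
    // first copied down to give [C D A B A B]; the slice of the first four
    // elements is then rotated left by the tail length (2), yielding
    // [A B C D A B] with head reset to 0 (the trailing slots are stale).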

    /// Ensures there is room for at least `count` more elements.
    #[allow(clippy::uninit_vec)]
    pub fn reserve(&mut self, count: usize) {
        let desired_capacity = self.len.checked_add(count).expect("capacity overflow");
        if desired_capacity > self.capacity() {
            let old_capacity = self.capacity();
            self.buf.reserve(desired_capacity - old_capacity);
            unsafe {
                self.buf.set_len(self.buf.capacity());
                self.handle_buf_expand(old_capacity);
            }
        }
    }

    /// Like [`RingBuf::reserve`], but requests exactly the needed extra space.
    pub fn reserve_exact(&mut self, count: usize) {
        let desired_capacity = self.len.checked_add(count).expect("capacity overflow");
        if desired_capacity > self.capacity() {
            let old_capacity = self.capacity();
            self.buf.reserve_exact(desired_capacity - old_capacity);
            unsafe {
                self.buf.set_len(self.buf.capacity());
                self.handle_buf_expand(old_capacity);
            }
        }
    }

    /// Shrinks the backing allocation to hold at most `target_capacity` elements
    /// (never fewer than the current length). The contents are realigned first.
    pub fn shrink_to(&mut self, target_capacity: usize) {
        assert!(
            target_capacity <= self.capacity(),
            "cannot shrink to a greater capacity (old: {}, requested: {})",
            self.capacity(),
            target_capacity
        );

        self.realign();
        let requested_capacity = usize::max(target_capacity, self.len);
        let old_capacity = self.capacity();

        unsafe {
            // Temporarily report only the retained prefix as the Vec's length so
            // `shrink_to` preserves exactly those slots, then restore the
            // "length == capacity" invariant.
            self.buf.set_len(requested_capacity);
            self.buf.shrink_to(requested_capacity);
            let new_capacity = self.buf.capacity();
            self.buf.set_len(new_capacity);
            debug_assert!(
                new_capacity <= old_capacity,
                "Vec::shrink_to did not shrink?"
            );
        }
    }

    /// Appends an element to the back of the buffer.
    pub fn push_back(&mut self, val: T) {
        self.reserve(1);
        unsafe {
            let target = self.ptr_at(self.offset_of(self.len));
            ptr::write(target, val);
        }
        self.len += 1;
    }

    /// Prepends an element to the front of the buffer.
    pub fn push_front(&mut self, val: T) {
        self.reserve(1);
        let new_head = self.offset_of_reverse(1);
        unsafe {
            let target = self.ptr_at(new_head);
            ptr::write(target, val);
        }
        self.head = new_head;
        self.len += 1;
    }

    /// Removes and returns the last element, or `None` if the buffer is empty.
    pub fn pop_back(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }

        let out;
        unsafe {
            // The last element lives at logical index `len - 1`.
            let target = self.ptr_at(self.offset_of(self.len - 1));
            out = ptr::read(target);
            self.len -= 1;
        }

        Some(out)
    }

    /// Removes and returns the first element, or `None` if the buffer is empty.
    pub fn pop_front(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }

        let out;
        unsafe {
            let target = self.ptr_at(self.offset_of(0));
            out = ptr::read(target);
            self.head = self.offset_of(1);
            self.len -= 1;
        }

        Some(out)
    }

    /// Panics unless `range` is a valid, in-bounds logical range.
    fn check_range(&self, range: &Range<usize>) {
        assert!(range.start <= range.end, "range cannot be reverse");
        assert!(range.start < self.len, "range start out of bounds");
        assert!(range.end <= self.len, "range end out of bounds");
    }

    /// Maps a logical range (relative to the current head) onto one or two
    /// physical ranges; the second range is present when the logical range wraps.
    #[inline]
    fn map_range(&self, range: Range<usize>) -> (Range<usize>, Option<Range<usize>>) {
        self.map_range_explicit(self.head, range)
    }

    fn map_range_explicit(
        &self,
        head: usize,
        range: Range<usize>,
    ) -> (Range<usize>, Option<Range<usize>>) {
        if range.start == range.end {
            return (head..head, None);
        }
        let start = self.offset_of_explicit(head, range.start);
        let end = self.offset_of_explicit(head, range.end - 1);

        if end >= start {
            (start..end + 1, None)
        } else {
            (start..self.capacity(), Some(0..end + 1))
        }
    }
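
    // Example of the range mapping above (added commentary): with capacity 8
    // and head = 6, the logical range 0..5 covers physical offsets 6, 7, 0, 1
    // and 2, so `map_range` returns (6..8, Some(0..3)); the logical range 2..5
    // does not wrap and maps to (0..3, None).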

    pub fn range(&self, range: Range<usize>) -> RingBufSlice<T> {
        self.check_range(&range);
        RingBufSlice {
            buf: self,
            start: range.start,
            end: range.end,
        }
    }

    pub fn range_mut(&mut self, range: Range<usize>) -> RingBufSliceMut<T> {
        self.check_range(&range);
        RingBufSliceMut {
            buf: self,
            start: range.start,
            end: range.end,
            _marker: PhantomData,
        }
    }

    unsafe fn range_to_slices(&self, range: Range<usize>) -> (&[T], Option<&[T]>) {
        let (a, b) = self.map_range(range);
        (self.buf_slice_at(a), b.map(|r| self.buf_slice_at(r)))
    }

    unsafe fn range_to_slices_mut(&mut self, range: Range<usize>) -> (&mut [T], Option<&mut [T]>) {
        let (a, b) = self.map_range(range);
        (
            self.buf_slice_at_mut(a),
            b.map(|r| self.buf_slice_at_mut(r)),
        )
    }

    pub fn get(&self, index: usize) -> Option<&T> {
        if index < self.len {
            unsafe { Some(&*self.ptr_at(self.offset_of(index))) }
        } else {
            None
        }
    }

    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len {
            unsafe { Some(&mut *self.ptr_at(self.offset_of(index))) }
        } else {
            None
        }
    }

    /// Drops all elements, leaving the capacity untouched.
    pub fn clear(&mut self) {
        unsafe {
            let (a, b) = self.map_range(0..self.len);
            // Reset the bookkeeping before dropping so a panicking destructor
            // cannot leave the buffer pointing at already-dropped elements.
            self.head = 0;
            self.len = 0;

            let slice_a: *mut [T] = self.buf_slice_at_mut(a);
            ptr::drop_in_place(slice_a);
            if let Some(b) = b {
                let slice_b: *mut [T] = self.buf_slice_at_mut(b);
                ptr::drop_in_place(slice_b);
            }
        }
    }

    /// Removes the given logical range from the buffer and returns an iterator
    /// over the removed elements. Only front-anchored (`..n`), back-anchored
    /// (`n..`) and full (`..`) ranges are currently supported.
    pub fn drain<R: RangeBounds<usize>>(&mut self, range: R) -> Drain<T> {
        let lower_bound = match range.start_bound() {
            Bound::Included(&start) => {
                assert!(start < self.len, "start index out of bounds");
                Some(start)
            }
            Bound::Excluded(&start) => {
                let start = start.checked_add(1).expect("start index out of bounds");
                assert!(start < self.len, "start index out of bounds");
                Some(start)
            }
            Bound::Unbounded => None,
        };
        let upper_bound = match range.end_bound() {
            Bound::Included(&end) => {
                let end = end.checked_add(1).expect("range out of bounds");
                assert!(end <= self.len, "end index out of bounds");
                Some(end)
            }
            Bound::Excluded(&end) => {
                assert!(end <= self.len, "end index out of bounds");
                Some(end)
            }
            Bound::Unbounded => None,
        };

        if let Some(start) = lower_bound {
            if let Some(_end) = upper_bound {
                unimplemented!("drain from middle unimplemented");
            } else {
                Drain::to_end(self, start)
            }
        } else if let Some(end) = upper_bound {
            Drain::from_start(self, end)
        } else {
            Drain::from_start(self, self.len)
        }
    }
}

impl<T: Clone> RingBuf<T> {
    /// Appends `count` clones of `value` to the back of the buffer.
    pub fn fill_at_back(&mut self, count: usize, value: T) {
        self.reserve(count);
        let (a, b) = self.map_range(self.len..self.len + count);

        unsafe {
            a.for_each(|i| ptr::write(self.ptr_at(i), value.clone()));
            if let Some(b) = b {
                b.for_each(|i| ptr::write(self.ptr_at(i), value.clone()));
            }
        }

        self.len += count;
    }

    /// Prepends `count` clones of `value` to the front of the buffer.
    pub fn fill_at_front(&mut self, count: usize, value: T) {
        self.reserve(count);
        let new_head = self.offset_of_reverse(count);
        let (a, b) = self.map_range_explicit(new_head, 0..count);

        unsafe {
            a.for_each(|i| ptr::write(self.ptr_at(i), value.clone()));
            if let Some(b) = b {
                b.for_each(|i| ptr::write(self.ptr_at(i), value.clone()));
            }
        }

        self.head = new_head;
        self.len += count;
    }
}

impl<T: Copy> RingBuf<T> {
    unsafe fn copy_range_from_slice(
        &mut self,
        range_a: Range<usize>,
        range_b: Option<Range<usize>>,
        elements: &[T],
    ) {
        self.copy_range_from_ptr(range_a, range_b, elements.as_ptr(), elements.len());
    }

    unsafe fn copy_range_from_ptr(
        &mut self,
        range_a: Range<usize>,
        range_b: Option<Range<usize>>,
        source: *const T,
        count: usize,
    ) {
        if let Some(b) = range_b {
            debug_assert_eq!(
                b.end - b.start + range_a.end - range_a.start,
                count,
                "range incorrect"
            );
            unsafe {
                let dest_a = self.ptr_at(range_a.start);
                let length_a = range_a.end - range_a.start;
                ptr::copy_nonoverlapping(source, dest_a, length_a);
                let dest_b = self.ptr_at(b.start);
                let length_b = count - length_a;
                ptr::copy_nonoverlapping(source.add(length_a), dest_b, length_b);
            }
        } else {
            debug_assert_eq!(range_a.end - range_a.start, count, "range incorrect");
            unsafe {
                let dest = self.ptr_at(range_a.start);
                ptr::copy_nonoverlapping(source, dest, range_a.end - range_a.start);
            }
        }
    }

    unsafe fn copy_range_to_slice(
        &self,
        range_a: Range<usize>,
        range_b: Option<Range<usize>>,
        to_slice: &mut [T],
    ) {
        self.copy_range_to_ptr(range_a, range_b, to_slice.as_mut_ptr(), to_slice.len());
    }

    unsafe fn copy_range_to_ptr(
        &self,
        range_a: Range<usize>,
        range_b: Option<Range<usize>>,
        dest: *mut T,
        count: usize,
    ) {
        if let Some(b) = range_b {
            debug_assert_eq!(
                b.end - b.start + range_a.end - range_a.start,
                count,
                "range incorrect"
            );
            unsafe {
                let source_a = self.ptr_at(range_a.start) as *const T;
                let length_a = range_a.end - range_a.start;
                ptr::copy_nonoverlapping(source_a, dest, length_a);
                let source_b = self.ptr_at(b.start) as *const T;
                let length_b = count - length_a;
                ptr::copy_nonoverlapping(source_b, dest.add(length_a), length_b);
            }
        } else {
            debug_assert_eq!(range_a.end - range_a.start, count, "range incorrect");
            unsafe {
                let source = self.ptr_at(range_a.start) as *const T;
                ptr::copy_nonoverlapping(source, dest, range_a.end - range_a.start);
            }
        }
    }

    /// Copies all of `elements` onto the back of the buffer.
    pub fn push_back_copy_from_slice(&mut self, elements: &[T]) {
        self.reserve(elements.len());
        let (a, b) = self.map_range(self.len..self.len + elements.len());
        unsafe { self.copy_range_from_slice(a, b, elements) };
        self.len += elements.len();
    }

    /// Copies all of `elements` onto the front of the buffer, preserving their order.
    pub fn push_front_copy_from_slice(&mut self, elements: &[T]) {
        self.reserve(elements.len());
        let new_head = self.offset_of_reverse(elements.len());
        let (a, b) = self.map_range_explicit(new_head, 0..elements.len());
        unsafe { self.copy_range_from_slice(a, b, elements) };
        self.head = new_head;
        self.len += elements.len();
    }

    /// Removes `dest.len()` elements from the back of the buffer, copying them
    /// into `dest` in logical order.
    pub fn pop_back_copy_to_slice(&mut self, dest: &mut [T]) {
        assert!(dest.len() <= self.len, "destination slice too large");
        let new_len = self.len - dest.len();
        let (a, b) = self.map_range(new_len..self.len);
        self.len = new_len;
        unsafe { self.copy_range_to_slice(a, b, dest) };
    }

    /// Removes `dest.len()` elements from the front of the buffer, copying them
    /// into `dest` in logical order.
    pub fn pop_front_copy_to_slice(&mut self, dest: &mut [T]) {
        assert!(dest.len() <= self.len, "destination slice too large");
        let (a, b) = self.map_range(0..dest.len());
        self.head = self.offset_of(dest.len());
        self.len -= dest.len();
        unsafe { self.copy_range_to_slice(a, b, dest) };
    }
}

/// Checks that `r2` (interpreted relative to `r1.start`) lies within `r1` and
/// returns the corresponding absolute range.
fn validate_subrange(r1: Range<usize>, r2: &Range<usize>) -> Range<usize> {
    assert!(r2.start <= r2.end, "range cannot be reverse");
    let new_start = r1.start.checked_add(r2.start).expect("start out of range");
    let new_end = r1.start.checked_add(r2.end).expect("end out of range");
    assert!(new_start < r1.end, "start out of range");
    assert!(new_end <= r1.end, "end out of range");
    new_start..new_end
}

impl<'a, T> RingBufSlice<'a, T> {
    pub fn len(&self) -> usize {
        self.end - self.start
    }

    pub fn is_empty(&self) -> bool {
        self.end == self.start
    }

    pub fn as_slices(&self) -> (&'a [T], Option<&'a [T]>) {
        unsafe { self.buf.range_to_slices(self.start..self.end) }
    }

    pub fn range(&self, range: Range<usize>) -> RingBufSlice<'a, T> {
        let new_range = validate_subrange(self.start..self.end, &range);
        RingBufSlice {
            buf: self.buf,
            start: new_range.start,
            end: new_range.end,
        }
    }
}

impl<'a, T: Copy> RingBufSlice<'a, T> {
    pub fn copy_to_slice(&self, slice: &mut [T]) {
        assert_eq!(self.len(), slice.len(), "length mismatch");
        unsafe {
            let (a, b) = self.buf.map_range(self.start..self.end);
            self.buf.copy_range_to_slice(a, b, slice);
        }
    }

    pub unsafe fn copy_to_ptr(&self, dest: *mut T, count: usize) {
        assert_eq!(self.len(), count, "length mismatch");
        let (a, b) = self.buf.map_range(self.start..self.end);
        self.buf.copy_range_to_ptr(a, b, dest, count);
    }

    pub fn read_fixed<const N: usize>(&self) -> [T; N] {
        unsafe {
            let mut out: MaybeUninit<[T; N]> = MaybeUninit::uninit();
            self.copy_to_ptr(out.as_mut_ptr() as *mut T, N);
            out.assume_init()
        }
    }
}

impl<'a, T> RingBufSliceMut<'a, T> {
    pub fn len(&self) -> usize {
        self.end - self.start
    }

    pub fn is_empty(&self) -> bool {
        self.end == self.start
    }

    pub fn as_slices(&self) -> (&'a [T], Option<&'a [T]>) {
        unsafe { (*self.buf).range_to_slices(self.start..self.end) }
    }

    pub fn as_mut_slices(&mut self) -> (&'a mut [T], Option<&'a mut [T]>) {
        unsafe { (*self.buf).range_to_slices_mut(self.start..self.end) }
    }

    pub fn range(&self, range: Range<usize>) -> RingBufSlice<'a, T> {
        let new_range = validate_subrange(self.start..self.end, &range);
        unsafe {
            RingBufSlice {
                buf: &*self.buf,
                start: new_range.start,
                end: new_range.end,
            }
        }
    }

    pub fn range_mut(&mut self, range: Range<usize>) -> RingBufSliceMut<'a, T> {
        let new_range = validate_subrange(self.start..self.end, &range);
        RingBufSliceMut {
            buf: self.buf,
            start: new_range.start,
            end: new_range.end,
            _marker: PhantomData,
        }
    }

    pub fn split_at_mut(self, index: usize) -> (RingBufSliceMut<'a, T>, RingBufSliceMut<'a, T>) {
        let split_index = index.checked_add(self.start).expect("index out of range");
        assert!(split_index < self.end, "split index out of range");

        (
            RingBufSliceMut {
                buf: self.buf,
                start: self.start,
                end: split_index,
                _marker: PhantomData,
            },
            RingBufSliceMut {
                buf: self.buf,
                start: split_index,
                end: self.end,
                _marker: PhantomData,
            },
        )
    }
}

impl<'a, T: Copy> RingBufSliceMut<'a, T> {
    pub fn copy_to_slice(&self, slice: &mut [T]) {
        assert_eq!(self.len(), slice.len(), "length mismatch");
        unsafe {
            let (a, b) = (*self.buf).map_range(self.start..self.end);
            (*self.buf).copy_range_to_slice(a, b, slice);
        }
    }

    pub unsafe fn copy_to_ptr(&self, dest: *mut T, count: usize) {
        assert_eq!(self.len(), count, "length mismatch");
        let (a, b) = (*self.buf).map_range(self.start..self.end);
        (*self.buf).copy_range_to_ptr(a, b, dest, count);
    }

    pub fn read_fixed<const N: usize>(&self) -> [T; N] {
        unsafe {
            let mut out: MaybeUninit<[T; N]> = MaybeUninit::uninit();
            self.copy_to_ptr(out.as_mut_ptr() as *mut T, N);
            out.assume_init()
        }
    }

    pub fn copy_from_slice(&mut self, slice: &[T]) {
        assert_eq!(self.len(), slice.len(), "length mismatch");
        unsafe {
            let (a, b) = (*self.buf).map_range(self.start..self.end);
            (*self.buf).copy_range_from_slice(a, b, slice);
        }
    }

    pub unsafe fn copy_from_ptr(&mut self, source: *const T, count: usize) {
        assert_eq!(self.len(), count, "length mismatch");
        unsafe {
            let (a, b) = (*self.buf).map_range(self.start..self.end);
            (*self.buf).copy_range_from_ptr(a, b, source, count);
        }
    }
}

impl<T> Drop for RingBuf<T> {
    fn drop(&mut self) {
        unsafe {
            // Make the live region contiguous at index 0 and hand it back to the
            // Vec as its length, so only initialized elements are dropped.
            self.realign();
            self.buf.set_len(self.len);
        }
    }
}

impl<'a, T> Drain<'a, T> {
    /// Drains the first `until` elements of `buf`.
    fn from_start(buf: &'a mut RingBuf<T>, until: usize) -> Drain<'a, T> {
        let prev_head = buf.head;
        // The new head is the physical position of logical index `until`. When
        // the whole buffer is drained the head no longer matters, so keep it
        // unchanged rather than wrapping past the capacity.
        let new_head = if until < buf.capacity() {
            buf.offset_of(until)
        } else {
            buf.head
        };
        let drain = Drain {
            buf,
            front: 0,
            back: until,
            remaining: until,
            prev_head,
        };
        drain.buf.head = new_head;
        drain.buf.len -= until;
        drain
    }

    /// Drains everything from logical index `starting_from` to the end of `buf`.
    fn to_end(buf: &'a mut RingBuf<T>, starting_from: usize) -> Drain<'a, T> {
        let prev_head = buf.head;
        let back = buf.len;
        let remaining = buf.len - starting_from;
        let drain = Drain {
            buf,
            front: starting_from,
            back,
            remaining,
            prev_head,
        };
        // Only the prefix before `starting_from` remains in the buffer.
        drain.buf.len = starting_from;
        drain
    }
}
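
// Note on the eager bookkeeping above (added commentary): both constructors
// shrink the parent buffer immediately and remember the old head in
// `prev_head`. The iterator and its `Drop` impl address the drained elements
// relative to `prev_head`, so even if the `Drain` is leaked the `RingBuf`
// never considers the moved-out slots to be live.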

impl<'a, T> Drop for Drain<'a, T> {
    fn drop(&mut self) {
        if self.remaining == 0 {
            return;
        }

        unsafe {
            // Drop whatever the iterator did not get around to yielding.
            let (a, b) = self
                .buf
                .map_range_explicit(self.prev_head, self.front..self.back);
            let slice_a: *mut [T] = self.buf.buf_slice_at_mut(a);
            ptr::drop_in_place(slice_a);
            if let Some(b) = b {
                let slice_b: *mut [T] = self.buf.buf_slice_at_mut(b);
                ptr::drop_in_place(slice_b);
            }
        }
    }
}

impl<'a, T> Iterator for Drain<'a, T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.remaining == 0 {
            None
        } else {
            unsafe {
                let element = self
                    .buf
                    .ptr_at(self.buf.offset_of_explicit(self.prev_head, self.front));
                self.front += 1;
                self.remaining -= 1;
                Some(ptr::read(element))
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.remaining == 0 {
            None
        } else {
            unsafe {
                self.back -= 1;
                self.remaining -= 1;
                let element = self
                    .buf
                    .ptr_at(self.buf.offset_of_explicit(self.prev_head, self.back));
                Some(ptr::read(element))
            }
        }
    }
}

impl<'a, T> ExactSizeIterator for Drain<'a, T> {}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn new() {
        let mut buf: RingBuf<String> = RingBuf::new();
        buf.push_back("world".into());
        buf.push_back("!".into());
        buf.push_front(", ".into());
        buf.push_front("Hello".into());

        assert_eq!(buf.pop_front(), Some("Hello".into()));
        assert_eq!(buf.pop_front(), Some(", ".into()));
        assert_eq!(buf.pop_front(), Some("world".into()));
        assert_eq!(buf.pop_front(), Some("!".into()));
        assert_eq!(buf.pop_front(), None);
    }

    #[test]
    fn copy_around_slices() {
        let mut buf: RingBuf<u8> = RingBuf::new();
        buf.push_back_copy_from_slice(&[5, 6, 7, 8, 9, 10, 11]);
        buf.push_front_copy_from_slice(&[0, 1, 2, 3, 4]);
        assert_eq!(buf.get(3), Some(&3));
        assert_eq!(buf.get(7), Some(&7));

        let sliced = buf.range(3..6);
        let mut dest = [0u8; 3];
        sliced.copy_to_slice(&mut dest);
        assert_eq!(dest, [3, 4, 5]);

        let mut dest = [0u8; 6];
        buf.pop_front_copy_to_slice(&mut dest);
        assert_eq!(dest, [0, 1, 2, 3, 4, 5]);
        buf.pop_back_copy_to_slice(&mut dest);
        assert_eq!(dest, [6, 7, 8, 9, 10, 11]);
    }

    #[test]
    fn copy_from_slice() {
        let mut buf: RingBuf<u8> = RingBuf::new();
        buf.push_back_copy_from_slice(&[0u8; 4096]);

        let mut slice_mut = buf.range_mut(1024..2048);
        slice_mut.copy_from_slice(&[1u8; 1024]);

        buf.drain(..1024);

        assert_eq!(buf.get(1024), Some(&0));
        assert_eq!(buf.get(512), Some(&1));

        buf.push_front_copy_from_slice(&[2u8; 1024]);

        assert_eq!(buf.get(512), Some(&2));
        assert_eq!(buf.get(1536), Some(&1));
        assert_eq!(buf.get(3072), Some(&0));
    }

    #[test]
    fn drain() {
        let mut buf: RingBuf<String> = RingBuf::new();
        buf.push_back("Hello, ".into());
        buf.push_back("world!".into());

        for i in 0..10 {
            buf.push_back(i.to_string());
        }

        let a: Vec<String> = buf.drain(..2).collect();
        assert_eq!(a.join(""), "Hello, world!");
        let b: Vec<String> = buf.drain(..).collect();
        assert_eq!(b.join(""), "0123456789");
        assert_eq!(buf.len(), 0);
    }
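
    // Added sketch: `fill_at_back` / `fill_at_front` are not covered by the
    // tests above, so this exercises them on a small buffer; the expected
    // values follow directly from the fill counts used here.
    #[test]
    fn fill() {
        let mut buf: RingBuf<u8> = RingBuf::with_capacity(8);
        buf.fill_at_back(3, 7);
        buf.fill_at_front(2, 9);

        assert_eq!(buf.len(), 5);
        assert_eq!(buf.get(0), Some(&9));
        assert_eq!(buf.get(1), Some(&9));
        assert_eq!(buf.get(2), Some(&7));
        assert_eq!(buf.get(4), Some(&7));
        assert_eq!(buf.pop_front(), Some(9));
        assert_eq!(buf.pop_back(), Some(7));
        assert_eq!(buf.len(), 3);
    }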

    #[test]
    fn range_as_slices() {
        let mut data = [0u8; 256];
        for (i, v) in data.iter_mut().enumerate() {
            *v = i as u8;
        }
        let mut buf: RingBuf<u8> = RingBuf::with_capacity(data.len());
        buf.push_back_copy_from_slice(&data[24..]);
        buf.push_front_copy_from_slice(&data[..24]);
        let mut range = buf.range_mut(0..buf.len());
        let (a, b) = range.as_mut_slices();
        assert_eq!(a, &mut data[..24]);
        assert_eq!(b, Some(&mut data[24..]));

        let range2 = buf.range(0..4);
        assert_eq!(range2.read_fixed::<4>(), [0, 1, 2, 3]);
    }

    #[test]
    fn shrink() {
        let mut buf: RingBuf<u8> = RingBuf::with_capacity(256);
        buf.push_back_copy_from_slice(&[5u8; 64]);
        buf.push_front_copy_from_slice(&[6u8; 32]);
        buf.shrink_to(buf.len());
        assert_eq!(buf.get(0), Some(&6));
        assert_eq!(buf.get(16), Some(&6));
        assert_eq!(buf.get(48), Some(&5));
        assert_eq!(buf.get(95), Some(&5));
    }
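
    // Added sketch: a small round-trip over a wrapped buffer, checking that
    // `pop_back` and `pop_front` return elements in the expected order after
    // the head has moved away from index 0.
    #[test]
    fn wrapped_push_pop() {
        let mut buf: RingBuf<u32> = RingBuf::with_capacity(4);
        buf.push_back(1);
        buf.push_back(2);
        buf.push_front(0);

        assert_eq!(buf.len(), 3);
        assert_eq!(buf.pop_back(), Some(2));
        assert_eq!(buf.pop_back(), Some(1));
        assert_eq!(buf.pop_back(), Some(0));
        assert_eq!(buf.pop_back(), None);
    }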
}