allocator_api2/vec/into_iter.rs

use core::fmt;
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem::{self, size_of, ManuallyDrop};

use core::ptr::{self, NonNull};
use core::slice::{self};

use crate::addr;

use super::{Allocator, Global, RawVec};

#[cfg(not(no_global_oom_handling))]
use super::Vec;

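/// An iterator that moves values out of a vector.
///
/// `ptr..end` is the region of the original buffer that has not been yielded
/// yet, while `buf`, `cap`, and `alloc` keep the allocation itself alive so it
/// can be returned to the allocator on drop. `alloc` is wrapped in
/// `ManuallyDrop` because `Drop` takes it out by value to rebuild a `RawVec`.
/// For zero-sized `T` the two cursors cannot be offset by an element size, so
/// they are used purely as counters.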
15
16pub struct IntoIter<T, A: Allocator = Global> {
29 pub(super) buf: NonNull<T>,
30 pub(super) phantom: PhantomData<T>,
31 pub(super) cap: usize,
32 pub(super) alloc: ManuallyDrop<A>,
35 pub(super) ptr: *const T,
36 pub(super) end: *const T,
37}
38
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
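    /// Returns the remaining, not-yet-yielded items as a slice.
    ///
    /// A minimal usage sketch, assuming this `vec` module is exposed at the
    /// crate root as `allocator_api2::vec` and that `Vec::new`/`extend` are
    /// available as in `std`:
    ///
    /// ```
    /// use allocator_api2::vec::Vec;
    ///
    /// let mut v: Vec<char> = Vec::new();
    /// v.extend(['a', 'b', 'c']);
    ///
    /// let mut into_iter = v.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```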
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

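    /// Returns the remaining items as a mutable slice, allowing them to be
    /// modified in place before they are yielded.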
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

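    /// Returns a reference to the underlying allocator.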
    #[inline(always)]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    // Raw view over the remaining `ptr..end` range; shared by `as_mut_slice`
    // and `Drop`.
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }
}

impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

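// The raw pointers suppress the auto traits, so `Send`/`Sync` are restored
// manually: the iterator owns its remaining `T` values and the allocator, so
// it is exactly as `Send`/`Sync` as `T` and `A` themselves.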
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}

unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

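// Iteration scheme: `next` advances `ptr` from the front and `next_back`
// pulls `end` in from the back; the iterator is exhausted when they meet.
// Zero-sized types cannot be stepped over by element size, so for them the
// pointers are bumped byte-wise and serve only as counters.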
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline(always)]
    fn next(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else if size_of::<T>() == 0 {
            // For a ZST, `ptr.add(1)` would return the same address, so step
            // the address by one byte instead and use the pointer purely as a
            // progress counter.
            self.ptr = self.ptr.cast::<u8>().wrapping_add(1).cast();

            // A ZST has no bytes, so an all-zero value is a valid instance.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if size_of::<T>() == 0 {
            // For ZSTs the addresses are counters, so their difference is the
            // remaining length.
            addr(self.end).wrapping_sub(addr(self.ptr))
        } else {
            unsafe { self.end.offset_from(self.ptr) as usize }
        };
        (exact, Some(exact))
    }

    #[inline(always)]
    fn count(self) -> usize {
        self.len()
    }
}

impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline(always)]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if size_of::<T>() == 0 {
            // Mirror image of `next`: shrink the ZST counter from the back.
            // This must be a subtraction; adding would move `end` away from
            // `ptr` and the iterator would never report exhaustion.
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast();

            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.sub(1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }
}

impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {}

impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[cfg(not(no_global_oom_handling))]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    fn clone(&self) -> Self {
        // Only the elements that have not been yielded yet are cloned, into a
        // fresh vector backed by a clone of the allocator.
        let mut vec = Vec::<T, A>::with_capacity_in(self.len(), (*self.alloc).clone());
        vec.extend(self.as_slice().iter().cloned());
        vec.into_iter()
    }
}

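// Dropping the iterator drops the elements that were never yielded and then
// returns the backing allocation to the allocator. The guard below performs
// the deallocation in its own `Drop` impl, so the memory is still freed even
// if one of the element destructors panics.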
impl<T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // Reconstruct the original RawVec from `buf`, `cap`, and
                    // the allocator; dropping it frees the allocation itself
                    // without dropping any elements.
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // Drop the remaining elements in place; `guard` then deallocates.
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
    }
}

impl<T, A> Default for IntoIter<T, A>
where
    A: Allocator + Default,
{
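    /// Creates an empty iterator, using `A::default()` for the allocator.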
    fn default() -> Self {
        // `super::Vec` is spelled out by full path; the `use super::Vec`
        // import above is cfg-gated and may not be present.
        super::Vec::new_in(Default::default()).into_iter()
    }
}