// bump_scope/bump_vec/into_iter.rs

use core::{
    alloc::Layout,
    fmt::Debug,
    iter::FusedIterator,
    marker::PhantomData,
    mem,
    ptr::{self, NonNull},
    slice,
};

#[cfg(feature = "panic-on-alloc")]
use core::mem::MaybeUninit;

use crate::{BumpAllocatorExt, SizedTypeProperties, polyfill::non_null};

#[cfg(feature = "panic-on-alloc")]
use crate::{BumpBox, BumpVec, FixedBumpVec, raw_fixed_bump_vec::RawFixedBumpVec};

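/// An iterator that moves the elements out of a `BumpVec`.
///
/// Returned by `BumpVec`'s `into_iter`. The remaining elements can be
/// inspected with `as_slice` / `as_mut_slice`. When the iterator is dropped,
/// any elements that were not yielded are dropped and the backing buffer is
/// returned to the allocator.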
pub struct IntoIter<T, A: BumpAllocatorExt> {
    /// Pointer to the start of the original allocation; kept so the buffer
    /// can be deallocated on drop.
    pub(super) buf: NonNull<T>,
    /// Capacity of the original allocation, in elements.
    pub(super) cap: usize,

    /// Points to the next element to be yielded from the front.
    pub(super) ptr: NonNull<T>,

    /// Points one past the last remaining element. For zero-sized types the
    /// address difference to `ptr` only encodes the remaining length.
    pub(super) end: NonNull<T>,

    /// The allocator the buffer was allocated in.
    pub(super) allocator: A,

    /// Marks this type as owning `T`s for the purposes of drop check.
    pub(super) marker: PhantomData<T>,
}

impl<T: Debug, A: BumpAllocatorExt> Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: BumpAllocatorExt> IntoIter<T, A> {
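    /// Returns the remaining elements of this iterator as a slice.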
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

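    /// Returns the remaining elements of this iterator as a mutable slice.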
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

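    /// Returns a reference to the allocator backing this iterator.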
    #[must_use]
    #[inline(always)]
    pub fn allocator(&self) -> &A {
        &self.allocator
    }

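    // The remaining elements as a raw slice pointer; used by the `Drop` impl
    // to drop the not-yet-yielded elements in place.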
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }
}

impl<T, A: BumpAllocatorExt> AsRef<[T]> for IntoIter<T, A> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, A: BumpAllocatorExt> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr == self.end {
            None
        } else if T::IS_ZST {
            // Zero-sized types occupy no memory, so there is nothing to read;
            // the distance between `ptr` and `end` only encodes the remaining
            // length. Count one element down by moving `end` back a byte.
            self.end = unsafe { non_null::wrapping_byte_sub(self.end, 1) };

            // A zero-sized value can be created out of thin air.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };

            Some(unsafe { old.as_ptr().read() })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            // For zero-sized types the address difference *is* the remaining length.
            self.end.addr().get().wrapping_sub(self.ptr.addr().get())
        } else {
            unsafe { non_null::offset_from_unsigned(self.end, self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }
}

impl<T, A: BumpAllocatorExt> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.end == self.ptr {
            None
        } else if T::IS_ZST {
            // Same counting scheme as in `next`: moving `end` back a byte
            // records that one zero-sized element has been consumed.
            self.end = unsafe { non_null::wrapping_byte_sub(self.end, 1) };

            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.sub(1) };

            Some(unsafe { self.end.as_ptr().read() })
        }
    }
}

impl<T, A: BumpAllocatorExt> ExactSizeIterator for IntoIter<T, A> {}
impl<T, A: BumpAllocatorExt> FusedIterator for IntoIter<T, A> {}

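// `TrustedLen` is unstable, hence this impl is gated behind a nightly feature.
// `TrustedLen` requires `size_hint` to report the exact remaining length,
// which `size_hint` above does.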
166#[cfg(feature = "nightly-trusted-len")]
167unsafe impl<T, A: BumpAllocatorExt> core::iter::TrustedLen for IntoIter<T, A> {}
168
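// Cloning builds a new `BumpVec` containing the remaining elements and
// returns that vector's iterator. This uses the panicking allocation path,
// hence the `panic-on-alloc` gate.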
169#[cfg(feature = "panic-on-alloc")]
170impl<T: Clone, A: BumpAllocatorExt + Clone> Clone for IntoIter<T, A> {
171 fn clone(&self) -> Self {
172 let allocator = self.allocator.clone();
173 let ptr = self.allocator.allocate_slice::<MaybeUninit<T>>(self.len());
174 let slice = NonNull::slice_from_raw_parts(ptr, self.len());
175 let boxed = unsafe { BumpBox::from_raw(slice) };
176 let boxed = boxed.init_clone(self.as_slice());
177 let fixed = FixedBumpVec::from_init(boxed);
178 let fixed = unsafe { RawFixedBumpVec::from_cooked(fixed) };
179 let vec = BumpVec { fixed, allocator };
180 vec.into_iter()
181 }
182}
183
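// Dropping the iterator drops all elements that were not yielded and then
// deallocates the buffer. The guard ensures the buffer is deallocated even
// if one of the element destructors panics.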
impl<T, A: BumpAllocatorExt> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: BumpAllocatorExt>(&'a mut IntoIter<T, A>);

        impl<T, A: BumpAllocatorExt> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // Reconstruct the layout of the original allocation from
                    // `buf` and `cap` and return the memory to the allocator.
                    let ptr = self.0.buf.cast();
                    let layout = Layout::from_size_align_unchecked(self.0.cap * T::SIZE, T::ALIGN);
                    self.0.allocator.deallocate(ptr, layout);
                }
            }
        }

        let guard = DropGuard(self);
        // Drop the elements that were never yielded; the guard then
        // deallocates the buffer, even if one of these destructors panics.
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
    }
}