// bump_scope/mut_bump_vec/into_iter.rs
use core::{
2 fmt::Debug,
3 iter::FusedIterator,
4 marker::PhantomData,
5 mem,
6 ptr::{self, NonNull},
7 slice,
8};
9
10use crate::{BumpAllocatorExt, SizedTypeProperties, polyfill::non_null};
11
/// An owning iterator over the elements of a bump-allocated vector.
///
/// Yields elements by value from front (`next`) and back (`next_back`).
/// Elements not yielded by the time the iterator is dropped are dropped
/// in [`Drop`]; the backing memory stays with the bump allocator.
pub struct IntoIter<T, A> {
    // Pointer to the next front element. For ZSTs this is dangling.
    ptr: NonNull<T>,
    // One past the last element. For ZSTs the remaining length is encoded
    // as the address offset between `end` and `ptr` (see `new` / `len`).
    end: NonNull<T>,
    // Kept alive so the allocation outlives the iterator; never read.
    #[expect(dead_code)]
    allocator: A,

    // Owns `T`s (and an `A`) for drop-check / variance purposes.
    marker: PhantomData<(A, T)>,
}
31
// SAFETY: `IntoIter` owns its remaining `T`s and its `A`; the raw pointers
// are not shared with anyone else, so it is `Send`/`Sync` exactly when both
// `T` and `A` are.
unsafe impl<T: Send, A: Send> Send for IntoIter<T, A> {}
unsafe impl<T: Sync, A: Sync> Sync for IntoIter<T, A> {}
34
35impl<T: Debug, A: BumpAllocatorExt> Debug for IntoIter<T, A> {
36 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
37 f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
38 }
39}
40
impl<T, A> IntoIter<T, A> {
    /// Creates an iterator that takes ownership of the elements of `slice`.
    ///
    /// # Safety
    ///
    /// `slice` must point to `slice.len()` initialized values that the
    /// returned iterator is allowed to read (move out of) and drop, and that
    /// remain valid for as long as `allocator` keeps the allocation alive.
    pub(crate) unsafe fn new(slice: NonNull<[T]>, allocator: A) -> Self {
        unsafe {
            if T::IS_ZST {
                IntoIter {
                    // ZSTs occupy no memory, so `ptr` can dangle; the
                    // remaining count is encoded as the byte offset of `end`
                    // past the dangling base (decoded again in `len`).
                    ptr: NonNull::dangling(),
                    end: non_null::wrapping_byte_add(NonNull::dangling(), slice.len()),
                    allocator,
                    marker: PhantomData,
                }
            } else {
                let start = non_null::as_non_null_ptr(slice);
                let end = start.add(slice.len());

                IntoIter {
                    ptr: start,
                    end,
                    allocator,
                    marker: PhantomData,
                }
            }
        }
    }

    /// Returns the number of elements not yet yielded.
    #[must_use]
    #[inline(always)]
    pub fn len(&self) -> usize {
        if T::IS_ZST {
            // Decode the count stored as an address offset in `end`.
            // `wrapping_sub` because `end` was built with a wrapping add.
            self.end.addr().get().wrapping_sub(self.ptr.addr().get())
        } else {
            // SAFETY: `ptr` and `end` delimit the same allocation and
            // `ptr <= end`, so the unsigned distance is well-defined.
            unsafe { non_null::offset_from_unsigned(self.end, self.ptr) }
        }
    }

    /// Returns `true` if no elements remain.
    #[must_use]
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        // Holds for ZSTs too: `end` equals the dangling base iff the count is 0.
        self.ptr == self.end
    }

    /// Returns the remaining elements as a shared slice.
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr` points to `len()` initialized elements we own.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

    /// Returns the remaining elements as a mutable slice.
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: same as `as_slice`, and `&mut self` grants unique access.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    // Raw-pointer view of the remaining elements; shared by the safe accessors.
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }
}
127
128impl<T, A> AsRef<[T]> for IntoIter<T, A> {
129 #[inline]
130 fn as_ref(&self) -> &[T] {
131 self.as_slice()
132 }
133}
134
135impl<T, A> Iterator for IntoIter<T, A> {
136 type Item = T;
137
138 #[inline]
139 fn next(&mut self) -> Option<Self::Item> {
140 if self.ptr == self.end {
141 None
142 } else if T::IS_ZST {
143 self.end = unsafe { non_null::wrapping_byte_sub(self.end, 1) };
146
147 Some(unsafe { mem::zeroed() })
149 } else {
150 let old = self.ptr;
151 self.ptr = unsafe { self.ptr.add(1) };
152
153 Some(unsafe { old.as_ptr().read() })
154 }
155 }
156
157 #[inline]
158 fn size_hint(&self) -> (usize, Option<usize>) {
159 let exact = self.len();
160 (exact, Some(exact))
161 }
162
163 #[inline]
164 fn count(self) -> usize {
165 self.len()
166 }
167}
168
169impl<T, A> DoubleEndedIterator for IntoIter<T, A> {
170 #[inline]
171 fn next_back(&mut self) -> Option<Self::Item> {
172 if self.end == self.ptr {
173 None
174 } else if T::IS_ZST {
175 self.end = unsafe { non_null::wrapping_byte_sub(self.end, 1) };
177
178 Some(unsafe { mem::zeroed() })
180 } else {
181 self.end = unsafe { self.end.sub(1) };
182
183 Some(unsafe { self.end.as_ptr().read() })
184 }
185 }
186}
187
// `len()` reports the exact remaining count, and once `ptr == end` every
// further `next()` keeps returning `None` — so both marker traits hold.
impl<T, A> ExactSizeIterator for IntoIter<T, A> {}
impl<T, A> FusedIterator for IntoIter<T, A> {}
190
#[cfg(feature = "nightly-trusted-len")]
// SAFETY: `size_hint` returns `(len, Some(len))` with the exact number of
// remaining elements, as `TrustedLen` requires.
unsafe impl<T, A> core::iter::TrustedLen for IntoIter<T, A> {}
193
impl<T, A> Drop for IntoIter<T, A> {
    #[inline]
    fn drop(&mut self) {
        // Drop the elements that were never yielded. The backing memory is
        // owned by the bump allocator and is deliberately not deallocated here.
        unsafe {
            // SAFETY: `ptr .. ptr + len()` covers exactly the remaining
            // initialized elements, which this iterator owns and may drop.
            NonNull::slice_from_raw_parts(self.ptr, self.len()).as_ptr().drop_in_place();
        }
    }
}