#![no_std]
#![feature(layout_for_ptr, allocator_api)]

pub(crate) extern crate alloc;

mod r#trait;
use alloc::alloc::Global;
pub use r#trait::*;
use core::{alloc::{Allocator, Layout}, ptr::NonNull, ops::Range, mem::needs_drop, iter::FusedIterator};

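/// An owning, by-value iterator over the elements of a heap-allocated slice
/// (e.g. a `Box<[T]>`).
///
/// `ptr` keeps the original allocation (pointer and length) so it can be
/// freed on drop, while `range` tracks the elements that have not been
/// yielded yet. Values are moved out with `ptr::read`; whatever remains
/// unread is dropped in place when the iterator is dropped, and the
/// allocation itself is returned to `alloc`.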
pub struct IntoIter<T, A: Allocator = Global> {
    pub(crate) ptr: NonNull<[T]>,
    pub(crate) range: Range<*mut T>,
    pub(crate) alloc: A
}

impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        self.nth(0)
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }

    #[inline(always)]
    fn count(self) -> usize where Self: Sized {
        self.len()
    }

    #[inline]
    fn last(mut self) -> Option<Self::Item> where Self: Sized {
        if self.range.end <= self.range.start {
            return None
        }

        // Read only the last element; everything still inside `range` is
        // dropped by `Drop` when `self` goes out of scope here.
        unsafe {
            self.range.end = self.range.end.sub(1);
            Some(core::ptr::read(self.range.end))
        }
    }

    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        let len = self.len();

        if n >= len {
            // Exhausted: empty the range before running any destructors so
            // that a panicking `drop` cannot cause a double drop, then drop
            // whatever was left.
            let start = self.range.start;
            self.range.start = self.range.end;
            unsafe {
                core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(start, len));
            }
            return None
        }

        unsafe {
            // `n < len`, so the offsets below stay inside the allocation.
            let start = self.range.start;
            let ptr = start.add(n);
            // Advance the cursor past the element we are about to read
            // before dropping the `n` skipped elements, again so that a
            // panicking destructor cannot cause a double drop.
            self.range.start = ptr.add(1);
            core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(start, n));
            debug_assert!(!ptr.is_null());
            Some(core::ptr::read(ptr))
        }
    }
}

impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline(always)]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.nth_back(0)
    }

    #[inline]
    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        let len = self.len();

        if n >= len {
            // Exhausted: empty the range before running any destructors so
            // that a panicking `drop` cannot cause a double drop, then drop
            // whatever was left.
            let start = self.range.start;
            self.range.end = start;
            unsafe {
                core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(start, len));
            }
            return None
        }

        unsafe {
            // `n < len`, so stepping back by `n + 1` stays inside the
            // allocation.
            let ptr = self.range.end.sub(n + 1);
            self.range.end = ptr;
            // Drop the `n` skipped elements behind the one we return.
            core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(ptr.add(1), n));
            debug_assert!(!ptr.is_null());
            Some(core::ptr::read(ptr))
        }
    }
}

impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    #[inline(always)]
    fn len(&self) -> usize {
        // Number of elements not yet yielded. Note that this divides by
        // `size_of::<T>()`, so `T` must not be zero-sized.
        ((self.range.end as usize) - (self.range.start as usize)) / core::mem::size_of::<T>()
    }
}

impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

impl<T, A: Allocator> Drop for IntoIter<T, A> {
    #[inline]
    fn drop(&mut self) {
        // Drop any elements that were never yielded.
        if needs_drop::<T>() {
            let mut ptr = self.range.start;

            while ptr < self.range.end {
                unsafe {
                    core::ptr::drop_in_place(ptr);
                    ptr = ptr.add(1);
                }
            }
        }

        // Free the original allocation, regardless of how far iteration got.
        unsafe {
            let layout = Layout::for_value_raw(self.ptr.as_ptr());
            self.alloc.deallocate(self.ptr.cast(), layout);
        }
    }
}

// SAFETY: `IntoIter` owns its elements, so it can be sent or shared across
// threads exactly when `T` (and the allocator) can.
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[cfg(test)]
mod test {
    extern crate std;
    use std::{prelude::rust_2021::*, println};

    use alloc::vec;
    use crate::BoxIntoIter;

    #[test]
    fn test() {
        let iter = vec!["hello".to_string(), "world".to_string()].into_boxed_slice().into_iter();
        println!("{:?}", iter.last())
    }
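
    // A small additional check, mirroring the call pattern of `test` above:
    // it walks the iterator from both ends and verifies the reported length,
    // assuming `BoxIntoIter::into_iter` yields elements by value, front to back.
    #[test]
    fn test_double_ended() {
        let mut iter = vec!["a".to_string(), "b".to_string(), "c".to_string()]
            .into_boxed_slice()
            .into_iter();

        assert_eq!(iter.len(), 3);
        assert_eq!(iter.next().as_deref(), Some("a"));
        assert_eq!(iter.next_back().as_deref(), Some("c"));
        assert_eq!(iter.len(), 1);
        assert_eq!(iter.next().as_deref(), Some("b"));
        assert_eq!(iter.next(), None);
    }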
}