base/
array.rs

//! Dynamically sized array types: `Array<T, A>` (a growable, `Vec`-like
//! array generic over its allocator) and `SmallArray<S, A>` (small-size
//! optimized storage that starts inline on the stack and spills to the heap).
2
/// A growable, heap-allocated array (similar to `Vec<T>`), generic over
/// the allocator `A` used for its backing storage.
pub struct Array<T, A: Allocator = GlobalAllocator> {
   // Number of initialized elements; always <= buf.capacity.
   size: usize,
   // Owning handle to the raw allocation (pointer + capacity + allocator).
   buf: RawArray<T, A>,
}
7
8impl<T, A: Allocator> Array<T, A> {
9   pub fn new_with(allocator: A) -> Self {
10      Array {
11         size: 0,
12         buf: RawArray::new(allocator),
13      }
14   }
15
16   pub fn resize_with<F>(&mut self, new_size: usize, f: F)
17   where
18      F: Fn() -> T, {
19      if new_size < self.size && needs_drop::<T>() {
20         for i in new_size..self.size {
21            unsafe {
22               drop_in_place(self.buf.pointer.as_ptr().offset(i as isize));
23            }
24         }
25      } else if new_size > self.size {
26         if new_size > self.buf.capacity {
27            self.reserve(new_size);
28         }
29
30         for i in self.size..new_size {
31            unsafe {
32               self.buf.pointer.as_ptr().offset(i as isize).write(f());
33            }
34         }
35      }
36
37      self.size = new_size;
38   }
39
40   pub fn resize(&mut self, new_size: usize, value: T)
41   where
42      T: Clone, {
43      self.resize_with(new_size, || value.clone());
44   }
45
46   pub fn resize_default(&mut self, new_size: usize)
47   where
48      T: Default, {
49      self.resize_with(new_size, || T::default());
50   }
51
52   pub fn reserve(&mut self, new_capacity: usize) {
53      self.buf.reserve(new_capacity);
54   }
55
56   fn grow_auto(&mut self) {
57      let single_layout = Layout::new::<T>();
58
59      let old_capacity_bytes = self.buf.capacity * single_layout.size;
60      assert!(old_capacity_bytes <= (usize::MAX / 4));
61
62      let new_capacity = if self.buf.capacity == 0 {
63         1
64      } else {
65         self.buf.capacity * 2
66      };
67
68      self.reserve(new_capacity);
69   }
70
71   #[inline]
72   pub fn len(&self) -> usize {
73      return self.size;
74   }
75
76   #[inline]
77   pub fn capacity(&self) -> usize {
78      return self.buf.capacity;
79   }
80
81   pub fn push(&mut self, value: T) {
82      if self.size == self.buf.capacity {
83         self.grow_auto();
84      }
85
86      unsafe {
87         self.buf.pointer.as_ptr().offset(self.size as isize).write(value);
88      }
89
90      self.size += 1;
91   }
92
93   pub fn pop(&mut self) -> Option<T> {
94      return if self.size == 0 {
95         None
96      } else {
97         let value = unsafe { self.buf.pointer.as_ptr().offset((self.size - 1) as isize).read() };
98
99         self.size -= 1;
100         Some(value)
101      };
102   }
103
104   pub fn clear(&mut self) {
105      if needs_drop::<T>() {
106         unsafe {
107            for i in 0..self.size {
108               drop_in_place(self.buf.pointer.as_ptr().offset(i as isize));
109            }
110         }
111      }
112
113      self.size = 0;
114   }
115
116   #[inline]
117   pub fn is_empty(&self) -> bool {
118      return self.size == 0;
119   }
120}
121
impl<T> Array<T, GlobalAllocator> {
   /// Creates an empty array backed by the global allocator.
   pub fn new() -> Self {
      Self::new_with(GlobalAllocator)
   }
}
127
128impl<T, A: Allocator> Drop for Array<T, A> {
129   fn drop(&mut self) {
130      if !self.buf.pointer.as_ptr().is_null() {
131         self.clear();
132      }
133   }
134}
135
136impl<T, A: Allocator> Deref for Array<T, A> {
137   type Target = [T];
138
139   #[inline]
140   fn deref(&self) -> &Self::Target {
141      unsafe { slice::from_raw_parts(self.buf.pointer.as_ptr(), self.size) }
142   }
143}
144
145impl<T, A: Allocator> DerefMut for Array<T, A> {
146   fn deref_mut(&mut self) -> &mut Self::Target {
147      unsafe { slice::from_raw_parts_mut(self.buf.pointer.as_ptr(), self.size) }
148   }
149}
150
151impl<T, A: Allocator> Extend<T> for Array<T, A> {
152   fn extend<I>(&mut self, iter: I)
153   where
154      I: IntoIterator<Item = T>, {
155      for e in iter {
156         self.push(e);
157      }
158   }
159}
160
161impl<'a, T: 'a, A: Allocator> Extend<&'a T> for Array<T, A>
162where
163   T: Clone,
164{
165   fn extend<I>(&mut self, iter: I)
166   where
167      I: IntoIterator<Item = &'a T>, {
168      for e in iter {
169         self.push(e.clone());
170      }
171   }
172}
173
174impl<T, A: Allocator> FromIterator<T> for Array<T, A>
175where
176   A: Default,
177{
178   fn from_iter<I>(iter: I) -> Self
179   where
180      I: IntoIterator<Item = T>, {
181      let mut array = Array::new_with(A::default());
182      array.extend(iter);
183      return array;
184   }
185}
186
/// Owning iterator over the elements of an `Array`; yields each element
/// by value.
pub struct IntoIter<T, A: Allocator> {
   // The array being consumed; its buffer stays alive for the iteration.
   array: Array<T, A>,
   // Index of the next element to yield.
   current: usize,
   // Total number of elements (the array's length at construction).
   size: usize,
}
192
193impl<T, A: Allocator> Iterator for IntoIter<T, A> {
194   type Item = T;
195
196   fn next(&mut self) -> Option<T> {
197      return if self.current >= self.size {
198         None
199      } else {
200         unsafe {
201            let index = self.current;
202            self.current += 1;
203
204            Some(read(self.array.buf.pointer.as_ptr().offset(index as isize)))
205         }
206      };
207   }
208
209   fn size_hint(&self) -> (usize, Option<usize>) {
210      let remaining = self.size - self.current;
211      (remaining, Some(remaining))
212   }
213}
214
215impl<T, A: Allocator> Drop for IntoIter<T, A> {
216   fn drop(&mut self) {
217      // Drop the remaining elements if we didn't iter
218      // until the end.
219      if needs_drop::<T>() {
220         unsafe {
221            for i in self.current..self.size {
222               drop_in_place(self.array.buf.pointer.as_ptr().offset(i as isize))
223            }
224         }
225      }
226
227      // Set size of the array to 0 so it doesn't drop anything else.
228      self.array.size = 0;
229   }
230}
231
/// Low-level owning handle for an array's backing allocation. Tracks only
/// the allocation (pointer + capacity), not how many elements are
/// initialized — that bookkeeping belongs to `Array::size`.
pub struct RawArray<T, A: Allocator> {
   // Dangling when nothing is allocated (capacity 0, or zero-sized T);
   // never null.
   pub pointer: NonNull<T>,
   // Capacity in elements; `!0` sentinel when T is zero-sized.
   pub capacity: usize,
   pub allocator: A,
   // Marks logical ownership of `T` values for the drop checker.
   _ghost: PhantomData<T>,
}
238
239impl<T, A: Allocator> RawArray<T, A> {
240   pub fn new(allocator: A) -> Self {
241      let capacity = if size_of::<T>() == 0 { !0 } else { 0 };
242
243      return RawArray{
244         pointer: NonNull::dangling(),
245         capacity,
246         allocator,
247         _ghost: PhantomData,
248      };
249   }
250
251   pub fn reserve(&mut self, new_capacity: usize) {
252      if new_capacity <= self.capacity {
253         return;
254      }
255
256      let mut pointer = unsafe {
257         allocate_array::<T>(&mut self.allocator, new_capacity)
258            .expect("Allocation error")
259      };
260
261      if self.capacity > 0 {
262         unsafe {
263            ptr::copy(self.pointer.as_ptr() as *const T, pointer.as_ptr(), self.capacity);
264
265            self
266               .allocator
267               .deallocate_aligned(
268                  self.pointer.cast::<u8>().as_ptr(),
269                  Layout::from_size(self.capacity)
270               );
271         }
272      }
273
274      self.pointer = pointer.cast::<T>();
275      self.capacity = new_capacity;
276   }
277}
278
279impl<T, A: Allocator> Drop for RawArray<T, A> {
280   fn drop(&mut self) {
281      if !self.pointer.as_ptr().is_null() {
282         unsafe {
283            self.allocator
284               .deallocate_aligned(self.pointer.cast::<u8>().as_ptr(), Layout::from_size(self.capacity));
285         }
286      }
287   }
288}
289
/// Abstraction over a fixed-size, stack-allocated array (`[T; N]`), used
/// as the inline storage of `SmallArray`.
pub trait StackArray {
   /// Element type stored in the array.
   type Element;

   /// Fixed capacity of the array (the `N` in `[T; N]`).
   fn len(&self) -> usize;
   /// Pointer to the first element.
   fn as_ptr(&self) -> *const Self::Element;
   /// Mutable pointer to the first element.
   fn as_mut_ptr(&mut self) -> *mut Self::Element;
}
297
/// Active storage of a `SmallArray`: either the inline stack buffer
/// (plus a count of initialized elements) or a spilled heap array.
enum SmallArrayData<S, A = GlobalAllocator>
where
   S: StackArray,
   A: Allocator, {
   // (initialized element count, inline storage). Wrapped in ManuallyDrop
   // because only the first `usize` elements are initialized.
   Stack(usize, ManuallyDrop<S>),
   // Heap storage, used once the inline capacity has been exceeded.
   Heap(Array<S::Element, A>),
}
305
/// A small-size-optimized array: elements live in the inline stack buffer
/// `S` until its capacity is exceeded, then spill into a heap `Array`.
pub struct SmallArray<S, A = GlobalAllocator>
where
   S: StackArray,
   A: Allocator, {
   // Held until the first spill to the heap; moved into the heap Array at
   // that point (becomes None afterwards).
   alloc: Option<A>,
   data: SmallArrayData<S, A>,
}
313
impl<S, A> SmallArray<S, A>
where
   S: StackArray,
   A: Allocator,
{
   /// Number of initialized elements, regardless of storage.
   #[inline]
   pub fn len(&self) -> usize {
      self.get_infos().1
   }

   /// Current capacity: the inline array's fixed length while on the
   /// stack, the heap array's capacity after spilling.
   #[inline]
   pub fn capacity(&self) -> usize {
      self.get_infos().2
   }

   /// Ensures room for at least `new_cap` elements, spilling the inline
   /// storage into a heap `Array` when it no longer fits.
   pub fn reserve(&mut self, new_cap: usize) {
      if new_cap <= self.capacity() {
         return;
      }

      if let SmallArrayData::Stack(used, array) = &mut self.data {
         if new_cap > array.len() {
            // Spill to the heap. The allocator is taken out of the Option:
            // once Heap, the data never returns to the Stack variant, so
            // the allocator is only needed this once.
            let alloc = self.alloc.take().unwrap();
            let mut new_array = Array::new_with(alloc);
            new_array.reserve(new_cap);

            // Move the initialized elements out of the inline buffer.
            // ManuallyDrop on the stack storage prevents a double drop.
            let ptr = array.as_mut_ptr();
            for i in 0..*used {
               new_array.push(unsafe { ptr::read(ptr.offset(i as isize)) });
            }

            self.data = SmallArrayData::Heap(new_array);
         }
      }

      // Whether freshly spilled or already on the heap, delegate the
      // actual capacity growth to the heap array.
      if let SmallArrayData::Heap(array) = &mut self.data {
         array.reserve(new_cap);
      }
   }

   /// Creates an empty small array backed by `alloc`.
   pub fn new_with(alloc: A) -> Self {
      Self {
         alloc: Some(alloc),
         // NOTE(review): `assume_init` on fully uninitialized storage is
         // formally UB for element types with invalid bit patterns, even
         // behind ManuallyDrop — consider MaybeUninit-based storage.
         data: SmallArrayData::Stack(0, unsafe { MaybeUninit::uninit().assume_init() }),
      }
   }

   /// Returns (data pointer, length, capacity) for the active storage.
   #[inline]
   fn get_infos(&self) -> (*const S::Element, usize, usize) {
      match &self.data {
         SmallArrayData::Stack(used, array) => (array.as_ptr(), *used, array.len()),
         SmallArrayData::Heap(array) => (array.as_ptr(), array.len(), array.capacity()),
      }
   }

   /// Mutable variant of `get_infos`.
   #[inline]
   fn get_infos_mut(&mut self) -> (*mut S::Element, usize, usize) {
      match &mut self.data {
         SmallArrayData::Stack(used, array) => (array.as_mut_ptr(), *used, array.len()),
         SmallArrayData::Heap(array) => (array.as_mut_ptr(), array.len(), array.capacity()),
      }
   }

   /// Appends `element`, spilling to the heap when the inline buffer is full.
   pub fn push(&mut self, element: S::Element) {
      let (_, len, cap) = self.get_infos_mut();
      if len == cap {
         // Geometric growth; `reserve` handles the stack -> heap move.
         self.reserve(len * 2);
      }

      match &mut self.data {
         SmallArrayData::Stack(used, array) => {
            // SAFETY: still on the stack after the reserve above, so
            // `*used` < inline capacity and the slot is uninitialized.
            unsafe {
               ptr::write(array.as_mut_ptr().offset(*used as isize), element);
            }
            *used += 1;
         }
         SmallArrayData::Heap(array) => {
            array.push(element);
         }
      }
   }

   /// Removes and returns the last element, or `None` when empty.
   pub fn pop(&mut self) -> Option<S::Element> {
      if self.len() == 0 {
         return None;
      }

      match &mut self.data {
         SmallArrayData::Stack(used, array) => {
            *used -= 1;
            // SAFETY: len > 0 was checked above; the slot at the
            // decremented index holds an initialized element whose
            // ownership moves to the caller.
            unsafe { Some(ptr::read(array.as_mut_ptr().offset(*used as isize))) }
         }
         SmallArrayData::Heap(array) => array.pop(),
      }
   }

   /// Drops all elements and resets the length to zero.
   pub fn clear(&mut self) {
      match &mut self.data {
         SmallArrayData::Stack(used, array) => {
            if needs_drop::<S::Element>() {
               for i in 0..*used {
                  // SAFETY: indices `0..*used` hold initialized elements.
                  unsafe {
                     ptr::drop_in_place(array.as_mut_ptr().offset(i as isize));
                  }
               }
            }
            *used = 0;
         }
         SmallArrayData::Heap(array) => array.clear(),
      }
   }

   /// `true` when the array holds no elements.
   #[inline]
   pub fn is_empty(&self) -> bool {
      self.len() == 0
   }

   /// Resizes to `new_size`, filling new slots with values from `f` and
   /// dropping excess elements when shrinking.
   pub fn resize_with<F>(&mut self, new_size: usize, f: F)
   where
      F: Fn() -> S::Element, {
      // May switch Stack -> Heap, so it must happen before the match.
      self.reserve(new_size);

      let (ptr, len, _) = self.get_infos_mut();

      match &mut self.data {
         SmallArrayData::Stack(used, _) => {
            if new_size < len && needs_drop::<S::Element>() {
               for i in new_size..len {
                  // SAFETY: indices `new_size..len` hold initialized elements.
                  unsafe {
                     ptr::drop_in_place(ptr.offset(i as isize));
                  }
               }
            } else if new_size > len {
               for i in len..new_size {
                  // SAFETY: `reserve(new_size)` above guarantees capacity;
                  // slots `len..new_size` are uninitialized.
                  unsafe {
                     ptr::write(ptr.offset(i as isize), f());
                  }
               }
            }

            *used = new_size;
         }
         SmallArrayData::Heap(array) => array.resize_with(new_size, f),
      }
   }

   /// Resizes to `new_size`, filling any new slots with clones of `value`.
   pub fn resize(&mut self, new_size: usize, value: S::Element)
   where
      S::Element: Clone, {
      self.resize_with(new_size, || value.clone());
   }

   /// Resizes to `new_size`, filling any new slots with the default value.
   pub fn resize_default(&mut self, new_size: usize)
   where
      S::Element: Default, {
      self.resize_with(new_size, || S::Element::default());
   }
}
472
impl<S, A> Drop for SmallArray<S, A>
where
   S: StackArray,
   A: Allocator,
{
   fn drop(&mut self) {
      // Dropping the elements is enough: the heap variant's `Array` frees
      // its own buffer via its Drop, and the stack variant's storage is
      // inline (no allocation to release).
      self.clear();
   }
}
482
483impl<S, A> Deref for SmallArray<S, A>
484where
485   S: StackArray,
486   A: Allocator,
487{
488   type Target = [S::Element];
489
490   #[inline]
491   fn deref(&self) -> &Self::Target {
492      let (ptr, len, _) = self.get_infos();
493      unsafe { slice::from_raw_parts(ptr, len) }
494   }
495}
496
497impl<S, A> DerefMut for SmallArray<S, A>
498where
499   S: StackArray,
500   A: Allocator,
501{
502   #[inline]
503   fn deref_mut(&mut self) -> &mut Self::Target {
504      let (ptr, len, _) = self.get_infos_mut();
505      unsafe { slice::from_raw_parts_mut(ptr, len) }
506   }
507}
508
impl<S> SmallArray<S, GlobalAllocator>
where
   S: StackArray,
{
   /// Creates an empty small array backed by the global allocator.
   pub fn new() -> Self {
      Self::new_with(GlobalAllocator)
   }
}
517
518impl<T, S, A: Allocator> Extend<T> for SmallArray<S, A>
519where
520   T: Borrow<S::Element>,
521   S: StackArray,
522   S::Element: Clone,
523{
524   fn extend<I>(&mut self, iter: I)
525   where
526      I: IntoIterator<Item = T>, {
527      for e in iter {
528         self.push(e.borrow().clone());
529      }
530   }
531}
532
// Implements `StackArray` for `[T; $len]` and exports `$name` as a
// `SmallArray` alias with that inline capacity. Uses the unstable
// `decl_macro` (`macro`) syntax.
macro impl_stack_array($len:expr, $name:ident) {
   impl<T> StackArray for [T; $len] {
      type Element = T;

      // Fixed capacity — the array length itself.
      #[inline]
      fn len(&self) -> usize {
         $len
      }

      #[inline]
      fn as_ptr(&self) -> *const Self::Element {
         (self as &[Self::Element]).as_ptr()
      }

      #[inline]
      fn as_mut_ptr(&mut self) -> *mut Self::Element {
         (self as &mut [Self::Element]).as_mut_ptr()
      }
   }

   // Convenience alias, e.g. `SmallArray8<T> = SmallArray<[T; 8]>`.
   pub type $name<T, A = GlobalAllocator> = SmallArray<[T; $len], A>;
}
555
// @Todo Re-do this when const generics land: a single
// `impl<T, const N: usize> StackArray for [T; N]` would replace all of
// these fixed-size instantiations.
impl_stack_array!(1, SmallArray1);
impl_stack_array!(2, SmallArray2);
impl_stack_array!(4, SmallArray4);
impl_stack_array!(8, SmallArray8);
impl_stack_array!(16, SmallArray16);
impl_stack_array!(24, SmallArray24);
impl_stack_array!(32, SmallArray32);
impl_stack_array!(64, SmallArray64);
impl_stack_array!(128, SmallArray128);
566
567// MODULES //
568
569/// Provides an intrusive linked list.
570pub mod linked_list;
571
572// EXPORTS //
573
574pub use self::linked_list::LinkedList;
575
576// IMPORTS //
577
578use {
579   crate::alloc::{allocate_array, Allocator, GlobalAllocator, Layout},
580   core::{
581      borrow::Borrow,
582      iter::{FromIterator, IntoIterator},
583      marker::PhantomData,
584      mem::{needs_drop, size_of, ManuallyDrop, MaybeUninit},
585      ops::{Deref, DerefMut},
586      ptr::{self, drop_in_place, read, NonNull},
587      slice,
588   },
589};