// any_vec/mem/heap.rs — heap-backed `Mem` implementation.
1extern crate alloc;
2
3use alloc::alloc::{alloc, dealloc, handle_alloc_error, Layout, realloc};
4use core::cmp;
5use core::mem::ManuallyDrop;
6use core::ptr::NonNull;
7use crate::mem::{dangling, Mem, MemBuilder, MemBuilderSizeable, MemRawParts, MemResizable};
8
9/// Heap allocated memory.
10#[derive(Default, Clone, Copy)]
11pub struct Heap;
12impl MemBuilder for Heap {
13    /// Implements [`MemResizable`], [`MemRawParts`].
14    type Mem = HeapMem;
15
16    #[inline]
17    fn build(&mut self, element_layout: Layout) -> HeapMem {
18        HeapMem {
19            mem: dangling(&element_layout),
20            size: 0,
21            element_layout
22        }
23    }
24}
25impl MemBuilderSizeable for Heap{
26    #[inline]
27    fn build_with_size(&mut self, element_layout: Layout, capacity: usize) -> Self::Mem
28    {
29        let mut mem = self.build(element_layout);
30        mem.resize(capacity);
31        mem
32    }
33}
34
/// Heap allocated memory block.
///
/// Owns a raw allocation holding `size` elements, laid out back to back
/// according to `element_layout`.
pub struct HeapMem {
    mem: NonNull<u8>,       // dangling while size == 0 or element size is 0 (see `build`/`resize`)
    size: usize,        // in elements
    element_layout: Layout, // size is aligned
}
40
41impl Mem for HeapMem {
42    #[inline]
43    fn as_ptr(&self) -> *const u8 {
44        self.mem.as_ptr()
45    }
46
47    #[inline]
48    fn as_mut_ptr(&mut self) -> *mut u8 {
49        self.mem.as_ptr()
50    }
51
52    #[inline]
53    fn element_layout(&self) -> Layout {
54        self.element_layout
55    }
56
57    #[inline]
58    fn size(&self) -> usize {
59        self.size
60    }
61
62    fn expand(&mut self, additional: usize){
63        let requested_size = self.size() + additional;
64        let new_size = cmp::max(self.size() * 2, requested_size);
65        self.resize(new_size);
66    }
67}
68
impl MemResizable for HeapMem {
    /// Resize the allocation to exactly `new_size` elements.
    ///
    /// `new_size == 0` deallocates and resets the pointer to dangling;
    /// growing/shrinking reallocates. For zero-sized element layouts no
    /// allocation is ever performed — only the element count is updated.
    fn resize(&mut self, new_size: usize) {
        if self.size == new_size{
            return;
        }

        if self.element_layout.size() != 0 {
            // SAFETY: the layout math and alloc/realloc/dealloc calls below
            // rely on `self.size`/`self.mem` still describing the *current*
            // allocation; `self.size` is only updated after this block.
            unsafe{
                // Non checked mul, because this memory size already allocated.
                // (A previous resize proved element_size * size fits in usize.)
                let mem_layout = Layout::from_size_align_unchecked(
                    self.element_layout.size() * self.size,
                    self.element_layout.align()
                );

                self.mem =
                    if new_size == 0 {
                        // Shrinking to zero: free and fall back to a dangling,
                        // correctly aligned pointer (matches `build`).
                        dealloc(self.mem.as_ptr(), mem_layout);
                        dangling(&self.element_layout)
                    } else {
                        // mul carefully, to prevent overflow.
                        let new_mem_size = self.element_layout.size()
                            .checked_mul(new_size).unwrap();
                        let new_mem_layout = Layout::from_size_align_unchecked(
                            new_mem_size, self.element_layout.align()
                        );

                        if self.size == 0 {
                            // allocate: current pointer is dangling, so realloc
                            // would be UB — this must be a fresh alloc.
                            NonNull::new(alloc(new_mem_layout))
                        } else {
                            // reallocate the existing block in place if possible.
                            NonNull::new(realloc(
                                self.mem.as_ptr(), mem_layout,new_mem_size
                            ))
                        }
                        // Null return means allocation failure — abort via the
                        // global allocation error handler.
                        .unwrap_or_else(|| handle_alloc_error(new_mem_layout))
                    }
            }
        }
        self.size = new_size;
    }
}
111
112impl MemRawParts for HeapMem{
113    type Handle = NonNull<u8>;
114
115    #[inline]
116    fn into_raw_parts(self) -> (Self::Handle, Layout, usize) {
117        let this = ManuallyDrop::new(self);
118        (this.mem, this.element_layout, this.size)
119    }
120
121    #[inline]
122    unsafe fn from_raw_parts(handle: Self::Handle, element_layout: Layout, size: usize) -> Self {
123        Self{
124            mem: handle,
125            size,
126            element_layout
127        }
128    }
129}
130
131impl Drop for HeapMem {
132    fn drop(&mut self) {
133        self.resize(0);
134    }
135}
136
// SAFETY: `HeapMem` exclusively owns its raw allocation and exposes no
// interior mutability in this file, so transferring or sharing the container
// itself across threads is sound.
// NOTE(review): the *elements* are untyped bytes here — presumably Send/Sync
// of the stored type is enforced by the higher-level container; confirm at
// the call sites that use `HeapMem`.
unsafe impl Send for HeapMem{}
unsafe impl Sync for HeapMem{}