// base/alloc.rs

/// The program-wide allocator instance.  `#[global_allocator]` routes every
/// heap allocation made through the `alloc` crate (e.g. `Box`, `Vec`) to
/// [`GlobalAllocator`]'s `GlobalAlloc` implementation below.
#[global_allocator]
pub static GLOBAL: GlobalAllocator = GlobalAllocator;
3
4/// Allocates a chunk of memory.
5#[no_mangle]
6pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
7   let layout = Layout::from_size_align(size, align);
8   unsafe {
9      HEAP
10         .lock()
11         .as_mut()
12         .expect("must first initialise heap before allocating memory")
13         .allocate(layout)
14         .unwrap()
15         .as_ptr()
16   }
17}
18
/// Frees a chunk of memory.
///
/// `pointer` must be non-null and must have been returned by
/// [`__rust_allocate`] with the same `oldSize` and `align`: it is wrapped in
/// `NonNull` without a null check before being handed to the heap.
///
/// ## Panics
///
/// Panics if the heap has not been initialised yet.
#[no_mangle]
pub extern "C" fn __rust_deallocate(pointer: *mut u8, oldSize: usize, align: usize) {
   // Rebuild the layout the block was originally allocated with.
   let layout = Layout::from_size_align(oldSize, align);
   unsafe {
      HEAP
         .lock()
         .as_mut()
         .expect("must first initialise heap before attempting to deallocate memory")
         .deallocate(NonNull::new_unchecked(pointer), layout);
   }
}
31
32/// Reallocates a chunk of memory.
33#[no_mangle]
34pub extern "C" fn __rust_reallocate(
35   pointer: *mut u8,
36   oldSize: usize,
37   newSize: usize,
38   align: usize,
39) -> *mut u8 {
40   let newPointer = __rust_allocate(newSize, align);
41   return if newPointer.is_null() {
42      newPointer
43   } else {
44      unsafe {
45         ptr::copy(pointer, newPointer, min(newSize, oldSize));
46      }
47
48      __rust_deallocate(pointer, oldSize, align);
49      newPointer
50   };
51}
52
/// In-place reallocation is unsupported by this allocator, so the block keeps
/// its current size: `oldSize` is handed straight back to the caller.
#[no_mangle]
pub extern "C" fn __rust_reallocate_inplace(
   _pointer: *mut u8,
   oldSize: usize,
   _size: usize,
   _align: usize,
) -> usize {
   return oldSize;
}
63
/// Reports the usable size of an allocation request.  The other allocator
/// backends implement this as the identity on `size`, and so do we: callers
/// get exactly as many usable bytes as they asked for.
#[no_mangle]
pub extern "C" fn __rust_usable_size(size: usize, _align: usize) -> usize {
   return size;
}
71
72/// Performs a single heap allocation, like `malloc`.
73/// Uses the size and align of `T` for the memory layout.
74///
75/// ## Arguments
76///
77/// * `allocator` - [Allocator](crate::alloc::Allocator) trait implementation.
78///
79/// ## Safety
80///
81/// Unsafe because of a call to [`allocate_aligned`](crate::alloc::Allocator::allocate_aligned),
82/// where the call is inherently unsafe because we aren't certain the allocation will succeed.
83pub unsafe fn allocate<T>(allocator: &mut dyn Allocator) -> Option<NonNull<T>> {
84   allocator.allocate_aligned(Layout::new::<T>()).map(|ptr| ptr.cast::<T>())
85}
86
87/// Allocates an array of size `size`.
88///
89/// ## Arguments
90///
91/// * `allocator` - [Allocator](crate::alloc::Allocator) trait implementation.
92/// * `size` - A [`usize`](prim@usize) providing the size of the array to be allocated.
93///
94/// ## Safety
95///
96/// Unsafe because of a call to [`allocate_aligned`](crate::alloc::Allocator::allocate_aligned),
97/// where the call is inherently unsafe because we aren't certain the allocation will succeed.
98pub unsafe fn allocate_array<T>(allocator: &mut dyn Allocator, size: usize) -> Option<NonNull<T>> {
99   allocator
100      .allocate_aligned(Layout::from_type_array::<T>(size))
101      .map(|ptr| ptr.cast::<T>())
102}
103
/// Error value reported when a requested allocation cannot be satisfied.
#[derive(Debug)]
pub struct AllocationError;

impl Display for AllocationError {
   fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
      f.write_str("An error occurred allocating the requested memory")
   }
}
112
/// A global allocator for our program.
///
/// Zero-sized handle: the actual bookkeeping lives behind the lazily
/// initialised `HEAP`, which the `__rust_*` entry points consult.
#[derive(Copy, Clone)]
pub struct GlobalAllocator;
116
117/// A simple wrapper around `spin::Mutex` to enable trait implementations.
118pub struct LockedAllocator<A: Allocator>(pub Mutex<A>);
119
120impl<A> LockedAllocator<A>
121where
122   A: Allocator,
123{
124   pub const fn new(allocator: A) -> Self {
125      return LockedAllocator(Mutex::new(allocator));
126   }
127
128   pub fn lock(&self) -> MutexGuard<A> {
129      return self.0.lock();
130   }
131}
132
/// Low-level allocation interface implemented by each heap backend.
///
/// The provided `*_aligned` methods build alignment support on top of the raw
/// `allocate`/`deallocate` primitives by over-allocating and stashing the
/// original (unaligned) pointer in the `usize` slot immediately below the
/// aligned address that is handed out.
pub unsafe trait Allocator {
   /// Allocates a block described by `layout`; `None` on failure.
   fn allocate(&self, layout: Layout) -> Option<NonNull<u8>>;
   /// Releases a block previously obtained from `allocate` with the same layout.
   unsafe fn deallocate(&self, pointer: *mut u8, layout: Layout);
   /// Resizes a block previously obtained from `allocate`; `None` on failure.
   unsafe fn reallocate(
      &self,
      pointer: *mut u8,
      oldSize: usize,
      layout: Layout,
   ) -> Option<NonNull<u8>>;

   /// Allocates a block honouring `layout`'s alignment even when the backend
   /// only hands out unaligned memory.
   unsafe fn allocate_aligned(&self, layout: Layout) -> Option<NonNull<u8>> {
      // Over-allocate: worst-case padding to reach the alignment, plus one
      // usize in which the original pointer is stashed for deallocation.
      let actualSize = layout.size + layout.align - 1 + size_of::<usize>();

      let pointer = match self.allocate(Layout::from_size(actualSize)) {
         Some(p) => p.as_ptr() as usize,
         None => return None,
      };

      // First aligned address that still leaves room for the stashed pointer
      // just below it.
      let alignedPointer = layout.align_up(pointer + size_of::<usize>());
      let actualP2P = alignedPointer - size_of::<usize>();

      // Record the original pointer so `deallocate_aligned` can recover it.
      ptr::write_unaligned(actualP2P as *mut usize, pointer);

      return Some(NonNull::new_unchecked(alignedPointer as *mut u8));
   }

   /// Releases a block obtained from `allocate_aligned`, recovering the
   /// original pointer from the slot written just below the aligned address.
   unsafe fn deallocate_aligned(&self, pointer: *mut u8, layout: Layout) {
      let alignedPointer = pointer as usize;
      let actualP2P = alignedPointer - size_of::<usize>();
      let actualPointer = ptr::read_unaligned(actualP2P as *const usize);

      // NOTE(review): `layout` here describes the caller's aligned request,
      // not the enlarged block `allocate_aligned` actually obtained
      // (`actualSize`) — confirm that backends tolerate this size mismatch.
      self.deallocate(actualPointer as *mut u8, layout);
   }
}
167
168unsafe impl<A: Allocator> Allocator for Mutex<A> {
169   fn allocate(&self, layout: Layout) -> Option<NonNull<u8>> {
170      return self.lock().allocate(layout);
171   }
172
173   unsafe fn deallocate(&self, pointer: *mut u8, layout: Layout) {
174      self.lock().deallocate(pointer, layout);
175   }
176
177   unsafe fn reallocate(
178      &self,
179      pointer: *mut u8,
180      oldSize: usize,
181      layout: Layout,
182   ) -> Option<NonNull<u8>> {
183      return self.lock().reallocate(pointer, oldSize, layout);
184   }
185}
186
187unsafe impl<A: Allocator> Allocator for LockedAllocator<A> {
188   fn allocate(&self, layout: Layout) -> Option<NonNull<u8>> {
189      return self.lock().allocate(layout);
190   }
191
192   unsafe fn deallocate(&self, pointer: *mut u8, layout: Layout) {
193      return self.lock().deallocate(pointer, layout);
194   }
195
196   unsafe fn reallocate(
197      &self,
198      pointer: *mut u8,
199      oldSize: usize,
200      layout: Layout,
201   ) -> Option<NonNull<u8>> {
202      return self.lock().reallocate(pointer, oldSize, layout);
203   }
204}
205
206unsafe impl<A: Allocator> Allocator for &RefCell<A> {
207   fn allocate(&self, layout: Layout) -> Option<NonNull<u8>> {
208      return self.borrow_mut().allocate(layout);
209   }
210
211   unsafe fn deallocate(&self, pointer: *mut u8, layout: Layout) {
212      return self.borrow_mut().deallocate(pointer, layout);
213   }
214
215   unsafe fn reallocate(
216      &self,
217      pointer: *mut u8,
218      oldSize: usize,
219      layout: Layout,
220   ) -> Option<NonNull<u8>> {
221      return self.borrow_mut().reallocate(pointer, oldSize, layout);
222   }
223}
224
225unsafe impl Allocator for GlobalAllocator {
226   fn allocate(&self, layout: Layout) -> Option<NonNull<u8>> {
227      return Some(NonNull::new(__rust_allocate(layout.size, layout.align)).unwrap());
228   }
229
230   unsafe fn deallocate(&self, pointer: *mut u8, layout: Layout) {
231      __rust_deallocate(pointer, layout.size, layout.align);
232   }
233
234   unsafe fn reallocate(
235      &self,
236      pointer: *mut u8,
237      oldSize: usize,
238      layout: Layout,
239   ) -> Option<NonNull<u8>> {
240      return Some(
241         NonNull::new(__rust_reallocate(
242            pointer,
243            oldSize,
244            layout.size,
245            layout.align,
246         ))
247         .unwrap(),
248      );
249   }
250}
251
252unsafe impl GlobalAlloc for GlobalAllocator {
253   unsafe fn alloc(&self, layout: StdLayout) -> *mut u8 {
254      let layout = Layout::from(layout);
255
256      self.allocate(layout)
257         .map_or(0 as *mut u8, |allocation| allocation.as_ptr())
258   }
259
260   unsafe fn dealloc(&self, ptr: *mut u8, layout: StdLayout) {
261      let layout = Layout::from(layout);
262      self.deallocate(ptr, layout);
263   }
264}
265
266// MODULES //
267
268/// A simple heap based on a buddy allocator.  For the theory of buddy
/// allocators, see <https://en.wikipedia.org/wiki/Buddy_memory_allocation>
/// or <https://www.memorymanagement.org/mmref/alloc.html#buddy-system>.
271///
272/// The basic idea is that our heap size is a power of two, and the heap
273/// starts out as one giant free block.  When a memory allocation request
274/// is received, we round the requested size up to a power of two, and find
275/// the smallest available block we can use.  If the smallest free block is
276/// too big (more than twice as big as the memory we want to allocate), we
277/// split the smallest free block in half recursively until it's the right
278/// size.  This simplifies a lot of bookkeeping, because all our block
279/// sizes are a power of 2, which makes it easy to have one free array per
280/// block size.
281pub mod heap;
282
283/// Implements a simple memory layout structure.
284pub mod layout;
285
286/// Implements two memory allocators: a Buddy Allocator and a Best-Fit Allocator,
287/// respectively referred to as [`BUDDY_ALLOCATOR`][crate::alloc::paging::BUDDY_ALLOCATOR]
288/// and [`FIT_ALLOCATOR`][crate::alloc::paging::FIT_ALLOCATOR].
289pub mod paging;
290
291// EXPORTS //
292
293pub use self::layout::Layout;
294
295// IMPORTS //
296
297use {
298   self::heap::HEAP,
299   std_alloc::alloc::{GlobalAlloc, Layout as StdLayout},
300   core::{
301      cell::RefCell,
302      cmp::min,
303      fmt::{Display, Formatter},
304      mem::size_of,
305      ptr::{self, NonNull},
306   },
307   spin::{Mutex, MutexGuard},
308};