//! A heap allocator for Cortex-M processors
//!
//! # Example
//!
//! ```
//! // Plug in the allocator
//! extern crate alloc_cortex_m;
//! extern crate collections;
//!
//! use alloc_cortex_m::HEAP;
//! use collections::Vec;
//!
//! #[no_mangle]
//! pub fn main() -> ! {
//!     // Initialize the heap BEFORE you use the allocator
//!     HEAP.lock(|heap| unsafe { heap.init(0x2000_0000, 1024) });
//!
//!     let mut xs = Vec::new();
//!     xs.push(1);
//!     // ...
//!
//!     loop {}
//! }
//! ```

#![allocator]
#![feature(allocator)]
#![feature(const_fn)]
#![no_std]

extern crate cortex_m;
extern crate linked_list_allocator;

use core::{ptr, cmp};

use cortex_m::interrupt::Mutex;

/// A global UNINITIALIZED heap allocator
///
/// You must initialize this heap using the
/// [`init`](struct.Heap.html#method.init) method before using the allocator.
pub static HEAP: Mutex<Heap> = Mutex::new(Heap::empty());

/// A heap allocator
// NOTE newtype to hide all the other Heap methods
pub struct Heap {
    inner: linked_list_allocator::Heap,
}

impl Heap {
    const fn empty() -> Self {
        Heap { inner: linked_list_allocator::Heap::empty() }
    }

    /// Initializes the heap
    ///
    /// This method must be called before you run any code that makes use of the
    /// allocator.
    ///
    /// This method must be called exactly ONCE.
    ///
    /// `heap_bottom` is the address where the heap will be located. Note that
    /// the heap grows "upwards", towards larger addresses.
    ///
    /// `heap_size` is the size of the heap in bytes.
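    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the heap starts at `0x2000_0000` (the start
    /// of RAM on many Cortex-M devices) and is 1 KiB in size; adjust both
    /// values to your memory layout:
    ///
    /// ```ignore
    /// use alloc_cortex_m::HEAP;
    ///
    /// // Hand the heap its memory region. Must run exactly once, before the
    /// // first allocation.
    /// HEAP.lock(|heap| unsafe { heap.init(0x2000_0000, 1024) });
    /// ```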
    pub unsafe fn init(&mut self, heap_bottom: usize, heap_size: usize) {
        self.inner.init(heap_bottom, heap_size);
    }
}

// Rust allocator interface
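//
// With the old `#![allocator]` interface (`#![feature(allocator)]`), rustc
// expects this crate to export the `__rust_*` symbols below; the `alloc` and
// `collections` crates call them whenever they need heap memory.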

/// Rust allocation function (cf. `malloc`)
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
    HEAP.lock(|heap| {
        heap.inner.allocate_first_fit(size, align).expect("out of memory")
    })
}

/// Rust de-allocation function (cf. `free`)
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_deallocate(ptr: *mut u8, size: usize, align: usize) {
    HEAP.lock(|heap| unsafe { heap.inner.deallocate(ptr, size, align) });
}

/// Rust re-allocation function (cf. `realloc`)
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_reallocate(ptr: *mut u8,
                                    size: usize,
                                    new_size: usize,
                                    align: usize)
                                    -> *mut u8 {

    // from: https://github.com/rust-lang/rust/blob/
    //     c66d2380a810c9a2b3dbb4f93a830b101ee49cc2/
    //     src/liballoc_system/lib.rs#L98-L101
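    //
    // Strategy from the code linked above: allocate a fresh block, copy over
    // the smaller of the old and new sizes, then release the old block.
    // `__rust_allocate` panics on out-of-memory, so the copy only runs once
    // the new block has been obtained.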

    let new_ptr = __rust_allocate(new_size, align);
    unsafe { ptr::copy(ptr, new_ptr, cmp::min(size, new_size)) };
    __rust_deallocate(ptr, size, align);
    new_ptr
}

/// Rust re-allocation function which guarantees not to move the data
/// somewhere else.
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_reallocate_inplace(_ptr: *mut u8,
                                            size: usize,
                                            _new_size: usize,
                                            _align: usize)
                                            -> usize {
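    // This allocator never resizes a block in place, so report the original
    // usable size unchanged; the caller falls back to `__rust_reallocate`
    // when it needs more room.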
    size
}

/// Some allocators (pool allocators, generally) over-allocate. This function
/// reports how much usable space there is at a location. Our allocator
/// doesn't over-allocate, so it just returns `size`.
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_usable_size(size: usize, _align: usize) -> usize {
    size
}