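//! A simple first-fit allocator that manages memory as a singly linked list
//! of `Region` headers stored inside the memory being managed.
//!
//! A minimal usage sketch; the crate name `allocator` and the static backing
//! buffer are placeholders, and real backing memory would usually come from a
//! boot-time memory map instead:
//!
//! ```ignore
//! use allocator::Allocator;
//! use alloc::alloc::Layout;
//!
//! static mut HEAP: [u8; 4096] = [0; 4096];
//!
//! let mut heap = Allocator::empty();
//! // Hand the backing buffer to the allocator.
//! unsafe { heap.add_region(HEAP.as_ptr() as usize, HEAP.len()) };
//!
//! let layout = Layout::from_size_align(64, 8).unwrap();
//! let ptr = heap.allocate(layout).expect("out of memory");
//! heap.deallocate(ptr, layout);
//! ```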
#![feature(const_fn)]
#![feature(allocator_api)]
#![feature(alloc_layout_extra)]
#![cfg_attr(not(test), no_std)]
extern crate alloc;
use core::mem::{self, size_of};
use core::ptr::NonNull;
use alloc::alloc::{AllocErr, Layout};
pub mod region;
pub use self::region::{Region, RegionFlags};
pub mod align;
pub use self::align::*;
pub mod locked;
pub use self::locked::LockedAllocator;
#[cfg(test)]
mod test;
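/// A first-fit allocator over a singly linked list of `Region`s.
///
/// `first_region` holds the address of the head of the region list, or `0`
/// when no region has been added yet.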
pub struct Allocator {
pub first_region: usize,
}
impl Allocator {
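    /// Creates an allocator with no regions attached.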
pub const fn empty() -> Allocator {
Allocator {
first_region: 0,
}
}
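    /// The smallest allocation size handed out, in bytes (two machine words).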
pub fn min_allocation_size() -> usize {
size_of::<usize>() * 2
}
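    /// Adds the memory range starting at `addr` and spanning `size` bytes to
    /// the allocator. Ranges of at most `Region::min_size()` bytes are ignored.
    ///
    /// # Safety
    ///
    /// `addr..addr + size` must be valid, otherwise unused memory that
    /// outlives the allocator and is not added more than once.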
pub unsafe fn add_region(&mut self, addr: usize, size: usize) {
if size <= Region::min_size() {
return;
}
let mut region = Region::new(addr, size);
region.next = self.first_region;
let ptr = addr as *mut Region;
ptr.write(region);
self.first_region = ptr as usize;
}
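    /// Returns the first region in the list, or `None` if no region has been
    /// added yet.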
pub fn get_region_first(&self) -> Option<&mut Region> {
if self.first_region == 0 {
return None;
}
unsafe { Some(&mut *(self.first_region as *mut Region)) }
}
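    /// Returns the first unused region that can hold at least `size` bytes.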
    pub fn get_region_first_free(&self, size: usize) -> Option<&mut Region> {
        let mut region = self.get_region_first();
        // Walk the region list until an unused region of sufficient size is found.
        while let Some(r) = region {
            if !r.get_used() && r.size() >= size {
                return Some(r);
            }
            region = r.next();
        }
        None
    }
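    /// Returns the region whose data pointer equals `ptr`, if any.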
    pub fn get_region_for_ptr(&self, ptr: *mut u8) -> Option<&mut Region> {
        let mut region = self.get_region_first();
        // Walk the region list until the region owning `ptr` is found.
        while let Some(r) = region {
            if unsafe { r.data_ptr() } == ptr {
                return Some(r);
            }
            region = r.next();
        }
        None
    }
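    /// Allocates memory for `layout` using a first-fit search, splitting off
    /// the remainder of the chosen region as a new free region when at least
    /// `Region::min_size()` bytes would be left over.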
    pub fn allocate(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
        // Round the requested size up to the minimum allocation size and to
        // the alignment of a `Region` header.
        let mut size = layout.size();
        if size < Self::min_allocation_size() {
            size = Self::min_allocation_size();
        }
        size = align_up(size, mem::align_of::<Region>());
        let layout = Layout::from_size_align(size, layout.align()).unwrap();
        // Include the padding required to satisfy the requested alignment.
        let size_pad = layout.padding_needed_for(layout.align()) + layout.size();
        // First fit: take the first free region that is large enough.
        let region = self.get_region_first_free(size_pad).ok_or(AllocErr)?;
        region.set_used(true);
        region.padding_start = size_pad - layout.size();
        // If enough space is left over for another region, split the remainder
        // off as a new free region and link it into the list.
        if (layout.size() + Region::min_size()) <= region.size() {
            let data_addr = unsafe { region.data_ptr() as usize } + layout.size();
            let new_size = region.size() - layout.size();
            let mut new_region = Region::new(data_addr, new_size);
            new_region.next = region.next;
            unsafe {
                let ptr = data_addr as *mut Region;
                ptr.write(new_region);
                region.next = ptr as usize;
            }
            region.size = layout.size();
        }
        Ok(unsafe { NonNull::new_unchecked(region.data_ptr()) })
    }
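    /// Frees the allocation behind `ptr`.
    ///
    /// Panics if `ptr` was not previously returned by `allocate`.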
    pub fn deallocate(&mut self, ptr: NonNull<u8>, _layout: Layout) {
        let region = self.get_region_for_ptr(ptr.as_ptr())
            .expect("failed to get region for given pointer");
        // Capture the region's size before the padding fields are cleared,
        // write it back, and mark the region as free again.
        let size = region.size();
        region.padding_start = 0;
        region.padding_end = 0;
        region.size = size;
        region.set_used(false);
    }
}