1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
#![feature(alloc, heap_api, ptr_as_ref)]

extern crate alloc;

use alloc::heap;

use std::cell::Cell;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::ptr;
/// An RAII handle to a value placed inside a `ScopedAllocator`'s buffer.
///
/// Dereferences to the stored `T`. Dropping the handle runs `T`'s
/// destructor in place (see the `Drop` impl); the backing bytes are
/// reclaimed only by the allocator itself.
pub struct Allocated<'a, T: 'a> {
// Exclusive borrow of the slot inside the allocator's arena; the pointee
// is logically owned by this handle for lifetime 'a.
item: &'a mut T,
}
impl<'a, T> Deref for Allocated<'a, T> {
    type Target = T;

    /// Immutably borrows the value stored in the allocator's buffer.
    fn deref(&self) -> &T {
        let value: &T = self.item;
        value
    }
}
impl<'a, T> DerefMut for Allocated<'a, T> {
    /// Mutably borrows the value stored in the allocator's buffer.
    fn deref_mut(&mut self) -> &mut T {
        &mut *self.item
    }
}
impl<'a, T> Drop for Allocated<'a, T> {
    /// Runs `T`'s destructor on the value inside the arena.
    ///
    /// Uses `drop_in_place` so the destructor executes on the slot itself;
    /// the original `ptr::read` + discard first memcpy'd the whole `T`
    /// onto the stack just to drop the copy. The backing bytes are NOT
    /// freed here — the allocator reclaims them wholesale.
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `item` points at a live, initialized `T` that this
        // handle exclusively owns; it is never read again after this.
        unsafe { ptr::drop_in_place(self.item as *mut T) }
    }
}
pub struct ScopedAllocator {
current: *mut u8,
end: *mut u8,
start: *mut u8,
}
impl ScopedAllocator {
pub fn new(size: usize) -> ScopedAllocator {
let start = if size != 0 {
unsafe { heap::allocate(size, mem::align_of::<usize>()) }
} else {
heap::EMPTY as *mut u8
};
if start.is_null() {
panic!("Out of memory!");
}
ScopedAllocator {
current: start,
end: unsafe { start.offset(size as isize) },
start: start,
}
}
pub fn allocate<'a, T>(&'a self, val: T) -> Result<Allocated<'a, T>, T> {
match unsafe { self.allocate_raw(mem::size_of::<T>(), mem::align_of::<T>()) } {
Ok(ptr) => {
let item = ptr as *mut T;
unsafe { ptr::write(item, val) };
Ok(Allocated {
item: unsafe { item.as_mut().unwrap() },
})
}
Err(_) => Err(val)
}
}
pub unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result<*mut u8, ()> {
let current_ptr = self.current;
let aligned_ptr = ((current_ptr as usize + align - 1) & !(align - 1)) as *mut u8;
let end_ptr = aligned_ptr.offset(size as isize);
if end_ptr > self.end {
Err(())
} else {
self.set_current(end_ptr);
Ok(aligned_ptr)
}
}
#[inline]
pub fn scope<F, U>(&self, f: F) -> U where F: FnMut() -> U {
let mut f = f;
let old = self.current;
let u = f();
self.set_current(old);
u
}
#[inline(always)]
fn set_current(&self, new: *mut u8) {
let ptr = &self.current as *const _ as *mut _;
unsafe {*ptr = new }
}
}
impl Drop for ScopedAllocator {
#[inline]
fn drop(&mut self) {
let size = self.end as usize - self.start as usize;
if size > 0 {
unsafe { heap::deallocate(self.start, size, mem::align_of::<usize>()) }
}
}
}
#[test]
fn it_works() {
    // A payload whose destruction is observable on stdout.
    struct Bomb(i32);
    impl Drop for Bomb {
        fn drop(&mut self) { println!("Boom! {}", self.0) }
    }

    let allocator = ScopedAllocator::new(64);

    // An allocation that lives for the whole test.
    let _my_int = allocator.allocate(23).ok().unwrap();

    // An allocation confined to a scope: its destructor runs when the
    // handle goes out of scope, and its bytes are reclaimed on exit.
    allocator.scope(|| {
        let _bomb = allocator.allocate(Bomb(1)).ok().unwrap();
    });
}