// eastl_rs/fixed_pool/with_overflow.rs
use crate::allocator::Allocator;
use crate::fixed_pool::{FixedPool, PoolAllocator};
use std::{mem, ptr};
4
/// A fixed-size node pool that falls back to an overflow allocator once the
/// pool's backing memory is exhausted.
///
/// `#[repr(C)]` keeps the field order and layout stable; the layout is
/// asserted against expected offsets in the tests below.
#[repr(C)]
pub struct FixedPoolWithOverflow<Node: Sized, OverflowAllocator: Allocator> {
    /// Primary fixed-size pool; allocations are served from here first.
    pub(crate) pool_allocator: FixedPool<Node>,
    /// Fallback allocator used when the pool has no free nodes.
    overflow_allocator: OverflowAllocator,
    /// Start of the pool's backing memory. Together with
    /// `pool_allocator.capacity` this decides, on deallocation, whether a
    /// pointer came from the pool or from the overflow allocator.
    pub(crate) pool_begin: *mut (),
}
13
14impl<Node: Sized, OverflowAllocator: Allocator> FixedPoolWithOverflow<Node, OverflowAllocator> {
15 #[allow(dead_code)]
20 pub unsafe fn new(memory: &mut [u8], overflow_allocator: OverflowAllocator) -> Self {
21 let mut res = Self::with_allocator(overflow_allocator);
22 res.init(memory);
23 res
24 }
25
26 pub unsafe fn with_allocator(overflow_allocator: OverflowAllocator) -> Self {
31 Self {
32 pool_allocator: FixedPool::default(),
33 overflow_allocator,
34 pool_begin: ptr::null_mut(),
35 }
36 }
37
38 #[allow(dead_code)]
41 pub fn can_allocate(&self) -> bool {
42 self.pool_allocator.can_allocate()
43 }
44}
45
46impl<Node: Sized, OverflowAllocator: Allocator> PoolAllocator
47 for FixedPoolWithOverflow<Node, OverflowAllocator>
48{
49 unsafe fn init(&mut self, memory: &mut [u8]) {
50 self.pool_allocator.init(memory);
51
52 self.pool_begin = memory.as_mut_ptr().cast();
54 }
55}
56
57impl<Node: Sized, OverflowAllocator: Allocator + Default>
58 FixedPoolWithOverflow<Node, OverflowAllocator>
59{
60 #[allow(dead_code)]
65 pub unsafe fn new_with_default_allocator(memory: &mut [u8]) -> Self {
66 Self::new(memory, OverflowAllocator::default())
67 }
68}
69
70unsafe impl<Node: Sized, OverflowAllocator: Allocator> Allocator
71 for FixedPoolWithOverflow<Node, OverflowAllocator>
72{
73 fn allocate_raw_aligned(&mut self, n: usize, align: usize) -> *mut () {
74 debug_assert!(n == mem::size_of::<Node>());
75 debug_assert!(align == mem::align_of::<Node>());
76
77 let p = self.pool_allocator.allocate_raw_aligned(n, align);
79 if !p.is_null() {
80 p
81 } else {
82 self.overflow_allocator.allocate_raw_aligned(n, align)
83 }
84 }
85
86 unsafe fn deallocate_raw_aligned(&mut self, p: *mut (), _n: usize, _align: usize) {
87 if self.pool_begin <= p && p <= self.pool_allocator.capacity.cast() {
89 self.pool_allocator.deallocate_raw_aligned(p, _n, _align)
90 } else {
91 self.overflow_allocator
92 .deallocate_raw_aligned(p, _n, _align)
93 }
94 }
95}
96
97impl<Node: Sized, OverflowAllocator: Allocator + Default> Default
98 for FixedPoolWithOverflow<Node, OverflowAllocator>
99{
100 fn default() -> Self {
101 Self {
102 pool_allocator: FixedPool::default(),
103 overflow_allocator: OverflowAllocator::default(),
104 pool_begin: ptr::null_mut(),
105 }
106 }
107}
108
#[cfg(test)]
mod test {
    use crate::allocator::{Allocator, DefaultAllocator};
    use crate::fixed_pool::with_overflow::FixedPoolWithOverflow;
    use crate::fixed_pool::FixedPool;
    use memoffset::offset_of;
    use std::mem;

    // Over-aligned (16-byte) node so the tests also exercise alignment.
    #[repr(C, align(0x10))]
    struct TestNode {
        a: usize,
    }

    // Pins the #[repr(C)] layout: pool_allocator first, then
    // overflow_allocator, then pool_begin.
    // NOTE(review): the expected offsets assume DefaultAllocator occupies
    // exactly one usize — confirm against its definition.
    #[test]
    fn layout() {
        assert_eq!(
            offset_of!(FixedPoolWithOverflow<TestNode, DefaultAllocator>, pool_allocator),
            0
        );
        assert_eq!(
            offset_of!(FixedPoolWithOverflow<TestNode, DefaultAllocator>, overflow_allocator),
            mem::size_of::<FixedPool<TestNode>>()
        );
        assert_eq!(
            offset_of!(FixedPoolWithOverflow<TestNode, DefaultAllocator>, pool_begin),
            mem::size_of::<FixedPool<TestNode>>() + mem::size_of::<usize>()
        );

        assert_eq!(
            mem::size_of::<FixedPoolWithOverflow<TestNode, DefaultAllocator>>(),
            mem::size_of::<FixedPool<TestNode>>() + mem::size_of::<usize>() * 2
        );
    }

    // Buffer holds two nodes, so the first allocation must come from the
    // pool itself.
    #[test]
    fn simple_alloc_happy_case() {
        let mut buf = [0; mem::size_of::<TestNode>() * 2];

        let mut allocator = unsafe {
            FixedPoolWithOverflow::<TestNode, _>::new(&mut buf, DefaultAllocator::default())
        };
        assert!(allocator.can_allocate());

        let p: *mut TestNode = allocator.allocate(1);

        // Non-null, aligned for TestNode, and fully inside the pool's
        // backing buffer (strictly below `capacity`).
        assert!(!p.is_null());
        assert_eq!((p as usize & (mem::align_of::<TestNode>() - 1)), 0);
        assert!(
            p as usize + mem::size_of::<TestNode>() <= allocator.pool_allocator.capacity as usize
        );
    }

    // Buffer is one byte short of two nodes, leaving a single pool slot;
    // the second allocation must be served by the overflow allocator.
    #[test]
    fn simple_alloc_overflow() {
        let mut buf = [0; (mem::size_of::<TestNode>() * 2) - 1];

        let mut allocator = unsafe {
            FixedPoolWithOverflow::<TestNode, _>::new(&mut buf, DefaultAllocator::default())
        };
        assert!(allocator.can_allocate());

        // Consumes the only pool slot.
        let _: *mut TestNode = allocator.allocate(1);

        assert!(!allocator.can_allocate());
        let p: *mut TestNode = allocator.allocate(1);

        assert!(!p.is_null());
        assert_eq!((p as usize & (mem::align_of::<TestNode>() - 1)), 0);

        // The overflow pointer must lie outside the pool's backing range.
        assert!(p < allocator.pool_begin.cast() || p >= allocator.pool_allocator.capacity.cast());
    }

    // Deallocation must route each pointer back to its source allocator:
    // freeing the overflow pointer leaves the pool full; freeing the pool
    // pointer makes the pool allocatable again.
    #[test]
    fn simple_alloc_realloc() {
        let mut buf = [0; (mem::size_of::<TestNode>() * 2) - 1];

        let mut allocator = unsafe {
            FixedPoolWithOverflow::<TestNode, _>::new(&mut buf, DefaultAllocator::default())
        };
        assert!(allocator.can_allocate());

        let pool_allocated: *mut TestNode = allocator.allocate(1);
        assert!(!pool_allocated.is_null());

        assert!(!allocator.can_allocate());
        let overflow_allocated: *mut TestNode = allocator.allocate(1);
        assert!(
            overflow_allocated < allocator.pool_begin.cast()
                || overflow_allocated >= allocator.pool_allocator.capacity.cast()
        );

        // Freeing the overflow pointer must NOT return a slot to the pool.
        unsafe { allocator.deallocate(overflow_allocated, 1) };
        assert!(!allocator.can_allocate());

        // Freeing the pool pointer returns its slot to the free list.
        unsafe { allocator.deallocate(pool_allocated, 1) };
        assert!(allocator.can_allocate());
    }
}