preemptive_threads/mem/stack_pool.rs
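//! Pooled allocation of thread stacks, segregated by size class.
//!
//! Freed stacks are cached per size class and reused on later allocations;
//! guard pages (`mmu` feature) and stack canaries help detect overflow.
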
use portable_atomic::{AtomicUsize, Ordering};
use spin::Mutex;
use core::ptr::NonNull;

#[cfg(feature = "std-shim")]
extern crate std;

#[cfg(feature = "std-shim")]
use std::vec::Vec;

#[cfg(not(feature = "std-shim"))]
extern crate alloc;

#[cfg(not(feature = "std-shim"))]
use alloc::vec::Vec;

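/// Fixed stack size classes used by the pool.
///
/// A minimal usage sketch (mirrors the unit tests below):
///
/// ```ignore
/// assert_eq!(StackSizeClass::for_size(8192), Some(StackSizeClass::Medium));
/// assert_eq!(StackSizeClass::Medium.size(), 16384);
/// ```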
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StackSizeClass {
    /// 4 KiB
    Small = 4096,
    /// 16 KiB
    Medium = 16384,
    /// 64 KiB
    Large = 65536,
    /// 256 KiB
    ExtraLarge = 262144,
}

impl StackSizeClass {
    /// Usable stack size in bytes for this class.
    pub fn size(self) -> usize {
        self as usize
    }

    /// Maps a requested size in bytes to the smallest class that can hold it.
    pub fn for_size(requested_size: usize) -> Option<Self> {
        match requested_size {
            0..=4096 => Some(Self::Small),
            4097..=16384 => Some(Self::Medium),
            16385..=65536 => Some(Self::Large),
            65537..=262144 => Some(Self::ExtraLarge),
            _ => None,
        }
    }
}

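/// An owned, pooled thread stack.
///
/// `memory` points at the whole allocation; when guard pages are enabled the
/// usable region sits one 4 KiB page in from either end.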
// Deriving `Clone` here would double-free: a `Stack` owns its allocation and
// releases it in `Drop`.
pub struct Stack {
    /// Base of the whole allocation (including any guard pages).
    memory: NonNull<u8>,
    /// Size of the whole allocation in bytes.
    total_size: usize,
    /// Size of the usable region in bytes.
    usable_size: usize,
    /// Size class this stack belongs to.
    size_class: StackSizeClass,
    /// Whether guard pages surround the usable region.
    has_guard_pages: bool,
}

impl Stack {
    /// Usable stack size in bytes (excluding guard pages).
    pub fn size(&self) -> usize {
        self.usable_size
    }

    /// Size class this stack was allocated from.
    pub fn size_class(&self) -> StackSizeClass {
        self.size_class
    }

    /// Highest address of the usable region; the initial stack pointer for a
    /// downward-growing stack.
    pub fn stack_bottom(&self) -> *mut u8 {
        unsafe {
            self.memory.as_ptr().add(if self.has_guard_pages {
                4096 + self.usable_size
            } else {
                self.usable_size
            })
        }
    }

    /// Lowest address of the usable region (just past the leading guard page,
    /// if any).
    pub fn stack_top(&self) -> *const u8 {
        unsafe {
            if self.has_guard_pages {
                self.memory.as_ptr().add(4096)
            } else {
                self.memory.as_ptr()
            }
        }
    }

    /// Alias for [`Self::stack_bottom`].
    pub fn bottom(&self) -> *mut u8 {
        self.stack_bottom()
    }

    /// Alias for [`Self::stack_top`].
    pub fn top(&self) -> *const u8 {
        self.stack_top()
    }

    /// Whether guard pages surround the usable region.
    pub fn has_guard_pages(&self) -> bool {
        self.has_guard_pages
    }

    /// Writes a canary word at the lowest usable address so growth past the
    /// end of the stack can be detected.
    pub fn install_canary(&self, canary: u64) {
        let canary_location = self.stack_top() as *mut u64;
        unsafe {
            canary_location.write(canary);
        }
    }

    /// Returns `true` if the canary written by [`Self::install_canary`] is
    /// still intact.
    pub fn check_canary(&self, expected_canary: u64) -> bool {
        let canary_location = self.stack_top() as *const u64;
        unsafe { canary_location.read() == expected_canary }
    }
}

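/// A pool of reusable thread stacks, one free list per [`StackSizeClass`].
///
/// Freed stacks are cached and handed back out on later allocations, avoiding
/// repeated trips to the underlying allocator. A minimal usage sketch
/// (requires the `std-shim` feature, mirroring the unit tests below):
///
/// ```ignore
/// let pool = StackPool::new();
/// let stack = pool.allocate(StackSizeClass::Small).unwrap();
/// assert_eq!(stack.size(), StackSizeClass::Small.size());
/// pool.deallocate(stack);
/// ```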
pub struct StackPool {
    /// Per-size-class free lists of cached stacks.
    free_stacks: [Mutex<Vec<Stack>>; 4],
    /// Allocation counters.
    stats: StackPoolStats,
}

/// Counters tracking pool usage.
#[derive(Debug, Default)]
struct StackPoolStats {
    /// Stacks allocated from the underlying allocator.
    allocated: AtomicUsize,
    /// Stacks returned via `deallocate`.
    deallocated: AtomicUsize,
    /// Stacks currently handed out to callers.
    in_use: AtomicUsize,
}

impl StackPool {
    /// Creates an empty pool with no cached stacks.
    pub const fn new() -> Self {
        Self {
            free_stacks: [
                Mutex::new(Vec::new()),
                Mutex::new(Vec::new()),
                Mutex::new(Vec::new()),
                Mutex::new(Vec::new()),
            ],
            stats: StackPoolStats {
                allocated: AtomicUsize::new(0),
                deallocated: AtomicUsize::new(0),
                in_use: AtomicUsize::new(0),
            },
        }
    }

    /// Allocates a stack of the given size class, reusing a cached stack when
    /// one is available.
    pub fn allocate(&self, size_class: StackSizeClass) -> Option<Stack> {
        let class_index = self.size_class_index(size_class);

        // Fast path: pop a cached stack if the free list is uncontended.
        if let Some(mut free_list) = self.free_stacks[class_index].try_lock() {
            if let Some(stack) = free_list.pop() {
                self.stats.in_use.fetch_add(1, Ordering::AcqRel);
                return Some(stack);
            }
        }

        self.allocate_new_stack(size_class)
    }

    /// Returns a stack to the pool so it can be reused.
    pub fn deallocate(&self, stack: Stack) {
        let class_index = self.size_class_index(stack.size_class);

        // In hardened builds, scrub the usable region before the stack can be
        // handed out again.
        #[cfg(feature = "hardened")]
        {
            unsafe {
                let usable_start = stack
                    .memory
                    .as_ptr()
                    .add(if stack.has_guard_pages { 4096 } else { 0 });
                core::ptr::write_bytes(usable_start, 0, stack.usable_size);
            }
        }

        if let Some(mut free_list) = self.free_stacks[class_index].try_lock() {
            free_list.push(stack);
            self.stats.in_use.fetch_sub(1, Ordering::AcqRel);
            self.stats.deallocated.fetch_add(1, Ordering::AcqRel);
        } else {
            // Free list is contended: let the stack drop here and release its
            // memory instead of caching it.
            self.stats.in_use.fetch_sub(1, Ordering::AcqRel);
            self.stats.deallocated.fetch_add(1, Ordering::AcqRel);
        }
    }

    /// Returns the `(allocated, deallocated, in_use)` counters.
    pub fn stats(&self) -> (usize, usize, usize) {
        (
            self.stats.allocated.load(Ordering::Acquire),
            self.stats.deallocated.load(Ordering::Acquire),
            self.stats.in_use.load(Ordering::Acquire),
        )
    }

    /// Maps a size class to its index in `free_stacks`.
    fn size_class_index(&self, size_class: StackSizeClass) -> usize {
        match size_class {
            StackSizeClass::Small => 0,
            StackSizeClass::Medium => 1,
            StackSizeClass::Large => 2,
            StackSizeClass::ExtraLarge => 3,
        }
    }

    /// Allocates a brand-new stack from the underlying allocator.
    fn allocate_new_stack(&self, size_class: StackSizeClass) -> Option<Stack> {
        let usable_size = size_class.size();
        let has_guard_pages = cfg!(feature = "mmu");

        // With guard pages enabled, reserve one 4 KiB page at each end of the
        // usable region.
        let total_size = if has_guard_pages {
            usable_size + 8192
        } else {
            usable_size
        };

        #[cfg(feature = "std-shim")]
        {
            extern crate std;
            use std::alloc::{alloc, Layout};

            let layout = Layout::from_size_align(total_size, 4096).ok()?;
            let memory = unsafe { alloc(layout) };

            if memory.is_null() {
                return None;
            }

            let memory = unsafe { NonNull::new_unchecked(memory) };

            #[cfg(feature = "mmu")]
            if has_guard_pages {
                self.setup_guard_pages(&memory, total_size);
            }

            let stack = Stack {
                memory,
                total_size,
                usable_size,
                size_class,
                has_guard_pages,
            };

            self.stats.allocated.fetch_add(1, Ordering::AcqRel);
            self.stats.in_use.fetch_add(1, Ordering::AcqRel);

            Some(stack)
        }

        #[cfg(not(feature = "std-shim"))]
        {
            unimplemented!("Stack allocation requires a custom allocator in no_std environments")
        }
    }

    /// Intended to mark the guard pages at each end of the allocation as
    /// inaccessible; left unimplemented pending platform MMU support.
    #[cfg(feature = "mmu")]
    fn setup_guard_pages(&self, _memory: &NonNull<u8>, _total_size: usize) {
        unimplemented!("Guard page setup requires platform-specific MMU manipulation")
    }
}

impl Drop for Stack {
    fn drop(&mut self) {
        #[cfg(feature = "std-shim")]
        {
            extern crate std;
            use std::alloc::{dealloc, Layout};

            // The layout must match the one used in `allocate_new_stack`.
            let layout = Layout::from_size_align(self.total_size, 4096).unwrap();
            unsafe {
                dealloc(self.memory.as_ptr(), layout);
            }
        }
    }
}

// SAFETY: a `Stack` exclusively owns the allocation behind its raw pointer.
unsafe impl Send for Stack {}
unsafe impl Sync for Stack {}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_stack_size_class_for_size() {
        assert_eq!(StackSizeClass::for_size(1024), Some(StackSizeClass::Small));
        assert_eq!(StackSizeClass::for_size(4096), Some(StackSizeClass::Small));
        assert_eq!(StackSizeClass::for_size(8192), Some(StackSizeClass::Medium));
        assert_eq!(StackSizeClass::for_size(32768), Some(StackSizeClass::Large));
        assert_eq!(StackSizeClass::for_size(131072), Some(StackSizeClass::ExtraLarge));
        assert_eq!(StackSizeClass::for_size(500000), None);
    }

    #[cfg(feature = "std-shim")]
    #[test]
    fn test_stack_pool_basic() {
        let pool = StackPool::new();
        let stack = pool.allocate(StackSizeClass::Small).unwrap();

        assert_eq!(stack.size_class(), StackSizeClass::Small);
        assert_eq!(stack.size(), StackSizeClass::Small.size());

        pool.deallocate(stack);

        let (allocated, deallocated, in_use) = pool.stats();
        assert_eq!(allocated, 1);
        assert_eq!(deallocated, 1);
        assert_eq!(in_use, 0);
    }

    #[cfg(feature = "std-shim")]
    #[test]
    fn test_stack_canary() {
        let pool = StackPool::new();
        let stack = pool.allocate(StackSizeClass::Small).unwrap();

        let canary_value = 0xDEADBEEFCAFEBABE;
        stack.install_canary(canary_value);
        assert!(stack.check_canary(canary_value));
        assert!(!stack.check_canary(0x1234567890ABCDEF));

        pool.deallocate(stack);
    }
}