preemptive_threads/stack_guard.rs

use core::sync::atomic::{AtomicU64, Ordering};

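/// Configuration for a guarded stack: how large the canary-filled guard band
/// at the low end of the region is, which canary pattern fills it, and the
/// red-zone size that makes `check_overflow` report `NearOverflow` once free
/// space drops below it.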
pub struct StackGuard {
    pub guard_size: usize,
    pub canary_value: u64,
    pub red_zone: usize,
}

impl Default for StackGuard {
    fn default() -> Self {
        Self {
            guard_size: 4096,
            canary_value: 0xDEADBEEFCAFEBABE,
            red_zone: 128,
        }
    }
}

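/// A thread stack carved out of a caller-provided static memory region. The
/// low `guard_size` bytes are filled with canary words, the high `guard_size`
/// bytes are held in reserve, and the bytes in between form the usable stack.
/// `watermark` records the lowest stack pointer seen by `check_overflow`, so
/// peak usage can be reported later.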
pub struct ProtectedStack {
    base: *mut u8,
    total_size: usize,
    stack_size: usize,
    guard: StackGuard,
    watermark: AtomicU64,
}

impl ProtectedStack {
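    /// Carves a guarded stack out of `memory` and writes canaries into the
    /// low guard band.
    ///
    /// # Safety
    ///
    /// `memory` must be exclusively owned by this stack for the lifetime of
    /// the returned value, and must be large enough for both guard bands plus
    /// at least one 4096-byte page of usable stack (enforced by the assert).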
    pub unsafe fn new(memory: &'static mut [u8], guard: StackGuard) -> Self {
        let base = memory.as_mut_ptr();
        let total_size = memory.len();

        assert!(
            total_size > guard.guard_size * 2 + 4096,
            "Stack too small for guards"
        );

        let stack_size = total_size - guard.guard_size * 2;

        let guard_start = base;
        let _guard_end = unsafe { base.add(guard.guard_size) };

        let canary_ptr = guard_start as *mut u64;
        for i in 0..(guard.guard_size / 8) {
            unsafe {
                canary_ptr.add(i).write_volatile(guard.canary_value);
            }
        }

        let stack_top = unsafe { base.add(total_size - guard.guard_size) as u64 };

        Self {
            base,
            total_size,
            stack_size,
            guard,
            watermark: AtomicU64::new(stack_top),
        }
    }

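    /// Returns the usable stack slice between the two guard bands. The
    /// `'static` mutable borrow is handed out from `&self`, so the caller is
    /// responsible for ensuring only one user of the slice exists at a time.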
    pub fn get_stack(&self) -> &'static mut [u8] {
        unsafe {
            let stack_start = self.base.add(self.guard.guard_size);
            core::slice::from_raw_parts_mut(stack_start, self.stack_size)
        }
    }

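    /// Checks the guard canaries and the current stack pointer and classifies
    /// the stack as ok, near overflow, overflowed, or corrupted. Also updates
    /// the low-water mark used for peak-usage statistics. The stack-pointer
    /// checks are only meaningful when this is called from code running on
    /// this stack.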
    pub fn check_overflow(&self) -> StackStatus {
        unsafe {
            let canary_start = self.base as *const u64;
            let canary_count = self.guard.guard_size / 8;

            let mut corrupted_canaries = 0;
            for i in 0..canary_count {
                if canary_start.add(i).read_volatile() != self.guard.canary_value {
                    corrupted_canaries += 1;
                }
            }

            if corrupted_canaries > 0 {
                return StackStatus::Corrupted {
                    corrupted_bytes: corrupted_canaries * 8,
                    location: StackCorruption::GuardPage,
                };
            }

            let current_sp = get_stack_pointer();
            let stack_bottom = self.base.add(self.guard.guard_size) as u64;

            if current_sp < stack_bottom {
                return StackStatus::Overflow {
                    overflow_bytes: (stack_bottom - current_sp) as usize,
                };
            }

            self.watermark.fetch_min(current_sp, Ordering::Relaxed);
            let high_water_mark = self.watermark.load(Ordering::Relaxed);
            let used =
                (self.base.add(self.total_size - self.guard.guard_size) as u64) - high_water_mark;

            if used as usize > self.stack_size - self.guard.red_zone {
                return StackStatus::NearOverflow {
                    bytes_remaining: self.stack_size - used as usize,
                };
            }

            StackStatus::Ok {
                used_bytes: used as usize,
                free_bytes: self.stack_size - used as usize,
            }
        }
    }

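    /// Returns a snapshot of the stack geometry plus current and peak usage,
    /// derived from the live stack pointer and the recorded low-water mark.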
    pub fn get_stats(&self) -> StackStats {
        let current_sp = get_stack_pointer();
        let stack_top = unsafe { self.base.add(self.total_size - self.guard.guard_size) as u64 };
        let high_water_mark = self.watermark.load(Ordering::Relaxed);

        StackStats {
            total_size: self.total_size,
            usable_size: self.stack_size,
            guard_size: self.guard.guard_size,
            current_usage: (stack_top - current_sp) as usize,
            peak_usage: (stack_top - high_water_mark) as usize,
            red_zone_size: self.guard.red_zone,
        }
    }
}

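/// Health classification returned by `ProtectedStack::check_overflow`.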
#[derive(Debug, Clone, Copy)]
pub enum StackStatus {
    Ok {
        used_bytes: usize,
        free_bytes: usize,
    },
    NearOverflow {
        bytes_remaining: usize,
    },
    Overflow {
        overflow_bytes: usize,
    },
    Corrupted {
        corrupted_bytes: usize,
        location: StackCorruption,
    },
}

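/// Where stack corruption was detected.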
#[derive(Debug, Clone, Copy)]
pub enum StackCorruption {
    GuardPage,
    StackFrame,
    Unknown,
}

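/// Usage snapshot returned by `ProtectedStack::get_stats`.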
#[derive(Debug, Clone, Copy)]
pub struct StackStats {
    pub total_size: usize,
    pub usable_size: usize,
    pub guard_size: usize,
    pub current_usage: usize,
    pub peak_usage: usize,
    pub red_zone_size: usize,
}

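/// Reads the current stack pointer. This is x86_64-specific (`rsp`); other
/// architectures would need their own register read.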
#[inline(always)]
fn get_stack_pointer() -> u64 {
    let sp: u64;
    unsafe {
        core::arch::asm!("mov {}, rsp", out(reg) sp);
    }
    sp
}

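/// Declares a dedicated `static mut` backing array of `$size` bytes and wraps
/// it in a `ProtectedStack`, optionally with a custom guard size. Each
/// invocation site gets its own buffer, so the expansion should only be
/// executed once.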
#[macro_export]
macro_rules! protected_stack {
    ($size:expr) => {{
        static mut STACK_MEMORY: [u8; $size] = [0; $size];
        unsafe {
            $crate::stack_guard::ProtectedStack::new(
                &mut STACK_MEMORY,
                $crate::stack_guard::StackGuard::default(),
            )
        }
    }};
    ($size:expr, $guard_size:expr) => {{
        static mut STACK_MEMORY: [u8; $size] = [0; $size];
        unsafe {
            let guard = $crate::stack_guard::StackGuard {
                guard_size: $guard_size,
                ..$crate::stack_guard::StackGuard::default()
            };
            $crate::stack_guard::ProtectedStack::new(&mut STACK_MEMORY, guard)
        }
    }};
}
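
// A minimal usage sketch, not part of the original module: `spawn_example` is
// a hypothetical caller showing how the pieces fit together. With the default
// 4 KiB guards, a 64 KiB region leaves 56 KiB of usable stack; the actual
// hand-off to the scheduler / context-switch code is elided because it lives
// elsewhere in the crate.
#[allow(dead_code)]
fn spawn_example() {
    let stack = protected_stack!(64 * 1024);

    // The slice between the guard bands is what the new thread runs on.
    let _usable: &'static mut [u8] = stack.get_stack();

    // Polling health only makes sense from (or right after switching away
    // from) the thread that runs on this stack, e.g. on each preemption tick.
    match stack.check_overflow() {
        StackStatus::Ok { .. } => { /* healthy */ }
        StackStatus::NearOverflow { .. } => { /* inside the red zone: warn */ }
        StackStatus::Overflow { .. } | StackStatus::Corrupted { .. } => {
            // stop the thread before it tramples adjacent memory
        }
    }
}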