preemptive_threads/stack_guard.rs
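//! Guarded thread stacks.
//!
//! `ProtectedStack` wraps a caller-supplied memory region and layers three
//! overflow checks on top of it: a canary-filled guard region at the bottom of
//! the stack, a direct comparison of the current stack pointer against the
//! stack bounds, and a watermark recording the deepest stack pointer observed.
//! The `protected_stack!` macro declares a static backing buffer and wraps it
//! in a `ProtectedStack`.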

use core::sync::atomic::{AtomicU64, Ordering};

/// Stack guard configuration
pub struct StackGuard {
    /// Size of the guard region at the bottom of the stack (in bytes)
    pub guard_size: usize,
    /// Canary value written into the guard region for overflow detection
    pub canary_value: u64,
    /// Red zone size (bytes below the stack pointer that must not be used)
    pub red_zone: usize,
}

impl Default for StackGuard {
    fn default() -> Self {
        Self {
            guard_size: 4096, // 1 page
            canary_value: 0xDEADBEEFCAFEBABE,
            red_zone: 128, // x86_64 ABI red zone
        }
    }
}

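// Memory layout managed by `ProtectedStack`, derived from the arithmetic in
// `new`. The stack grows downward, so an overflowing thread runs into the
// canary-filled bottom guard first:
//
//   base                                                    base + total_size
//   | bottom guard (canaries) |   usable stack   | top guard (not canaried) |
//   |<------ guard_size ----->|<-- stack_size -->|<------- guard_size ----->|
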
/// Enhanced stack with multiple protection mechanisms
pub struct ProtectedStack {
    /// Base address of allocated memory
    base: *mut u8,
    /// Total size including guards
    total_size: usize,
    /// Usable stack size
    stack_size: usize,
    /// Guard configuration
    guard: StackGuard,
    /// Lowest stack pointer observed so far (usage high-water mark, stored as an address)
    watermark: AtomicU64,
}

impl ProtectedStack {
    /// Create a new protected stack
    ///
    /// # Safety
    /// The caller must provide a valid, exclusively owned memory region that
    /// is not used for anything else.
    pub unsafe fn new(memory: &'static mut [u8], guard: StackGuard) -> Self {
        let base = memory.as_mut_ptr();
        let total_size = memory.len();

        // Ensure we have enough space for both guards plus a usable stack
        assert!(
            total_size > guard.guard_size * 2 + 4096,
            "Stack too small for guards"
        );

        let stack_size = total_size - guard.guard_size * 2;

        // Fill the bottom guard region with the canary pattern; the top guard
        // region is left as reserved headroom and is not canaried.
        let guard_start = base;
        let canary_ptr = guard_start as *mut u64;
        for i in 0..(guard.guard_size / 8) {
            canary_ptr.add(i).write_volatile(guard.canary_value);
        }

        // Initialize the watermark to the top of the usable stack
        let stack_top = base.add(total_size - guard.guard_size) as u64;

        Self {
            base,
            total_size,
            stack_size,
            guard,
            watermark: AtomicU64::new(stack_top),
        }
    }

    /// Get usable stack memory (the region between the two guard areas)
    ///
    /// # Safety
    /// The returned slice aliases the memory owned by this `ProtectedStack`;
    /// the caller must hand it to at most one thread and must not use it while
    /// any other reference to the same memory is live.
    pub unsafe fn get_stack(&self) -> &'static mut [u8] {
        let stack_start = self.base.add(self.guard.guard_size);
        core::slice::from_raw_parts_mut(stack_start, self.stack_size)
    }

    /// Check for stack overflow using multiple methods
    ///
    /// Methods 2 and 3 read the current stack pointer, so they are only
    /// meaningful when called from code running on this stack.
    pub fn check_overflow(&self) -> StackStatus {
        unsafe {
            // Method 1: Check canary values in the bottom guard region
            let canary_start = self.base as *const u64;
            let canary_count = self.guard.guard_size / 8;

            let mut corrupted_canaries = 0;
            for i in 0..canary_count {
                if canary_start.add(i).read_volatile() != self.guard.canary_value {
                    corrupted_canaries += 1;
                }
            }

            if corrupted_canaries > 0 {
                return StackStatus::Corrupted {
                    corrupted_bytes: corrupted_canaries * 8,
                    location: StackCorruption::GuardPage,
                };
            }

            // Method 2: Check whether the current stack pointer has already
            // dropped below the usable region
            let current_sp = get_stack_pointer();
            let stack_bottom = self.base.add(self.guard.guard_size) as u64;

            if current_sp < stack_bottom {
                return StackStatus::Overflow {
                    overflow_bytes: (stack_bottom - current_sp) as usize,
                };
            }

            // Method 3: Update the watermark and check how much of the stack
            // has ever been used
            self.watermark.fetch_min(current_sp, Ordering::Relaxed);
            let high_water_mark = self.watermark.load(Ordering::Relaxed);
            let used =
                (self.base.add(self.total_size - self.guard.guard_size) as u64) - high_water_mark;

            // Treat the stack as nearly full once less than the red zone
            // remains, since leaf functions may write below RSP without
            // adjusting it.
            if used as usize > self.stack_size - self.guard.red_zone {
                return StackStatus::NearOverflow {
                    bytes_remaining: self.stack_size - used as usize,
                };
            }

            StackStatus::Ok {
                used_bytes: used as usize,
                free_bytes: self.stack_size - used as usize,
            }
        }
    }

    /// Get detailed stack statistics
    pub fn get_stats(&self) -> StackStats {
        let current_sp = get_stack_pointer();
        let stack_top = unsafe { self.base.add(self.total_size - self.guard.guard_size) as u64 };
        let high_water_mark = self.watermark.load(Ordering::Relaxed);

        StackStats {
            total_size: self.total_size,
            usable_size: self.stack_size,
            guard_size: self.guard.guard_size,
            // Saturate so a stack pointer outside this stack (e.g. when called
            // from another thread's stack) cannot underflow the subtraction.
            current_usage: stack_top.saturating_sub(current_sp) as usize,
            peak_usage: (stack_top - high_water_mark) as usize,
            red_zone_size: self.guard.red_zone,
        }
    }
}

#[derive(Debug, Clone, Copy)]
pub enum StackStatus {
    Ok {
        used_bytes: usize,
        free_bytes: usize,
    },
    NearOverflow {
        bytes_remaining: usize,
    },
    Overflow {
        overflow_bytes: usize,
    },
    Corrupted {
        corrupted_bytes: usize,
        location: StackCorruption,
    },
}
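
// Illustrative sketch (not part of the original module): how a periodic
// scheduler check might react to each `StackStatus`. `log_warn` and
// `kill_current_thread` are hypothetical hooks, named here only for the
// example and left commented out.
#[allow(dead_code)]
fn react_to_stack_status(stack: &ProtectedStack) {
    match stack.check_overflow() {
        StackStatus::Ok { .. } => {}
        StackStatus::NearOverflow { bytes_remaining } => {
            // Hypothetical: warn while there is still room to recover.
            // log_warn("stack nearly full", bytes_remaining);
            let _ = bytes_remaining;
        }
        StackStatus::Overflow { .. } | StackStatus::Corrupted { .. } => {
            // Hypothetical: the thread's memory can no longer be trusted.
            // kill_current_thread();
        }
    }
}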

#[derive(Debug, Clone, Copy)]
pub enum StackCorruption {
    GuardPage,
    StackFrame,
    Unknown,
}

#[derive(Debug, Clone, Copy)]
pub struct StackStats {
    pub total_size: usize,
    pub usable_size: usize,
    pub guard_size: usize,
    pub current_usage: usize,
    pub peak_usage: usize,
    pub red_zone_size: usize,
}
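
// Illustrative helper (not part of the original module): peak utilization of
// the usable stack as a percentage, computed purely from `StackStats` fields.
#[allow(dead_code)]
fn peak_utilization_percent(stats: &StackStats) -> usize {
    if stats.usable_size == 0 {
        return 0;
    }
    stats.peak_usage * 100 / stats.usable_size
}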

/// Get current stack pointer
///
/// x86_64 only: reads RSP directly. Other architectures would need their own
/// implementation of this read.
#[inline(always)]
fn get_stack_pointer() -> u64 {
    let sp: u64;
    unsafe {
        core::arch::asm!("mov {}, rsp", out(reg) sp);
    }
    sp
}

/// Stack allocation with automatic guard setup
///
/// Each expansion declares its own backing `static`, so the expansion site
/// must not be executed more than once or the stack memory would be aliased.
#[macro_export]
macro_rules! protected_stack {
    ($size:expr) => {{
        static mut STACK_MEMORY: [u8; $size] = [0; $size];
        unsafe {
            // Go through a raw pointer rather than `&mut STACK_MEMORY`, which
            // newer Rust editions reject for `static mut` items.
            $crate::stack_guard::ProtectedStack::new(
                &mut *::core::ptr::addr_of_mut!(STACK_MEMORY),
                $crate::stack_guard::StackGuard::default(),
            )
        }
    }};
    ($size:expr, $guard_size:expr) => {{
        static mut STACK_MEMORY: [u8; $size] = [0; $size];
        unsafe {
            let guard = $crate::stack_guard::StackGuard {
                guard_size: $guard_size,
                ..$crate::stack_guard::StackGuard::default()
            };
            $crate::stack_guard::ProtectedStack::new(
                &mut *::core::ptr::addr_of_mut!(STACK_MEMORY),
                guard,
            )
        }
    }};
}
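
// Illustrative test sketch, assuming an x86_64 host build where the standard
// test harness is available (a pure `no_std` kernel build would compile this
// out or use its own test framework). It only exercises the checks that do
// not depend on actually running on the protected stack.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn fresh_stack_is_not_corrupted() {
        // Dedicated static backing buffer for the test stack (64 KiB).
        static mut TEST_STACK: [u8; 64 * 1024] = [0; 64 * 1024];

        let memory: &'static mut [u8] =
            unsafe { &mut *core::ptr::addr_of_mut!(TEST_STACK) };
        let stack = unsafe { ProtectedStack::new(memory, StackGuard::default()) };

        // The canaries were just written, so the guard region must be intact.
        assert!(!matches!(
            stack.check_overflow(),
            StackStatus::Corrupted { .. }
        ));

        // Sizes reported by get_stats follow directly from the layout math.
        let stats = stack.get_stats();
        assert_eq!(stats.guard_size, 4096);
        assert_eq!(stats.usable_size, 64 * 1024 - 2 * 4096);
        assert_eq!(stats.total_size, 64 * 1024);
    }
}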