cortex_m_stack/
lib.rs

1#![no_std]
2#![doc = include_str!(concat!("../", env!("CARGO_PKG_README")))]
3
4use core::{arch::asm, mem::size_of, ops::Range};
5
/// The value used to paint the stack.
///
/// A repeating `0xCC` byte pattern; presumably chosen because it is
/// recognisable in a memory dump and unlikely to appear as a full word of
/// ordinary stack data — NOTE(review): confirm no legitimate stack data can
/// consist of this exact word, or [stack_painted_binary] may over-report.
pub const STACK_PAINT_VALUE: u32 = 0xCCCC_CCCC;
8
/// The [Range] currently in use for the stack.
///
/// Note: because the stack grows downwards, `start` is the *higher* address
/// and `end` the lower one, so `start >= end` and the range is technically
/// empty as far as [Range] iteration/containment is concerned.
///
/// For range-like operations, use [stack_rev] instead.
#[inline]
pub const fn stack() -> Range<*mut u32> {
    // Symbols provided by the linker script marking the stack boundaries.
    unsafe extern "C" {
        static mut _stack_start: u32;
        static mut _stack_end: u32;
    }

    let top: *mut u32 = core::ptr::addr_of_mut!(_stack_start);
    let bottom: *mut u32 = core::ptr::addr_of_mut!(_stack_end);
    top..bottom
}
24
25/// The [Range] currently in use for the stack,
26/// defined in reverse such that [Range] operations are viable.
27///
28/// Hence the `end` of this [Range] is where the stack starts.
29#[inline]
30pub const fn stack_rev() -> Range<*mut u32> {
31    stack().end..stack().start
32}
33
34/// Convenience function to fetch the current stack pointer.
35#[inline]
36pub fn current_stack_ptr() -> *mut u32 {
37    cortex_m::register::msp::read() as *mut u32
38}
39
40/// The number of bytes that are reserved for the stack at compile time.
41#[inline]
42pub const fn stack_size() -> u32 {
43    // Safety: start >= end. If this is not the case your linker did something wrong.
44    (unsafe { stack().start.byte_offset_from_unsigned(stack().end) }) as u32
45}
46
47/// The number of bytes of the stack that are currently in use.
48#[inline]
49pub fn current_stack_in_use() -> u32 {
50    // Safety: start >= end. If this is not the case your linker did something wrong.
51    (unsafe { stack().start.byte_offset_from_unsigned(current_stack_ptr()) }) as u32
52}
53
54/// The number of bytes of the stack that are currently free.
55///
56/// If the stack has overflowed, this function returns 0.
57#[inline]
58pub fn current_stack_free() -> u32 {
59    stack_size().saturating_sub(current_stack_in_use())
60}
61
62/// What fraction of the stack is currently in use.
63#[inline]
64pub fn current_stack_fraction() -> f32 {
65    current_stack_in_use() as f32 / stack_size() as f32
66}
67
68/// Paint the part of the stack that is currently not in use.
69///
70/// **Note:** this can take some time, and an ISR could possibly interrupt this process,
71/// dirtying up your freshly painted stack.
72/// If you wish to prevent this, run this inside a critical section using [cortex_m::interrupt::free].
73///
74/// Runs in *O(n)* where *n* is the size of the stack.
75/// This function is inefficient in the sense that it repaints the entire stack,
76/// even the parts that still have the [STACK_PAINT_VALUE].
77#[inline(never)]
78pub fn repaint_stack() {
79    unsafe {
80        asm!(
81            "0:",
82            "cmp sp, r0",
83            "bls 1f",
84            "stmia r0!, {{r1}}",
85            "b 0b",
86            "1:",
87            in("r0") stack().end,
88            in("r1") STACK_PAINT_VALUE,
89        )
90    };
91}
92
93/// Finds the number of bytes that have not been overwritten on the stack since the last repaint.
94///
95/// In other words: shows the worst case free stack space since [repaint_stack] was last called.
96///
97/// Runs in *O(n)* where *n* is the size of the stack.
98#[allow(clippy::manual_slice_size_calculation)]
99pub fn stack_painted() -> u32 {
100    // Safety: we should be able to read anywhere on the stack using this.
101    let slice = unsafe {
102        &*core::ptr::slice_from_raw_parts(stack().end, current_stack_free() as usize / 4)
103    };
104    for (word_i, word) in slice.iter().enumerate() {
105        if *word != STACK_PAINT_VALUE {
106            return (word_i * size_of::<u32>()) as u32;
107        }
108    }
109    (slice.len() * size_of::<u32>()) as u32
110}
111
112/// Finds the number of bytes that have not been overwritten on the stack since the last repaint using binary search.
113///
114/// In other words: shows the worst case free stack space since [repaint_stack] was last called.
115///
116/// Uses binary search to find the point after which the stack is written.
117/// This will assume that the stack is written in a consecutive fashion.
118/// Writing somewhere out-of-order into the painted stack will not be detected.
119///
120/// Runs in *O(log(n))* where *n* is the size of the stack.
121///
122/// **Danger:** if the current (active) stack contains the [STACK_PAINT_VALUE] this computation may be very incorrect.
123pub fn stack_painted_binary() -> u32 {
124    // Safety: we should be able to read anywhere on the stack using this.
125    let slice = unsafe {
126        &*core::ptr::slice_from_raw_parts(stack().end, current_stack_free() as usize / 4)
127    };
128    (slice.partition_point(|&word| word == STACK_PAINT_VALUE) * size_of::<u32>()) as u32
129}