cortex_m_stack/
lib.rs

1#![no_std]
2#![doc = include_str!(concat!("../", env!("CARGO_PKG_README")))]
3
4use core::{arch::asm, mem::size_of, ops::Range};
5
/// The value used to paint the stack.
///
/// Free stack words are filled with this value by [repaint_stack];
/// [stack_painted] and [stack_painted_binary] count how many words
/// still contain it to estimate worst-case stack usage.
pub const STACK_PAINT_VALUE: u32 = 0xCCCC_CCCC;
8
/// The [Range] currently in use for the stack.
///
/// Note: the stack is defined in reverse, as it runs from 'start' to 'end' downwards.
/// Hence this range is technically empty because `start >= end`.
///
/// If you want to use this range to do range-like things, use [stack_rev] instead.
#[inline]
pub const fn stack() -> Range<*mut u32> {
    // Symbols placed by the linker script at the bounds of the stack
    // region; only their addresses are meaningful, never their values.
    unsafe extern "C" {
        static mut _stack_start: u32;
        static mut _stack_end: u32;
    }

    // Take the raw addresses without ever creating references to the
    // extern statics.
    let start = &raw mut _stack_start;
    let end = &raw mut _stack_end;
    start..end
}
24
25/// The [Range] currently in use for the stack,
26/// defined in reverse such that [Range] operations are viable.
27///
28/// Hence the `end` of this [Range] is where the stack starts.
29#[inline]
30pub const fn stack_rev() -> Range<*mut u32> {
31    stack().end..stack().start
32}
33
/// Convenience function to fetch the current stack pointer.
///
/// Returns the value of the `sp` register; the pointer type of `res` is
/// inferred from the return type.
#[inline]
pub fn current_stack_ptr() -> *mut u32 {
    let res;
    // SAFETY: copying `sp` into a general-purpose register reads no
    // memory and has no side effects.
    unsafe { asm!("mov {}, sp", out(reg) res) };
    res
}
41
42/// The number of bytes that are reserved for the stack at compile time.
43#[inline]
44pub const fn stack_size() -> u32 {
45    // Safety: start >= end. If this is not the case your linker did something wrong.
46    (unsafe { stack().start.byte_offset_from_unsigned(stack().end) }) as u32
47}
48
49/// The number of bytes of the stack that are currently in use.
50#[inline]
51pub fn current_stack_in_use() -> u32 {
52    // Safety: start >= end. If this is not the case your linker did something wrong.
53    (unsafe { stack().start.byte_offset_from_unsigned(current_stack_ptr()) }) as u32
54}
55
56/// The number of bytes of the stack that are currently free.
57///
58/// If the stack has overflowed, this function returns 0.
59#[inline]
60pub fn current_stack_free() -> u32 {
61    stack_size().saturating_sub(current_stack_in_use())
62}
63
64/// What fraction of the stack is currently in use.
65#[inline]
66pub fn current_stack_fraction() -> f32 {
67    current_stack_in_use() as f32 / stack_size() as f32
68}
69
70/// Paint the part of the stack that is currently not in use.
71///
72/// **Note:** this can take some time, and an ISR could possibly interrupt this process,
73/// dirtying up your freshly painted stack.
74/// If you wish to prevent this, run this inside a critical section using [cortex_m::interrupt::free].
75///
76/// Runs in *O(n)* where *n* is the size of the stack.
77/// This function is inefficient in the sense that it repaints the entire stack,
78/// even the parts that still have the [STACK_PAINT_VALUE].
79#[inline(never)]
80pub fn repaint_stack() {
81    unsafe {
82        asm!(
83            "0:",
84            "cmp sp, r0",
85            "bls 1f",
86            "stmia r0!, {{r1}}",
87            "b 0b",
88            "1:",
89            in("r0") stack().end,
90            in("r1") STACK_PAINT_VALUE,
91        )
92    };
93}
94
95/// Finds the number of bytes that have not been overwritten on the stack since the last repaint.
96///
97/// In other words: shows the worst case free stack space since [repaint_stack] was last called.
98///
99/// Runs in *O(n)* where *n* is the size of the stack.
100#[allow(clippy::manual_slice_size_calculation)]
101pub fn stack_painted() -> u32 {
102    // Safety: we should be able to read anywhere on the stack using this.
103    let slice = unsafe {
104        &*core::ptr::slice_from_raw_parts(stack().end, current_stack_free() as usize / 4)
105    };
106    for (word_i, word) in slice.iter().enumerate() {
107        if *word != STACK_PAINT_VALUE {
108            return (word_i * size_of::<u32>()) as u32;
109        }
110    }
111    (slice.len() * size_of::<u32>()) as u32
112}
113
114/// Finds the number of bytes that have not been overwritten on the stack since the last repaint using binary search.
115///
116/// In other words: shows the worst case free stack space since [repaint_stack] was last called.
117///
118/// Uses binary search to find the point after which the stack is written.
119/// This will assume that the stack is written in a consecutive fashion.
120/// Writing somewhere out-of-order into the painted stack will not be detected.
121///
122/// Runs in *O(log(n))* where *n* is the size of the stack.
123///
124/// **Danger:** if the current (active) stack contains the [STACK_PAINT_VALUE] this computation may be very incorrect.
125pub fn stack_painted_binary() -> u32 {
126    // Safety: we should be able to read anywhere on the stack using this.
127    let slice = unsafe {
128        &*core::ptr::slice_from_raw_parts(stack().end, current_stack_free() as usize / 4)
129    };
130    (slice.partition_point(|&word| word == STACK_PAINT_VALUE) * size_of::<u32>()) as u32
131}