#![no_std]
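//! Frame-pointer-based stack backtraces for `no_std` environments, with
//! optional DWARF symbolization behind the `dwarf` feature.
//!
//! A minimal usage sketch (the address ranges below are placeholders; in a
//! real kernel they would come from the linker script or memory layout):
//!
//! ```rust,ignore
//! // Instruction-pointer (code) range and frame-pointer (stack) range;
//! // both values here are assumptions for illustration only.
//! axbacktrace::init(0x8020_0000..0x8040_0000, 0x8100_0000..0x8110_0000);
//!
//! let bt = axbacktrace::Backtrace::capture();
//! log::error!("{}", bt);
//! ```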

#[cfg(feature = "alloc")]
extern crate alloc;

#[cfg(feature = "alloc")]
use alloc::{vec, vec::Vec};
use core::{
    fmt,
    ops::Range,
    sync::atomic::{AtomicUsize, Ordering},
};

use spin::Once;

#[cfg(feature = "dwarf")]
mod dwarf;

#[cfg(feature = "dwarf")]
pub use dwarf::{DwarfReader, FrameIter};

/// Range of valid instruction-pointer (code) addresses; set by [`init`].
static IP_RANGE: Once<Range<usize>> = Once::new();
/// Range of valid frame-pointer (stack) addresses; set by [`init`].
static FP_RANGE: Once<Range<usize>> = Once::new();

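/// Initializes backtrace support with the valid instruction-pointer range and
/// frame-pointer (stack) range.
///
/// Must be called before capturing backtraces; otherwise unwinding logs an
/// error and returns no frames. With the `dwarf` feature enabled, this also
/// runs the `dwarf` module's initialization.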
pub fn init(ip_range: Range<usize>, fp_range: Range<usize>) {
    IP_RANGE.call_once(|| ip_range);
    FP_RANGE.call_once(|| fp_range);
    #[cfg(feature = "dwarf")]
    dwarf::init();
}

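/// A single stack frame: the caller's saved frame pointer and the return
/// address, in the order they are stored on the stack (hence `#[repr(C)]`).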
#[repr(C)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct Frame {
    /// The caller's saved frame pointer.
    pub fp: usize,
    /// The return address (instruction pointer) saved in this frame.
    pub ip: usize,
}

impl Frame {
    // On x86_64 and AArch64 the frame pointer points directly at the saved
    // `{fp, ip}` pair, so no offset is needed.
    #[cfg(feature = "alloc")]
    #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
    const OFFSET: usize = 0;
    // On other architectures (e.g. RISC-V, LoongArch) the frame pointer points
    // just past the saved pair, so read one `Frame` below it.
    #[cfg(feature = "alloc")]
    #[cfg(not(any(target_arch = "x86_64", target_arch = "aarch64")))]
    const OFFSET: usize = 1;

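    /// Reads the `{fp, ip}` pair saved at `fp`, returning `None` if the frame
    /// pointer is null or misaligned.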
    #[cfg(feature = "alloc")]
    fn read(fp: usize) -> Option<Self> {
        if fp == 0 || !fp.is_multiple_of(core::mem::align_of::<Frame>()) {
            return None;
        }

        Some(unsafe { (fp as *const Frame).sub(Self::OFFSET).read() })
    }

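    /// Returns the instruction pointer adjusted to fall within the call
    /// instruction itself (a return address points to the instruction *after*
    /// the call).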
    pub fn adjust_ip(&self) -> usize {
        self.ip.wrapping_sub(1)
    }
}

impl fmt::Display for Frame {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "fp={:#x}, ip={:#x}", self.fp, self.ip)
    }
}

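/// Walks the stack by following saved frame pointers, starting at `fp`, and
/// collects up to [`max_depth`] frames.
///
/// Unwinding stops when a frame pointer leaves the range passed to [`init`],
/// when a frame cannot be read, or when the chain jumps implausibly far.
/// Returns an empty vector (with an error log) if [`init`] was never called.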
#[cfg(feature = "alloc")]
pub fn unwind_stack(mut fp: usize) -> Vec<Frame> {
    let mut frames = vec![];

    let Some(fp_range) = FP_RANGE.get() else {
        log::error!("Backtrace not initialized. Call `axbacktrace::init` first.");
        return frames;
    };

    let mut depth = 0;
    let max_depth = max_depth();

    while fp_range.contains(&fp)
        && depth < max_depth
        && let Some(frame) = Frame::read(fp)
    {
        frames.push(frame);

        // Sanity check: if the next frame pointer is more than 8 MiB above the
        // current one, the chain is almost certainly corrupt, so stop here.
        if let Some(large_stack_end) = fp.checked_add(8 * 1024 * 1024)
            && frame.fp >= large_stack_end
        {
            break;
        }

        fp = frame.fp;
        depth += 1;
    }

    frames
}

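/// Maximum number of frames collected per backtrace (defaults to 32).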
static MAX_DEPTH: AtomicUsize = AtomicUsize::new(32);

/// Sets the maximum number of frames collected per backtrace.
///
/// A value of `0` is ignored, so unwinding always stays bounded.
pub fn set_max_depth(depth: usize) {
    if depth > 0 {
        MAX_DEPTH.store(depth, Ordering::Relaxed);
    }
}

/// Returns the current maximum number of frames collected per backtrace.
pub fn max_depth() -> usize {
    MAX_DEPTH.load(Ordering::Relaxed)
}

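/// Returns `true` if backtrace capture is compiled in, i.e. the `dwarf`
/// feature is enabled.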
pub const fn is_enabled() -> bool {
    cfg!(feature = "dwarf")
}

/// The captured state backing a [`Backtrace`].
#[allow(dead_code)]
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone)]
enum Inner {
    /// Unwinding is not supported on this target architecture.
    Unsupported,
    /// Backtrace support was compiled out (the `dwarf` feature is disabled).
    Disabled,
    /// The captured frames, innermost first.
    #[cfg(feature = "dwarf")]
    Captured(Vec<Frame>),
}

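/// A captured stack backtrace.
///
/// Create one with [`Backtrace::capture`] (from the current call site) or
/// [`Backtrace::capture_trap`] (from a trap frame), and print it via its
/// `Display`/`Debug` implementations.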
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct Backtrace {
    inner: Inner,
}

impl Backtrace {
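    /// Captures a backtrace from the current call site.
    ///
    /// Reads the architecture's frame-pointer register and walks the stack
    /// from there. Without the `dwarf` feature this returns a placeholder that
    /// formats as `<backtrace disabled>`; on architectures with no supported
    /// frame-pointer register it formats as `<unwinding unsupported>`.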
    pub fn capture() -> Self {
        #[cfg(not(feature = "dwarf"))]
        {
            Self {
                inner: Inner::Disabled,
            }
        }
        #[cfg(feature = "dwarf")]
        {
            use core::arch::asm;

            // Read the frame-pointer register for the current architecture.
            let fp: usize;
            cfg_if::cfg_if! {
                if #[cfg(target_arch = "x86_64")] {
                    unsafe { asm!("mov {ptr}, rbp", ptr = out(reg) fp) };
                } else if #[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))] {
                    unsafe { asm!("addi {ptr}, s0, 0", ptr = out(reg) fp) };
                } else if #[cfg(target_arch = "aarch64")] {
                    unsafe { asm!("mov {ptr}, x29", ptr = out(reg) fp) };
                } else if #[cfg(target_arch = "loongarch64")] {
                    unsafe { asm!("move {ptr}, $fp", ptr = out(reg) fp) };
                } else {
                    return Self {
                        inner: Inner::Unsupported,
                    };
                }
            }

            let frames = unwind_stack(fp);

            core::hint::black_box(());

            Self {
                inner: Inner::Captured(frames),
            }
        }
    }

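    /// Captures a backtrace from a trap or exception context, using the frame
    /// pointer, instruction pointer, and return address saved in the trap
    /// frame.
    ///
    /// The trap location itself is recorded as the innermost frame. A sketch
    /// of typical use from a trap handler (the `TrapFrame` type and its field
    /// names here are assumptions for illustration, not part of this crate):
    ///
    /// ```rust,ignore
    /// fn handle_exception(tf: &TrapFrame) {
    ///     let bt = Backtrace::capture_trap(tf.fp, tf.ip, tf.ra);
    ///     log::error!("{}", bt);
    /// }
    /// ```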
    #[allow(unused_variables)]
    pub fn capture_trap(fp: usize, ip: usize, ra: usize) -> Self {
        #[cfg(not(feature = "dwarf"))]
        {
            Self {
                inner: Inner::Disabled,
            }
        }
        #[cfg(feature = "dwarf")]
        {
            let mut frames = unwind_stack(fp);

            // If the innermost unwound return address does not fall in the
            // code range (e.g. the trap hit before a frame was fully set up),
            // fall back to the `ra` saved in the trap frame.
            if let Some(first) = frames.first_mut()
                && let Some(ip_range) = IP_RANGE.get()
                && !ip_range.contains(&first.ip)
            {
                first.ip = ra;
            }

            // Record the trap location itself as the innermost frame; the +1
            // offsets the address so that `Frame::adjust_ip` maps it back to
            // the exact faulting instruction.
            frames.insert(
                0,
                Frame {
                    fp,
                    ip: ip.wrapping_add(1),
                },
            );

            Self {
                inner: Inner::Captured(frames),
            }
        }
    }

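    /// Returns an iterator over the captured frames, or `None` if nothing was
    /// captured (unwinding disabled or unsupported).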
    #[cfg(feature = "dwarf")]
    pub fn frames(&self) -> Option<FrameIter<'_>> {
        let Inner::Captured(capture) = &self.inner else {
            return None;
        };

        Some(FrameIter::new(capture))
    }
}

impl fmt::Display for Backtrace {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.inner {
            Inner::Unsupported => {
                writeln!(f, "<unwinding unsupported>")
            }
            Inner::Disabled => {
                writeln!(f, "<backtrace disabled>")
            }
            #[cfg(feature = "dwarf")]
            Inner::Captured(frames) => {
                writeln!(f, "Backtrace:")?;
                dwarf::fmt_frames(f, frames)
            }
        }
    }
}

impl fmt::Debug for Backtrace {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}