//! Stack backtrace support based on frame-pointer unwinding, with optional
//! DWARF-based frame formatting behind the `dwarf` feature.

#![no_std]

#[cfg(feature = "alloc")]
extern crate alloc;

#[cfg(feature = "alloc")]
use alloc::{vec, vec::Vec};
use core::{
    fmt,
    ops::Range,
    sync::atomic::{AtomicUsize, Ordering},
};

use spin::Once;

#[cfg(feature = "dwarf")]
mod dwarf;

#[cfg(feature = "dwarf")]
pub use dwarf::{DwarfReader, FrameIter};

/// The instruction-pointer range in which return addresses are considered valid.
static IP_RANGE: Once<Range<usize>> = Once::new();
/// The stack region in which frame pointers are walked.
static FP_RANGE: Once<Range<usize>> = Once::new();

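/// Initializes backtrace support.
///
/// `ip_range` is the address range of code whose instruction pointers are
/// considered valid, and `fp_range` is the stack region in which frame
/// pointers are walked. Both are stored with [`Once`], so only the first call
/// has any effect. With the `dwarf` feature enabled, this also initializes
/// the `dwarf` module.
///
/// A minimal sketch of a call site, assuming hypothetical linker symbols
/// (`_stext`, `_etext`, `boot_stack`, `boot_stack_top`) that bound the text
/// section and the boot stack:
///
/// ```ignore
/// unsafe extern "C" {
///     fn _stext();
///     fn _etext();
///     fn boot_stack();
///     fn boot_stack_top();
/// }
///
/// axbacktrace::init(
///     _stext as usize.._etext as usize,
///     boot_stack as usize..boot_stack_top as usize,
/// );
/// ```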
pub fn init(ip_range: Range<usize>, fp_range: Range<usize>) {
    IP_RANGE.call_once(|| ip_range);
    FP_RANGE.call_once(|| fp_range);
    #[cfg(feature = "dwarf")]
    dwarf::init();
}

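/// A saved frame record: the caller's frame pointer and the return address,
/// laid out as two consecutive machine words (hence `#[repr(C)]`).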
#[repr(C)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub struct Frame {
    pub fp: usize,
    pub ip: usize,
}

impl Frame {
    // On x86_64 and AArch64, the frame record {fp, ip} is stored at the
    // address the frame pointer points to; on the other supported
    // architectures (e.g. RISC-V and LoongArch), it is stored immediately
    // below the frame pointer, so the read is offset by one record.
    #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
    const OFFSET: usize = 0;
    #[cfg(not(any(target_arch = "x86_64", target_arch = "aarch64")))]
    const OFFSET: usize = 1;

    /// Reads the frame record referenced by `fp`, returning `None` if the
    /// pointer is null or misaligned.
    fn read(fp: usize) -> Option<Self> {
        if fp == 0 || fp % core::mem::align_of::<Frame>() != 0 {
            return None;
        }

        Some(unsafe { (fp as *const Frame).sub(Self::OFFSET).read() })
    }

    /// Returns the instruction pointer adjusted for symbol lookup: `ip` is a
    /// return address, which points to the instruction *after* the call, so
    /// subtracting one lands inside the calling instruction.
    pub fn adjust_ip(&self) -> usize {
        self.ip.wrapping_sub(1)
    }
}

impl fmt::Display for Frame {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "fp={:#x}, ip={:#x}", self.fp, self.ip)
    }
}

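/// Walks the stack from the given frame pointer and collects the frames
/// encountered, stopping at the first frame pointer that leaves the
/// registered stack region, fails to read, or exceeds the configured maximum
/// depth. If [`init`] has not been called yet, an error is logged and an
/// empty vector is returned.
///
/// A small usage sketch; `current_fp` stands for a frame-pointer value
/// obtained elsewhere (e.g. the saved `fp` of a blocked task):
///
/// ```ignore
/// for frame in axbacktrace::unwind_stack(current_fp) {
///     log::info!("{frame}");
/// }
/// ```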
#[cfg(feature = "alloc")]
pub fn unwind_stack(mut fp: usize) -> Vec<Frame> {
    let mut frames = vec![];

    let Some(fp_range) = FP_RANGE.get() else {
        log::error!("Backtrace not initialized. Call `axbacktrace::init` first.");
        return frames;
    };

    let mut depth = 0;
    let max_depth = max_depth();

    while fp_range.contains(&fp)
        && depth < max_depth
        && let Some(frame) = Frame::read(fp)
    {
        frames.push(frame);

        // Sanity check: a single hop up the stack should never span more
        // than 8 MiB. A jump that large means the saved frame pointer is
        // bogus, so stop walking instead of chasing garbage.
        if let Some(large_stack_end) = fp.checked_add(8 * 1024 * 1024)
            && frame.fp >= large_stack_end
        {
            break;
        }

        fp = frame.fp;
        depth += 1;
    }

    frames
}

/// Maximum number of frames collected by [`unwind_stack`] (defaults to 32).
static MAX_DEPTH: AtomicUsize = AtomicUsize::new(32);

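/// Sets the maximum unwinding depth. A depth of zero is rejected, so
/// unwinding can never be disabled entirely.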
pub fn set_max_depth(depth: usize) {
    if depth > 0 {
        MAX_DEPTH.store(depth, Ordering::Relaxed);
    }
}

/// Returns the current maximum unwinding depth.
pub fn max_depth() -> usize {
    MAX_DEPTH.load(Ordering::Relaxed)
}

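/// Returns whether full backtraces are available, i.e. whether the crate was
/// built with the `dwarf` feature.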
pub const fn is_enabled() -> bool {
    cfg!(feature = "dwarf")
}

#[allow(dead_code)]
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone)]
enum Inner {
    /// Frame-pointer unwinding is not implemented for this architecture.
    Unsupported,
    /// The crate was built without the `dwarf` feature.
    Disabled,
    /// A successfully captured list of frames.
    #[cfg(feature = "dwarf")]
    Captured(Vec<Frame>),
}

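/// A captured stack backtrace, the `no_std` counterpart of
/// `std::backtrace::Backtrace`.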
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct Backtrace {
    inner: Inner,
}

impl Backtrace {
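    /// Captures a backtrace of the current thread by reading the frame
    /// pointer register and walking the stack from there.
    ///
    /// Returns a `Disabled` backtrace without the `dwarf` feature, and an
    /// `Unsupported` one on architectures not handled below. Capturing is
    /// only meaningful when the code was compiled with frame pointers
    /// enabled (e.g. `-C force-frame-pointers=yes`); otherwise the walk ends
    /// immediately.
    ///
    /// ```ignore
    /// let bt = Backtrace::capture();
    /// log::error!("{bt}");
    /// ```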
    pub fn capture() -> Self {
        #[cfg(not(feature = "dwarf"))]
        {
            Self {
                inner: Inner::Disabled,
            }
        }
        #[cfg(feature = "dwarf")]
        {
            use core::arch::asm;

            // Read the architecture's frame-pointer register.
            let fp: usize;
            cfg_if::cfg_if! {
                if #[cfg(target_arch = "x86_64")] {
                    unsafe { asm!("mov {ptr}, rbp", ptr = out(reg) fp) };
                } else if #[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))] {
                    unsafe { asm!("addi {ptr}, s0, 0", ptr = out(reg) fp) };
                } else if #[cfg(target_arch = "aarch64")] {
                    unsafe { asm!("mov {ptr}, x29", ptr = out(reg) fp) };
                } else if #[cfg(target_arch = "loongarch64")] {
                    unsafe { asm!("move {ptr}, $fp", ptr = out(reg) fp) };
                } else {
                    return Self {
                        inner: Inner::Unsupported,
                    };
                }
            }

            let frames = unwind_stack(fp);

            Self {
                inner: Inner::Captured(frames),
            }
        }
    }

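    /// Captures a backtrace from a trap context, given the interrupted frame
    /// pointer, instruction pointer, and return address.
    ///
    /// Unlike [`Backtrace::capture`], the starting registers come from the
    /// saved trap frame rather than from the current execution state, so the
    /// trapped frame itself appears at the top of the trace.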
    #[allow(unused_variables)]
    pub fn capture_trap(fp: usize, ip: usize, ra: usize) -> Self {
        #[cfg(not(feature = "dwarf"))]
        {
            Self {
                inner: Inner::Disabled,
            }
        }
        #[cfg(feature = "dwarf")]
        {
            let mut frames = unwind_stack(fp);
            // If the innermost unwound frame holds an instruction pointer
            // outside the registered code range (e.g. the trap arrived
            // before the callee stored its frame record), fall back to the
            // saved return address.
            if let Some(first) = frames.first_mut()
                && let Some(ip_range) = IP_RANGE.get()
                && !ip_range.contains(&first.ip)
            {
                first.ip = ra;
            }

            // Prepend the trapped frame itself. The trap `ip` is the exact
            // faulting instruction, not a return address, so add 1 to cancel
            // out the subtraction in `Frame::adjust_ip`.
            frames.insert(
                0,
                Frame {
                    fp,
                    ip: ip.wrapping_add(1),
                },
            );

            Self {
                inner: Inner::Captured(frames),
            }
        }
    }

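    /// Returns an iterator over the captured frames, or `None` if no frames
    /// were captured (i.e. the backtrace is `Disabled` or `Unsupported`).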
    #[cfg(feature = "dwarf")]
    pub fn frames(&self) -> Option<FrameIter<'_>> {
        let Inner::Captured(capture) = &self.inner else {
            return None;
        };

        Some(FrameIter::new(capture))
    }
}

impl fmt::Display for Backtrace {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.inner {
            Inner::Unsupported => {
                writeln!(f, "<unwinding unsupported>")
            }
            Inner::Disabled => {
                writeln!(f, "<backtrace disabled>")
            }
            #[cfg(feature = "dwarf")]
            Inner::Captured(frames) => {
                writeln!(f, "Backtrace:")?;
                dwarf::fmt_frames(f, frames)
            }
        }
    }
}

impl fmt::Debug for Backtrace {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}