use std::alloc::Layout;
use std::collections::{HashMap, HashSet};
use std::ffi::c_void;
use std::fmt::{self, Debug, Formatter};
use std::{mem, ptr};
use backtrace::{BacktraceFmt, Frame, PrintFmt};
/// Metadata kept for a single traced allocation: the layout it was made
/// with and the interned id of the backtrace that performed it.
#[derive(Clone)]
struct TracedAllocation {
    // Layout passed to the allocator for this pointer.
    layout: Layout,
    // Index into `TracingState::callers` identifying the recording backtrace.
    caller: usize,
}
/// A deallocation that could not be matched cleanly: either its layout
/// differed from the allocation's, or the pointer was never seen being
/// allocated (`alloc` is `None`).
#[derive(Clone)]
struct TracedError {
    // Pointer passed to the failing deallocation.
    ptr: *const u8,
    // The original allocation, if one was recorded for `ptr`.
    alloc: Option<TracedAllocation>,
    // Layout and caller of the deallocation itself.
    free: TracedAllocation,
}
/// Mutable bookkeeping accumulated while allocation tracing is active.
pub(super) struct TracingState {
    // Instruction pointer of the frame captured by `start`; backtrace walks
    // stop when they reach this frame so traces stay bounded.
    base_ip: *mut c_void,
    // Interned backtraces; the index is the caller id stored elsewhere.
    callers: Vec<Vec<Frame>>,
    // Maps a captured instruction-pointer sequence to its interned caller id.
    callers_map: HashMap<Vec<*mut c_void>, usize>,
    // Live (not yet freed) allocations, keyed by pointer.
    allocations: HashMap<*const u8, TracedAllocation>,
    // Scratch buffer reused across `trace_caller` calls to avoid allocating
    // on every trace.
    ip_buffer: Vec<*mut c_void>,
    // Mismatched or unknown frees recorded so far.
    errors: Vec<TracedError>,
    // For each allocation-site caller id, the set of caller ids that freed
    // allocations made at that site.
    free_callers: HashMap<usize, HashSet<usize>>,
}
/// Borrowed view of one interned backtrace; its `Debug` impl renders the
/// frames with resolved symbols.
#[derive(Copy, Clone)]
pub struct Backtrace<'a> {
    frames: &'a [Frame],
}
impl Debug for Backtrace<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Strip the current working directory prefix from source paths so
        // the printed trace shows shorter, repo-relative paths when possible.
        let cwd = std::env::current_dir();
        let mut print_path =
            move |fmt: &mut fmt::Formatter<'_>, path: backtrace::BytesOrWideString<'_>| {
                let path = path.into_path_buf();
                if let Ok(cwd) = &cwd {
                    if let Ok(suffix) = path.strip_prefix(cwd) {
                        return fmt::Display::fmt(&suffix.display(), fmt);
                    }
                }
                // Fall back to the absolute path.
                fmt::Display::fmt(&path.display(), fmt)
            };
        let mut bt = BacktraceFmt::new(f, PrintFmt::Full, &mut print_path);
        bt.add_context()?;
        for frame in self.frames {
            // `resolve_frame` may invoke the closure several times (once per
            // symbol, e.g. for inlined frames); the most recent result is
            // kept and propagated after resolution completes.
            let mut res = Ok(());
            backtrace::resolve_frame(frame, |symbol| res = bt.frame().symbol(frame, symbol));
            res?;
        }
        bt.finish()
    }
}
/// Report entry for an allocation that was never freed.
#[derive(Debug, Clone)]
pub struct LeakInfo<'a> {
    // Address of the leaked allocation.
    ptr: *const u8,
    // Layout it was allocated with.
    layout: Layout,
    // Backtrace of the allocating call site.
    caller: Backtrace<'a>,
    // Backtraces of frees of other allocations made at the same site —
    // candidate locations where this allocation was expected to be freed.
    free_callers: Vec<Backtrace<'a>>,
}
impl<'a> LeakInfo<'a> {
    /// Address of the leaked allocation.
    pub fn ptr(&self) -> *const u8 {
        self.ptr
    }

    /// Layout the leaked allocation was created with.
    pub fn layout(&self) -> Layout {
        self.layout
    }

    /// Backtrace of the call site that performed the allocation.
    pub fn alloc_backtrace(&self) -> Backtrace<'a> {
        self.caller
    }

    /// Backtraces of deallocations recorded for other allocations from the
    /// same allocation site — likely places where this one should have been
    /// freed.
    pub fn expected_dealloc_backtraces(&'a self) -> impl Iterator<Item = Backtrace<'a>> {
        self.free_callers.iter().cloned()
    }
}
/// Report entry for an invalid deallocation: a layout mismatch, or a free
/// of a pointer with no recorded allocation (then the `alloc_*` fields are
/// `None`).
#[derive(Debug, Copy, Clone)]
pub struct ErrorInfo<'a> {
    // Pointer passed to the failing deallocation.
    ptr: *const u8,
    // Layout of the matching allocation, if one was recorded.
    alloc_layout: Option<Layout>,
    // Backtrace of the matching allocation, if one was recorded.
    alloc_caller: Option<Backtrace<'a>>,
    // Layout supplied to the deallocation.
    free_layout: Layout,
    // Backtrace of the deallocating call site.
    free_caller: Backtrace<'a>,
}
impl<'a> ErrorInfo<'a> {
    /// Pointer passed to the failing deallocation.
    pub fn ptr(&self) -> *const u8 {
        self.ptr
    }

    /// Layout of the matching allocation, or `None` if the pointer was never
    /// seen being allocated.
    pub fn alloc_layout(&self) -> Option<Layout> {
        self.alloc_layout
    }

    /// Backtrace of the matching allocation, if one was recorded.
    pub fn alloc_backtrace(&'a self) -> Option<Backtrace<'a>> {
        self.alloc_caller
    }

    /// Layout supplied to the deallocation.
    pub fn dealloc_layout(&self) -> Layout {
        self.free_layout
    }

    /// Backtrace of the deallocating call site.
    pub fn dealloc_backtrace(&'a self) -> Backtrace<'a> {
        self.free_caller
    }
}
/// Immutable snapshot produced by `TracingState::finish`, holding everything
/// needed to report leaks and deallocation errors.
#[derive(Clone, Default)]
pub struct TracingInfo {
    // Interned backtraces keyed by caller id; only ids referenced by a leak
    // or an error are carried over from the tracing state.
    callers: HashMap<usize, Vec<Frame>>,
    // Allocations that were still live when tracing finished.
    leaks: HashMap<*const u8, TracedAllocation>,
    // Invalid deallocations observed while tracing.
    errors: Vec<TracedError>,
    // For each allocation-site caller id, the set of caller ids that freed
    // allocations from that site.
    free_callers: HashMap<usize, HashSet<usize>>,
}
impl TracingInfo {
    /// Looks up the interned backtrace for a caller id.
    ///
    /// Panics if `id` is not present in `callers` (all ids reachable through
    /// `leaks`/`errors` are carried over by `TracingState::finish`).
    fn backtrace(&self, id: usize) -> Backtrace {
        let frames = self.callers[&id].as_slice();
        Backtrace { frames }
    }

    /// Iterates over every allocation that was never freed, each paired with
    /// the backtraces of frees recorded for the same allocation site.
    pub fn leaks(&self) -> impl Iterator<Item = LeakInfo> {
        self.leaks.iter().map(move |(&ptr, v)| {
            let free_callers = match self.free_callers.get(&v.caller) {
                Some(ids) => ids.iter().map(|&id| self.backtrace(id)).collect(),
                None => Vec::new(),
            };
            LeakInfo {
                ptr,
                layout: v.layout,
                caller: self.backtrace(v.caller),
                free_callers,
            }
        })
    }

    /// Iterates over recorded deallocation errors (layout mismatches and
    /// frees of pointers with no recorded allocation).
    pub fn errors(&self) -> impl Iterator<Item = ErrorInfo> {
        self.errors.iter().map(move |e| {
            let alloc = e.alloc.as_ref();
            ErrorInfo {
                ptr: e.ptr,
                alloc_layout: alloc.map(|a| a.layout),
                alloc_caller: alloc.map(|a| self.backtrace(a.caller)),
                free_layout: e.free.layout,
                free_caller: self.backtrace(e.free.caller),
            }
        })
    }
}
impl Debug for TracingInfo {
    /// Renders the snapshot as a struct with fully materialized `leaks` and
    /// `errors` lists (each entry prints its resolved backtraces).
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let leaks: Vec<_> = self.leaks().collect();
        let errors: Vec<_> = self.errors().collect();
        f.debug_struct("TracingInfo")
            .field("leaks", &leaks)
            .field("errors", &errors)
            .finish()
    }
}
impl Default for TracingState {
fn default() -> Self {
Self {
base_ip: ptr::null_mut(),
callers: Default::default(),
callers_map: Default::default(),
allocations: Default::default(),
ip_buffer: Default::default(),
errors: Default::default(),
free_callers: Default::default(),
}
}
}
impl TracingState {
    /// Records the instruction pointer of a frame near the tracing root so
    /// that later backtrace walks stop when they reach it and never descend
    /// past the point where tracing was enabled.
    ///
    /// Skips 9 frames — presumably the depth of the tracing machinery above
    /// the real caller; TODO(review) confirm if the call depth changes.
    pub(super) fn start(&mut self) {
        let mut frames_to_skip = 9;
        #[cfg(not(miri))]
        backtrace::trace(|frame| {
            if frames_to_skip == 0 {
                // This is the root frame: remember it and stop walking.
                self.base_ip = frame.ip();
                false
            } else {
                frames_to_skip -= 1;
                true
            }
        });
    }

    /// Drains the accumulated state into a `TracingInfo` snapshot, leaving
    /// `self` reset to empty defaults.
    ///
    /// Only backtraces actually referenced by a leak or an error are moved
    /// into the snapshot; the rest of the interned-backtrace table is
    /// discarded with `self`.
    pub(super) fn finish(&mut self) -> TracingInfo {
        let mut callers = HashMap::new();
        let self_callers = &mut self.callers;
        let mut_callers = &mut callers;
        // Move a backtrace out of `self.callers` the first time its id is
        // visited; the entry API guarantees each vec is taken at most once,
        // so repeated visits see the already-moved entry.
        let mut visit_caller = move |caller| {
            mut_callers
                .entry(caller)
                .or_insert_with(|| mem::take(&mut self_callers[caller]));
        };
        // Backtraces reachable from a leak, including the free sites
        // recorded for the same allocation site...
        for leak in self.allocations.values() {
            visit_caller(leak.caller);
            if let Some(free_callers) = self.free_callers.get(&leak.caller) {
                for &free_caller in free_callers {
                    visit_caller(free_caller);
                }
            }
        }
        // ...and backtraces reachable from a recorded deallocation error.
        for error in &self.errors {
            if let Some(alloc) = &error.alloc {
                visit_caller(alloc.caller);
            }
            visit_caller(error.free.caller);
        }
        TracingInfo {
            callers,
            // `mem::take` replaces `mem::replace(.., Default::default())`:
            // same semantics, clearer intent.
            leaks: mem::take(&mut self.allocations),
            errors: mem::take(&mut self.errors),
            free_callers: mem::take(&mut self.free_callers),
        }
    }

    /// Walks the current backtrace (stopping at `base_ip`) and returns an
    /// interned caller id for it.
    ///
    /// On a cache hit only raw instruction pointers are collected (into the
    /// reusable `ip_buffer`); the more expensive second walk that clones the
    /// full `Frame`s runs only when the sequence has not been seen before.
    fn trace_caller(&mut self) -> usize {
        // Skip this function's own frame and its immediate caller.
        let mut frames_to_skip = 2;
        #[cfg(not(miri))]
        backtrace::trace(|frame| {
            if frames_to_skip > 0 {
                frames_to_skip -= 1;
                return true;
            }
            let ip = frame.ip();
            if ip == self.base_ip {
                // Reached the tracing root recorded by `start`; stop here.
                false
            } else {
                self.ip_buffer.push(ip);
                true
            }
        });
        if let Some(&id) = self.callers_map.get(&self.ip_buffer) {
            // Cache hit: reuse the interned id and recycle the buffer.
            self.ip_buffer.clear();
            id
        } else {
            // Cache miss: walk again, this time cloning full frames so the
            // trace can be symbolized later, then intern the ip sequence.
            let mut frames = Vec::with_capacity(self.ip_buffer.len());
            let mut frames_to_skip = 2;
            #[cfg(not(miri))]
            backtrace::trace(|frame| {
                if frames_to_skip > 0 {
                    frames_to_skip -= 1;
                    return true;
                }
                let ip = frame.ip();
                if ip == self.base_ip {
                    false
                } else {
                    frames.push(frame.clone());
                    true
                }
            });
            let id = self.callers.len();
            self.callers.push(frames);
            self.callers_map.insert(self.ip_buffer.clone(), id);
            self.ip_buffer.clear();
            id
        }
    }

    /// Records a successful allocation of `ptr` with `layout`, tagged with
    /// the interned backtrace of the allocating call site.
    pub(super) fn record_alloc(&mut self, ptr: *const u8, layout: Layout) {
        let caller = self.trace_caller();
        self.allocations
            .insert(ptr, TracedAllocation { layout, caller });
    }

    /// Records a deallocation of `ptr` with `layout`.
    ///
    /// A free of a known pointer is matched against its allocation: the free
    /// site is remembered for that allocation site, and a layout mismatch is
    /// recorded as an error. A free of an unknown pointer is always recorded
    /// as an error.
    pub(super) fn record_free(&mut self, ptr: *const u8, layout: Layout) {
        let caller = self.trace_caller();
        if let Some(alloc) = self.allocations.remove(&ptr) {
            // Remember which sites free allocations from this alloc site;
            // used to suggest expected-dealloc backtraces for leaks.
            self.free_callers
                .entry(alloc.caller)
                .or_default()
                .insert(caller);
            if alloc.layout != layout {
                self.errors.push(TracedError {
                    ptr,
                    alloc: Some(alloc),
                    free: TracedAllocation { caller, layout },
                });
            }
        } else {
            self.errors.push(TracedError {
                ptr,
                alloc: None,
                free: TracedAllocation { caller, layout },
            });
        }
    }
}