#![deny(
missing_debug_implementations,
missing_copy_implementations,
trivial_casts,
trivial_numeric_casts,
unused_import_braces,
unused_imports,
unused_qualifications,
missing_docs
)]
#![cfg_attr(doc_cfg, feature(allocator_api))]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
use std::{
alloc::{GlobalAlloc, Layout, System},
sync::atomic::{AtomicIsize, AtomicUsize, Ordering},
};
/// Interface for querying and resetting the peak-memory statistics kept by
/// an instrumented allocator.
pub trait PeakAllocTrait {
    /// Resets the recorded peak (and the running live-bytes tracker) to zero,
    /// so subsequent peaks are measured relative to this point.
    fn reset_peak_memory(&self);
    /// Returns the highest number of bytes observed live at any single
    /// moment since construction or the last reset.
    fn get_peak_memory(&self) -> usize;
}
/// A wrapper around any [`GlobalAlloc`] implementation that records the peak
/// number of bytes simultaneously allocated through it.
#[derive(Default, Debug)]
pub struct PeakAlloc<T: GlobalAlloc> {
    /// Running count of currently-live bytes. Signed because a reset while
    /// allocations are outstanding lets later deallocations drive it below
    /// zero; negative values are clamped when computing the peak.
    peak_bytes_allocated_tracker: AtomicIsize,
    /// High-water mark of the tracker, monotonically raised via `fetch_max`.
    peak_bytes_allocated: AtomicUsize,
    /// The allocator that actually services requests.
    inner: T,
}
pub static INSTRUMENTED_SYSTEM: PeakAlloc<System> = PeakAlloc {
peak_bytes_allocated_tracker: AtomicIsize::new(0),
peak_bytes_allocated: AtomicUsize::new(0),
inner: System,
};
impl PeakAlloc<System> {
pub const fn system() -> Self {
PeakAlloc {
peak_bytes_allocated_tracker: AtomicIsize::new(0),
peak_bytes_allocated: AtomicUsize::new(0),
inner: System,
}
}
}
impl<T: GlobalAlloc> PeakAllocTrait for PeakAlloc<T> {
    /// Zeroes both the peak and the live-bytes tracker.
    ///
    /// NOTE(review): resetting the tracker while allocations are still live
    /// means the subsequent "peak" measures net growth since this reset, not
    /// absolute live bytes — this appears intentional (the tracker is signed
    /// and clamped at zero when the peak is computed), but worth confirming.
    // The two stores are not a single atomic operation; a concurrent
    // allocation between them can observe a partially-reset state.
    #[inline]
    fn reset_peak_memory(&self) {
        self.peak_bytes_allocated.store(0, Ordering::SeqCst);
        self.peak_bytes_allocated_tracker.store(0, Ordering::SeqCst);
    }
    /// Returns the recorded high-water mark in bytes.
    #[inline]
    fn get_peak_memory(&self) -> usize {
        self.peak_bytes_allocated.load(Ordering::SeqCst)
    }
}
impl<T: GlobalAlloc> PeakAlloc<T> {
pub const fn new(inner: T) -> Self {
PeakAlloc {
peak_bytes_allocated_tracker: AtomicIsize::new(0),
peak_bytes_allocated: AtomicUsize::new(0),
inner,
}
}
#[inline]
fn track_alloc(&self, bytes: usize) {
let prev = self
.peak_bytes_allocated_tracker
.fetch_add(bytes as isize, Ordering::SeqCst);
let current_peak = (prev + bytes as isize).max(0) as usize;
self.peak_bytes_allocated
.fetch_max(current_peak, Ordering::SeqCst);
}
#[inline]
fn track_dealloc(&self, bytes: usize) {
self.peak_bytes_allocated_tracker
.fetch_sub(bytes as isize, Ordering::SeqCst);
}
}
unsafe impl<'a, T: GlobalAlloc + 'a> GlobalAlloc for &'a PeakAlloc<T> {
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
(*self).alloc(layout)
}
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
(*self).dealloc(ptr, layout)
}
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
(*self).alloc_zeroed(layout)
}
unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
(*self).realloc(ptr, layout, new_size)
}
}
// Instruments every request, delegating the actual memory management to the
// wrapped allocator.
//
// Fix: the original updated the counters *before* calling the inner
// allocator, so a failed (null-returning) `alloc`/`alloc_zeroed`/`realloc`
// still inflated the live-bytes tracker — and could inflate the recorded
// peak — permanently skewing the statistics. Counters are now adjusted only
// after a successful call.
unsafe impl<T: GlobalAlloc> GlobalAlloc for PeakAlloc<T> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let ptr = self.inner.alloc(layout);
        // A null return means no memory was handed out; leave counters alone.
        if !ptr.is_null() {
            self.track_alloc(layout.size());
        }
        ptr
    }
    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // `dealloc` has no failure mode, so bookkeeping is unconditional.
        self.track_dealloc(layout.size());
        self.inner.dealloc(ptr, layout)
    }
    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        let ptr = self.inner.alloc_zeroed(layout);
        if !ptr.is_null() {
            self.track_alloc(layout.size());
        }
        ptr
    }
    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        let new_ptr = self.inner.realloc(ptr, layout, new_size);
        // On failure the original block is untouched, so usage is unchanged;
        // on success only the size *difference* moves the tracker.
        if !new_ptr.is_null() {
            match new_size.cmp(&layout.size()) {
                std::cmp::Ordering::Greater => self.track_alloc(new_size - layout.size()),
                std::cmp::Ordering::Less => self.track_dealloc(layout.size() - new_size),
                std::cmp::Ordering::Equal => {}
            }
        }
        new_ptr
    }
}