#![allow(unsafe_code)]
use std::alloc::{GlobalAlloc, Layout};
use std::sync::atomic::{AtomicU64, Ordering};
/// Point-in-time copy of a counting allocator's four counters.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct AllocSnapshot {
/// Allocation calls recorded.
pub allocs: u64,
/// Deallocation calls recorded.
pub deallocs: u64,
/// Total bytes requested across recorded allocations.
pub bytes_allocated: u64,
/// Total bytes released across recorded deallocations.
pub bytes_deallocated: u64,
}
impl AllocSnapshot {
/// Returns the field-wise difference `self - earlier`.
///
/// Each field saturates at zero, so passing a snapshot that is not
/// actually older never underflows — it just clamps to 0.
#[inline]
#[must_use]
pub fn since(self, earlier: Self) -> Self {
let delta = |now: u64, then: u64| now.saturating_sub(then);
Self {
allocs: delta(self.allocs, earlier.allocs),
deallocs: delta(self.deallocs, earlier.deallocs),
bytes_allocated: delta(self.bytes_allocated, earlier.bytes_allocated),
bytes_deallocated: delta(self.bytes_deallocated, earlier.bytes_deallocated),
}
}
}
/// A [`GlobalAlloc`] wrapper that keeps running totals of allocator
/// activity in relaxed atomic counters.
pub struct CountingAllocator<Inner: GlobalAlloc> {
/// The allocator every request is forwarded to.
inner: Inner,
/// Count of allocation calls.
allocs: AtomicU64,
/// Count of deallocation calls.
deallocs: AtomicU64,
/// Sum of bytes requested by allocations.
bytes_allocated: AtomicU64,
/// Sum of bytes released by deallocations.
bytes_deallocated: AtomicU64,
}
impl<Inner: GlobalAlloc> CountingAllocator<Inner> {
/// Wraps `inner` with all counters starting at zero.
///
/// `const` so it can initialise a `#[global_allocator]` static.
pub const fn new(inner: Inner) -> Self {
Self {
inner,
allocs: AtomicU64::new(0),
deallocs: AtomicU64::new(0),
bytes_allocated: AtomicU64::new(0),
bytes_deallocated: AtomicU64::new(0),
}
}
/// Allocation calls recorded so far.
#[inline]
pub fn allocs(&self) -> u64 {
self.allocs.load(Ordering::Relaxed)
}
/// Deallocation calls recorded so far.
#[inline]
pub fn deallocs(&self) -> u64 {
self.deallocs.load(Ordering::Relaxed)
}
/// Total bytes recorded as allocated so far.
#[inline]
pub fn bytes_allocated(&self) -> u64 {
self.bytes_allocated.load(Ordering::Relaxed)
}
/// Total bytes recorded as deallocated so far.
#[inline]
pub fn bytes_deallocated(&self) -> u64 {
self.bytes_deallocated.load(Ordering::Relaxed)
}
/// Captures all four counters into an [`AllocSnapshot`].
///
/// Each counter is loaded individually with relaxed ordering, so the
/// snapshot is not an atomic cross-counter view — adequate for
/// statistics, not for exact live-byte invariants under concurrency.
#[inline]
pub fn snapshot(&self) -> AllocSnapshot {
AllocSnapshot {
allocs: self.allocs.load(Ordering::Relaxed),
deallocs: self.deallocs.load(Ordering::Relaxed),
bytes_allocated: self.bytes_allocated.load(Ordering::Relaxed),
bytes_deallocated: self.bytes_deallocated.load(Ordering::Relaxed),
}
}
}
unsafe impl<Inner: GlobalAlloc> GlobalAlloc for CountingAllocator<Inner> {
/// Forwards to `inner`, counting the call only if it succeeds.
///
/// Counting before the inner call (as a naive wrapper does) would
/// inflate `allocs`/`bytes_allocated` on failure: a null return means
/// no memory was handed out.
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
let ptr = unsafe { self.inner.alloc(layout) };
if !ptr.is_null() {
self.allocs.fetch_add(1, Ordering::Relaxed);
self.bytes_allocated
.fetch_add(layout.size() as u64, Ordering::Relaxed);
}
ptr
}
/// Forwards to `inner` and records the release of `layout.size()` bytes.
///
/// The `GlobalAlloc` contract guarantees `ptr` is a live block of this
/// allocator with this layout, so the dealloc always takes effect.
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
self.deallocs.fetch_add(1, Ordering::Relaxed);
self.bytes_deallocated
.fetch_add(layout.size() as u64, Ordering::Relaxed);
unsafe { self.inner.dealloc(ptr, layout) }
}
/// Forwards to `inner`, counting the call only if it succeeds
/// (same rationale as [`Self::alloc`]).
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
let ptr = unsafe { self.inner.alloc_zeroed(layout) };
if !ptr.is_null() {
self.allocs.fetch_add(1, Ordering::Relaxed);
self.bytes_allocated
.fetch_add(layout.size() as u64, Ordering::Relaxed);
}
ptr
}
/// Forwards to `inner`; a successful realloc is accounted as one
/// dealloc of the old block plus one alloc of the new block.
///
/// On failure `realloc` leaves the original allocation untouched, so
/// no counters change — in particular we must not record a dealloc of
/// memory that was never freed, which would skew live-byte accounting
/// derived from snapshots.
unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
let new_ptr = unsafe { self.inner.realloc(ptr, layout, new_size) };
if !new_ptr.is_null() {
self.allocs.fetch_add(1, Ordering::Relaxed);
self.deallocs.fetch_add(1, Ordering::Relaxed);
self.bytes_allocated
.fetch_add(new_size as u64, Ordering::Relaxed);
self.bytes_deallocated
.fetch_add(layout.size() as u64, Ordering::Relaxed);
}
new_ptr
}
}