#![allow(unsafe_code, missing_docs, dead_code, clippy::cast_possible_wrap)]
use std::alloc::{GlobalAlloc, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
/// A [`GlobalAlloc`] wrapper around the system allocator that keeps
/// process-wide statistics (current bytes, peak bytes, alloc/dealloc
/// counts) in the file-level atomic counters.
///
/// Install with `#[global_allocator]` and read the counters through
/// `AllocSnapshot`.
pub struct TrackingAllocator {
// The real allocator every request is forwarded to.
inner: System,
}
// Bytes currently allocated (grows in `alloc`, shrinks in `dealloc`).
static CURRENT_BYTES: AtomicUsize = AtomicUsize::new(0);
// High-water mark of CURRENT_BYTES; raised in `alloc`, reset via `AllocSnapshot::reset`.
static PEAK_BYTES: AtomicUsize = AtomicUsize::new(0);
// Number of successful (non-null) allocations.
static ALLOC_COUNT: AtomicUsize = AtomicUsize::new(0);
// Number of deallocations.
static DEALLOC_COUNT: AtomicUsize = AtomicUsize::new(0);
impl Default for TrackingAllocator {
fn default() -> Self {
Self::new()
}
}
impl TrackingAllocator {
pub const fn new() -> Self {
Self { inner: System }
}
}
unsafe impl GlobalAlloc for TrackingAllocator {
    /// Allocates via the system allocator and, on success, updates the
    /// byte counters and the peak high-water mark.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // SAFETY: the caller upholds the `GlobalAlloc::alloc` contract
        // (non-zero-sized, valid `layout`), which is exactly what the
        // inner allocator requires.
        let ptr = unsafe { self.inner.alloc(layout) };
        if !ptr.is_null() {
            let size = layout.size();
            // `fetch_add` returns the previous value, so `new` is the
            // live-byte total including this allocation.
            let new = CURRENT_BYTES.fetch_add(size, SeqCst) + size;
            // Atomically raise the high-water mark; `fetch_max` replaces
            // the equivalent hand-rolled compare-exchange loop.
            PEAK_BYTES.fetch_max(new, SeqCst);
            ALLOC_COUNT.fetch_add(1, SeqCst);
        }
        ptr
    }

    /// Records the release, then frees via the system allocator.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        CURRENT_BYTES.fetch_sub(layout.size(), SeqCst);
        DEALLOC_COUNT.fetch_add(1, SeqCst);
        // SAFETY: the caller guarantees `ptr` was returned by this
        // allocator with the same `layout`, per the trait contract.
        unsafe { self.inner.dealloc(ptr, layout) };
    }
}
/// A point-in-time copy of the four allocator counters.
///
/// The loads are individual atomics, not one transaction, so fields may
/// be mutually inconsistent under concurrent allocation.
#[derive(Debug, Clone, Copy)]
pub struct AllocSnapshot {
// Bytes live at snapshot time.
pub current_bytes: usize,
// High-water mark of live bytes since start (or last reset).
pub peak_bytes: usize,
// Successful allocations since start (or last reset).
pub alloc_count: usize,
// Deallocations since start (or last reset).
pub dealloc_count: usize,
}
impl AllocSnapshot {
pub fn now() -> Self {
Self {
current_bytes: CURRENT_BYTES.load(SeqCst),
peak_bytes: PEAK_BYTES.load(SeqCst),
alloc_count: ALLOC_COUNT.load(SeqCst),
dealloc_count: DEALLOC_COUNT.load(SeqCst),
}
}
pub fn reset() -> Self {
let current = CURRENT_BYTES.load(SeqCst);
PEAK_BYTES.store(current, SeqCst);
ALLOC_COUNT.store(0, SeqCst);
DEALLOC_COUNT.store(0, SeqCst);
Self {
current_bytes: current,
peak_bytes: current,
alloc_count: 0,
dealloc_count: 0,
}
}
pub fn snapshot() -> Self {
Self::now()
}
pub fn delta_from(self, before: Self) -> AllocDelta {
AllocDelta {
peak_increase: self.peak_bytes.saturating_sub(before.current_bytes),
alloc_count: self.alloc_count.saturating_sub(before.alloc_count),
dealloc_count: self.dealloc_count.saturating_sub(before.dealloc_count),
net_bytes: (self.current_bytes as isize) - (before.current_bytes as isize),
}
}
}
/// The difference between two [`AllocSnapshot`]s, as produced by
/// `AllocSnapshot::delta_from`.
#[derive(Debug, Clone, Copy)]
pub struct AllocDelta {
// How far the peak rose above the earlier snapshot's live bytes (clamped at 0).
pub peak_increase: usize,
// Allocations performed between the two snapshots (clamped at 0).
pub alloc_count: usize,
// Deallocations performed between the two snapshots (clamped at 0).
pub dealloc_count: usize,
// Signed change in live bytes; negative means a net release.
pub net_bytes: isize,
}
/// Renders a byte count with a binary-unit suffix (B, KB, MB, GB).
///
/// GB and MB are shown with two decimal places, KB with one, and plain
/// bytes exactly. Units are binary (1 KB = 1024 B).
pub fn format_bytes(bytes: usize) -> String {
    const KIB: f64 = 1024.0;
    const MIB: f64 = 1024.0 * KIB;
    const GIB: f64 = 1024.0 * MIB;
    let b = bytes as f64;
    match bytes {
        0..=1023 => format!("{bytes} B"),
        1024..=1_048_575 => format!("{:.1} KB", b / KIB),
        1_048_576..=1_073_741_823 => format!("{:.2} MB", b / MIB),
        _ => format!("{:.2} GB", b / GIB),
    }
}
/// Formats `n` with thousands separators, e.g. `1234567` -> `"1,234,567"`.
pub fn format_count(n: usize) -> String {
    let digits = n.to_string();
    let len = digits.len();
    // Worst case adds one comma per three digits.
    let mut out = String::with_capacity(len + len / 3);
    for (i, ch) in digits.chars().enumerate() {
        // A comma precedes every group whose remaining length is a
        // multiple of three (never before the first digit).
        if i > 0 && (len - i) % 3 == 0 {
            out.push(',');
        }
        out.push(ch);
    }
    out
}