use std::collections::{HashMap, HashSet};

use crate::ir::{FrameCategory, FrameId, ProfileIR};
/// Aggregated allocation statistics for a single function (frame).
#[derive(Debug, Clone)]
pub struct AllocationStats {
    /// Frame this row was aggregated for.
    pub frame_id: FrameId,
    /// Function name, taken from the frame's `display_name()`.
    pub name: String,
    /// Source location string, taken from the frame's `location()`.
    pub location: String,
    /// Frame classification (app code, deps, internals, native).
    pub category: FrameCategory,
    /// Bytes attributed directly to this function, i.e. the summed weights
    /// of samples where it was the leaf (allocating) frame.
    pub self_size: u64,
    /// Inclusive bytes: summed weights of samples where this function
    /// appeared anywhere on the stack.
    pub total_size: u64,
    /// Number of samples in which this function was the allocating leaf.
    pub allocation_count: u32,
}
impl AllocationStats {
    /// Self size expressed as a percentage of `total`.
    ///
    /// Returns `0.0` when `total` is zero so callers never divide by zero.
    #[expect(clippy::cast_precision_loss)]
    pub fn self_percent(&self, total: u64) -> f64 {
        match total {
            0 => 0.0,
            t => self.self_size as f64 / t as f64 * 100.0,
        }
    }

    /// Render a byte count as a human-readable string ("1.50 MB", "42 B", ...),
    /// using binary units (1 KB = 1024 B) and two decimal places.
    pub fn format_size(bytes: u64) -> String {
        const KB: u64 = 1024;
        const MB: u64 = KB * 1024;
        const GB: u64 = MB * 1024;
        // Walk thresholds from largest to smallest; first match wins.
        for (scale, unit) in [(GB, "GB"), (MB, "MB"), (KB, "KB")] {
            if bytes >= scale {
                return format!("{:.2} {unit}", bytes as f64 / scale as f64);
            }
        }
        format!("{bytes} B")
    }
}
/// Result of `HeapAnalyzer::analyze`: profile-wide totals plus a ranked
/// per-function breakdown.
#[derive(Debug)]
pub struct HeapAnalysis {
    /// Total sampled weight of the profile (bytes, from `total_weight()`).
    pub total_size: u64,
    /// Number of allocation samples in the profile (from `sample_count()`).
    pub total_allocations: usize,
    /// Per-function stats, sorted by descending self size and truncated to
    /// the analyzer's `top_n`.
    pub functions: Vec<AllocationStats>,
    /// Size totals per frame category, attributed at the leaf frame only.
    pub category_breakdown: CategorySizeBreakdown,
}
/// Heap size totals bucketed by frame category (all values in bytes).
#[derive(Debug, Clone, Default)]
pub struct CategorySizeBreakdown {
    pub app: u64,
    pub deps: u64,
    pub node_internal: u64,
    pub v8_internal: u64,
    pub native: u64,
}

impl CategorySizeBreakdown {
    /// Sum of every category bucket.
    pub fn total(&self) -> u64 {
        [
            self.app,
            self.deps,
            self.node_internal,
            self.v8_internal,
            self.native,
        ]
        .into_iter()
        .sum()
    }
}
/// Builder-style analyzer that turns a `ProfileIR` into a [`HeapAnalysis`].
pub struct HeapAnalyzer {
    // Minimum self percentage (relative to a frame's own inclusive size)
    // required to keep a function in the report; 0.0 disables the filter.
    min_percent: f64,
    // Maximum number of functions retained after sorting by self size.
    top_n: usize,
    // When false, frames whose category `is_internal()` are filtered out.
    include_internals: bool,
}
impl HeapAnalyzer {
pub fn new() -> Self {
Self {
min_percent: 0.0,
top_n: 50,
include_internals: false,
}
}
pub fn min_percent(mut self, percent: f64) -> Self {
self.min_percent = percent;
self
}
pub fn top_n(mut self, n: usize) -> Self {
self.top_n = n;
self
}
pub fn include_internals(mut self, include: bool) -> Self {
self.include_internals = include;
self
}
#[expect(clippy::cast_precision_loss)]
pub fn analyze(&self, profile: &ProfileIR) -> HeapAnalysis {
let total_size = profile.total_weight();
let total_allocations = profile.sample_count();
let mut self_sizes: HashMap<FrameId, u64> = HashMap::new();
let mut total_sizes: HashMap<FrameId, u64> = HashMap::new();
let mut alloc_counts: HashMap<FrameId, u32> = HashMap::new();
let mut category_breakdown = CategorySizeBreakdown::default();
for sample in &profile.samples {
let size = sample.weight;
if let Some(stack) = profile.get_stack(sample.stack_id) {
if let Some(&leaf_frame) = stack.frames.last() {
*self_sizes.entry(leaf_frame).or_default() += size;
*alloc_counts.entry(leaf_frame).or_default() += 1;
if let Some(frame) = profile.get_frame(leaf_frame) {
match frame.category {
FrameCategory::App => category_breakdown.app += size,
FrameCategory::Deps => category_breakdown.deps += size,
FrameCategory::NodeInternal => {
category_breakdown.node_internal += size;
}
FrameCategory::V8Internal => category_breakdown.v8_internal += size,
FrameCategory::Native => category_breakdown.native += size,
}
}
}
for &frame_id in &stack.frames {
*total_sizes.entry(frame_id).or_default() += size;
}
}
}
let mut functions: Vec<AllocationStats> = profile
.frames
.iter()
.filter_map(|frame| {
let self_size = self_sizes.get(&frame.id).copied().unwrap_or(0);
let total_size = total_sizes.get(&frame.id).copied().unwrap_or(0);
if self_size == 0 && total_size == 0 {
return None;
}
if !self.include_internals && frame.category.is_internal() {
return None;
}
let self_pct = if total_size > 0 {
(self_size as f64 / total_size as f64) * 100.0
} else {
0.0
};
if self_pct < self.min_percent && self.min_percent > 0.0 {
return None;
}
Some(AllocationStats {
frame_id: frame.id,
name: frame.display_name().to_string(),
location: frame.location(),
category: frame.category,
self_size,
total_size,
allocation_count: alloc_counts.get(&frame.id).copied().unwrap_or(0),
})
})
.collect();
functions.sort_by(|a, b| b.self_size.cmp(&a.self_size));
functions.truncate(self.top_n);
HeapAnalysis {
total_size,
total_allocations,
functions,
category_breakdown,
}
}
}
impl Default for HeapAnalyzer {
fn default() -> Self {
Self::new()
}
}