use std::io::Write;
use crate::analysis::{
AllocationStats, CpuAnalysis, HeapAnalysis, HotFunctionDetail, HotPath, Priority,
Recommendation, RecommendationEngine,
};
use crate::ir::{FrameCategory, FrameKind, ProfileIR};
use super::{Formatter, OutputError, format_time_ms, format_time_us};
/// Renders CPU and heap profile analyses as human-readable Markdown
/// reports via the [`Formatter`] trait.
pub struct MarkdownFormatter;
impl Formatter for MarkdownFormatter {
/// Writes the complete CPU profile report as Markdown.
///
/// Sections, in order: title + run metadata, quick assessment (TL;DR),
/// executive summary table, hotspot tables (self/total time), hot paths,
/// caller/callee attribution, recursive functions, per-file and
/// per-package stats, native-frame and GC sections, and recommendations.
/// Honors the filters recorded in `analysis.metadata` (category filters,
/// internals hiding, package focus).
///
/// # Errors
/// Returns `OutputError` if any write to `writer` fails.
#[expect(clippy::cast_precision_loss)]
#[expect(clippy::too_many_lines)]
fn write_cpu_analysis(
    &self,
    profile: &ProfileIR,
    analysis: &CpuAnalysis,
    writer: &mut dyn Write,
) -> Result<(), OutputError> {
    // --- Title: mention the package filter when one is active. ---
    if let Some(ref pkg) = analysis.metadata.focus_package {
        writeln!(writer, "# Profile Inspect Report — CPU (Package: `{pkg}`)")?;
    } else {
        writeln!(writer, "# Profile Inspect Report — CPU")?;
    }
    writeln!(writer)?;
    // Source-file line, noting how many profiles were merged into one.
    if let Some(ref source) = analysis.metadata.source_file {
        if analysis.metadata.profiles_merged > 1 {
            writeln!(
                writer,
                "**Profile:** `{source}` ({} profiles merged)",
                analysis.metadata.profiles_merged
            )?;
        } else {
            writeln!(writer, "**Profile:** `{source}`")?;
        }
    }
    let internals_str = if analysis.metadata.internals_filtered {
        "hidden"
    } else {
        "shown"
    };
    // Summarize sourcemap resolution: external + inline counts combined.
    let sourcemaps_total =
        analysis.metadata.sourcemaps_loaded + analysis.metadata.sourcemaps_inline;
    let sourcemaps_str = if sourcemaps_total > 0 {
        if analysis.metadata.sourcemaps_inline > 0 && analysis.metadata.sourcemaps_loaded > 0 {
            format!(
                "{} frames resolved ({} inline)",
                sourcemaps_total, analysis.metadata.sourcemaps_inline
            )
        } else if analysis.metadata.sourcemaps_inline > 0 {
            format!("{} frames resolved (inline)", sourcemaps_total)
        } else {
            format!("{} frames resolved", sourcemaps_total)
        }
    } else {
        "OFF".to_string()
    };
    let cpu_time_str = format_time_ms(analysis.metadata.duration_ms);
    // With wall-clock data we can report CPU utilization and a workload
    // classification; otherwise fall back to a single duration line.
    if let Some(wall_ms) = analysis.metadata.wall_time_ms {
        let wall_str = format_time_ms(wall_ms);
        let util = analysis.metadata.cpu_utilization().unwrap_or(0.0);
        let util_pct = util * 100.0;
        // Merged profiles sum CPU across processes, so the number is aggregated.
        let util_note = if analysis.metadata.profiles_merged > 1 {
            " (aggregated)"
        } else {
            ""
        };
        writeln!(
            writer,
            "**Wall time:** {} | **CPU time:** {} | **CPU utilization:** ~{:.0}%{}",
            wall_str, cpu_time_str, util_pct, util_note
        )?;
        writeln!(
            writer,
            "**Samples:** {} | **Interval:** ~{:.2} ms",
            analysis.metadata.sample_count, analysis.metadata.sample_interval_ms
        )?;
        let workload_class =
            Self::classify_workload(util_pct, analysis.metadata.profiles_merged);
        writeln!(writer, "**Workload:** {}", workload_class)?;
        let breakdown = &analysis.category_breakdown;
        let total = breakdown.total();
        if total > 0 {
            let top_category = Self::top_category_summary(breakdown, total);
            writeln!(writer, "**Top category (self time):** {}", top_category)?;
        }
        // Utilization above 100% is expected when several processes were merged.
        if analysis.metadata.profiles_merged > 1 && util_pct > 100.0 {
            writeln!(writer)?;
            writeln!(
                writer,
                "> ℹ️ CPU utilization exceeds 100% because {} profiles were merged (multiple processes running in parallel).",
                analysis.metadata.profiles_merged
            )?;
        }
    } else {
        writeln!(
            writer,
            "**Duration:** {} | **Samples:** {} | **Interval:** ~{:.2} ms",
            cpu_time_str, analysis.metadata.sample_count, analysis.metadata.sample_interval_ms
        )?;
    }
    // Filter status: a package filter replaces the internals/sourcemaps line.
    if let Some(ref pkg) = analysis.metadata.focus_package {
        writeln!(writer, "**Package filter:** `{pkg}`")?;
    } else {
        writeln!(
            writer,
            "**Node/V8 internals:** {internals_str} | **Sourcemaps:** {sourcemaps_str}"
        )?;
    }
    if let Some(scope_line) = Self::scope_line(&analysis.metadata) {
        writeln!(writer, "{scope_line}")?;
    }
    // Data-quality caveats (short profile, coarse sampling interval).
    let quality_notes = Self::profile_quality_notes(&analysis.metadata);
    if !quality_notes.is_empty() {
        for note in quality_notes {
            writeln!(writer, "> ℹ️ {note}")?;
        }
    }
    if analysis.metadata.profiles_merged > 1 {
        writeln!(writer)?;
        writeln!(
            writer,
            "> **Note:** {} profiles were merged. This happens when Node.js spawns multiple processes",
            analysis.metadata.profiles_merged
        )?;
        writeln!(
            writer,
            "> (e.g., `npx` launching your script, worker threads, or child processes)."
        )?;
        writeln!(
            writer,
            "> Each process generates its own `.cpuprofile` file."
        )?;
    }
    writeln!(writer)?;
    writeln!(writer, "---")?;
    writeln!(writer)?;
    // --- Quick assessment ("TL;DR") section. ---
    Self::write_tldr(writer, profile, analysis)?;
    // --- Executive summary. Category totals always come from the full
    // profile, so flag that whenever any filter is active. ---
    if Self::has_filters(&analysis.metadata) {
        writeln!(writer, "## Executive Summary (Full Profile)")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "> **Scope note:** Function tables, hot paths, and recommendations honor filters. Category totals below reflect the full profile."
        )?;
        writeln!(writer)?;
    } else {
        writeln!(writer, "## Executive Summary")?;
        writeln!(writer)?;
    }
    writeln!(writer, "| Category | Self | % | Stack | % | Assessment |")?;
    writeln!(
        writer,
        "|----------|-----------|---|-----------|---|------------|"
    )?;
    let breakdown = &analysis.category_breakdown;
    let inclusive = &analysis.category_breakdown_inclusive;
    let total = breakdown.total();
    Self::write_summary_row_with_inclusive(
        writer,
        "App code",
        breakdown.app,
        inclusive.app,
        total,
    )?;
    Self::write_summary_row_with_inclusive(
        writer,
        "Dependencies",
        breakdown.deps,
        inclusive.deps,
        total,
    )?;
    Self::write_summary_row_with_inclusive(
        writer,
        "Node.js internals",
        breakdown.node_internal,
        inclusive.node_internal,
        total,
    )?;
    // V8 and native frames are reported as one combined row.
    Self::write_summary_row_with_inclusive(
        writer,
        "V8/Native",
        breakdown.v8_internal + breakdown.native,
        inclusive.v8_internal + inclusive.native,
        total,
    )?;
    writeln!(writer)?;
    writeln!(
        writer,
        "> **Self:** CPU time spent directly executing this category's code (exclusive, sums to 100%)."
    )?;
    writeln!(
        writer,
        "> **Stack:** CPU time when this category appears anywhere in the call stack (inclusive)."
    )?;
    writeln!(
        writer,
        "> Stack percentages can exceed 100% because categories overlap (e.g., App calls Deps)."
    )?;
    writeln!(writer)?;
    writeln!(writer, "**Key takeaways:**")?;
    Self::write_key_takeaways(writer, analysis)?;
    writeln!(writer)?;
    writeln!(writer, "---")?;
    writeln!(writer)?;
    // --- Startup vs. steady-state breakdown, when available. ---
    if let Some(ref phases) = analysis.phase_analysis {
        Self::write_phase_analysis(writer, phases)?;
    }
    // --- Top 25 functions ranked by exclusive (self) time. ---
    writeln!(writer, "## Top Hotspots by Self Time")?;
    writeln!(writer)?;
    writeln!(
        writer,
        "> Self time = CPU time spent directly in this function."
    )?;
    writeln!(writer)?;
    writeln!(
        writer,
        "| # | Self | % | Samples | Total | Function | Location | Category |"
    )?;
    writeln!(
        writer,
        "|---|------|---|---------|-------|----------|----------|----------|"
    )?;
    for (i, func) in analysis.functions.iter().take(25).enumerate() {
        let self_time = format_time_us(func.self_time);
        let self_pct = func.self_percent(analysis.total_time);
        let total_time = format_time_us(func.total_time);
        let category_badge = Self::category_badge(func.category);
        writeln!(
            writer,
            "| {} | {} | {} | {} | {} | `{}` | `{}` | {} |",
            i + 1,
            self_time,
            Self::format_percent(self_pct),
            func.self_samples,
            total_time,
            Self::escape_markdown(&func.name),
            Self::format_location(&func.location),
            category_badge
        )?;
    }
    writeln!(writer)?;
    // --- Top 15 functions ranked by inclusive (total) time. ---
    writeln!(writer, "## Top Hotspots by Total Time")?;
    writeln!(writer)?;
    writeln!(
        writer,
        "> Total time = CPU time when this function is on the stack (including callees)."
    )?;
    writeln!(writer)?;
    writeln!(
        writer,
        "| # | Total | % | Self | Function | Location | Category |"
    )?;
    writeln!(
        writer,
        "|---|-------|---|------|----------|----------|----------|"
    )?;
    for (i, func) in analysis.functions_by_total.iter().take(15).enumerate() {
        let self_time = format_time_us(func.self_time);
        let total_time = format_time_us(func.total_time);
        let total_pct = func.total_percent(analysis.total_time);
        let category_badge = Self::category_badge(func.category);
        writeln!(
            writer,
            "| {} | {} | {} | {} | `{}` | `{}` | {} |",
            i + 1,
            total_time,
            Self::format_percent(total_pct),
            self_time,
            Self::escape_markdown(&func.name),
            Self::format_location(&func.location),
            category_badge
        )?;
    }
    writeln!(writer)?;
    if !analysis.hot_paths.is_empty() {
        writeln!(writer, "## Hot Paths")?;
        writeln!(writer)?;
        writeln!(writer, "> Sorted by CPU time descending.")?;
        writeln!(writer)?;
        // Keep only paths with meaningful weight; if none qualify, show the
        // single largest path so the section is never empty.
        let mut significant_paths: Vec<&HotPath> = analysis
            .hot_paths
            .iter()
            .filter(|path| {
                path.percent >= 0.2 || path.time >= 10_000 || path.sample_count >= 10
            })
            .collect();
        if significant_paths.is_empty() {
            significant_paths = analysis.hot_paths.iter().take(1).collect();
        }
        for (i, path) in significant_paths.iter().take(5).enumerate() {
            let cpu_time_str = format_time_us(path.time);
            let path_pct = Self::format_percent(path.percent);
            // Flag statistically weak paths (tiny share or few samples).
            let low_signal = path.percent < 0.1 || path.sample_count < 10;
            let signal_note = if low_signal { " — low signal" } else { "" };
            writeln!(
                writer,
                "### Path #{} — {} ({}, {} samples){}",
                i + 1,
                path_pct,
                cpu_time_str,
                path.sample_count,
                signal_note
            )?;
            writeln!(writer)?;
            writeln!(writer, "```")?;
            Self::write_hot_path_visualization(writer, profile, path)?;
            writeln!(writer, "```")?;
            writeln!(writer)?;
            Self::write_path_explanation(writer, profile, path, analysis)?;
            writeln!(writer)?;
        }
    }
    if !analysis.hot_function_details.is_empty() {
        writeln!(writer, "## Caller & Callee Attribution")?;
        writeln!(writer)?;
        for detail in &analysis.hot_function_details {
            Self::write_hot_function_detail(writer, detail, analysis)?;
        }
    }
    if !analysis.recursive_functions.is_empty() {
        Self::write_recursive_functions(writer, analysis)?;
    }
    // --- Per-file stats. Omitted under a package filter because they are
    // computed from the full profile. ---
    if !analysis.file_stats.is_empty() {
        if analysis.metadata.focus_package.is_some() {
            writeln!(writer, "## By Source File")?;
            writeln!(writer)?;
            writeln!(
                writer,
                "> Omitted under package filter — file stats are computed from the full profile."
            )?;
            writeln!(writer)?;
        } else {
            let file_stats: Vec<_> = analysis
                .file_stats
                .iter()
                .filter(|fs| Self::category_allowed(&analysis.metadata, fs.category))
                .collect();
            if !file_stats.is_empty() {
                writeln!(writer, "## By Source File")?;
                writeln!(writer)?;
                writeln!(writer, "| File | Self | Total | Samples | Category |")?;
                writeln!(writer, "|------|------|-------|---------|----------|")?;
                for fs in file_stats.iter().take(15) {
                    let self_time = format_time_us(fs.self_time);
                    let total_time = format_time_us(fs.total_time);
                    let category_badge = Self::category_badge(fs.category);
                    writeln!(
                        writer,
                        "| `{}` | {} | {} | {} | {} |",
                        Self::format_location(&fs.file),
                        self_time,
                        total_time,
                        fs.call_count,
                        category_badge
                    )?;
                }
                writeln!(writer)?;
            }
        }
    }
    // --- Package table: only shown when Deps frames are in scope and no
    // single package is focused. ---
    let show_package_stats = analysis.metadata.focus_package.is_none()
        && (analysis.metadata.filter_categories.is_empty()
            || analysis
                .metadata
                .filter_categories
                .contains(&FrameCategory::Deps));
    if !analysis.package_stats.is_empty() && show_package_stats {
        writeln!(writer, "## By Dependency Package")?;
        writeln!(writer)?;
        writeln!(writer, "| Package | Time | % of Deps | Top Function |")?;
        writeln!(writer, "|---------|------|-----------|--------------|")?;
        for pkg in &analysis.package_stats {
            let time_str = format_time_us(pkg.time);
            writeln!(
                writer,
                "| `{}` | {} | {:.1}% | `{}` |",
                pkg.package,
                time_str,
                pkg.percent_of_deps,
                Self::escape_markdown(&pkg.top_function)
            )?;
        }
        writeln!(writer)?;
    }
    if analysis.native_time > 0 {
        writeln!(writer, "## Native/Runtime Frames (Leaf)")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "> Time in frames explicitly marked `Native` (leaf frames only)."
        )?;
        writeln!(
            writer,
            "> V8/Node JS internals still appear in the category breakdown above."
        )?;
        writeln!(writer)?;
        let native_time = format_time_us(analysis.native_time);
        let native_pct = if analysis.total_time > 0 {
            (analysis.native_time as f64 / analysis.total_time as f64) * 100.0
        } else {
            0.0
        };
        writeln!(writer, "**Total:** {} ({:.1}%)", native_time, native_pct)?;
        writeln!(writer)?;
        // Up to five named native frames, resolved through the frame table.
        let native_entries: Vec<_> = analysis
            .functions
            .iter()
            .filter(|f| {
                profile
                    .get_frame(f.frame_id)
                    .is_some_and(|frame| frame.kind == FrameKind::Native)
            })
            .take(5)
            .collect();
        if !native_entries.is_empty() {
            writeln!(writer, "**Visible native frames:**")?;
            for func in native_entries {
                let time_str = format_time_us(func.self_time);
                writeln!(writer, "- `{}` — {}", func.name, time_str)?;
            }
            writeln!(writer)?;
        }
        writeln!(writer, "**What this means:**")?;
        writeln!(
            writer,
            "- This time is spent in compiled code (V8/Node native internals, syscalls, or addons)"
        )?;
        writeln!(
            writer,
            "- To attribute to specific libraries, capture a native profile (Instruments/perf)"
        )?;
        writeln!(
            writer,
            "- Focus optimization on reducing how often your JS code triggers native operations"
        )?;
        writeln!(writer)?;
    }
    // --- Full GC section when detailed analysis exists; otherwise a short
    // summary if any GC time was observed at all. ---
    if let Some(ref gc) = analysis.gc_analysis {
        Self::write_gc_analysis(
            writer,
            gc,
            analysis.total_time,
            analysis.phase_analysis.as_ref(),
        )?;
    } else if analysis.gc_time > 0 {
        writeln!(writer, "## GC & Allocation Signals")?;
        writeln!(writer)?;
        let gc_time = format_time_us(analysis.gc_time);
        // NOTE(review): division is unguarded; gc_time > 0 presumably implies
        // total_time > 0 — confirm the analysis pass guarantees this.
        let gc_pct = (analysis.gc_time as f64 / analysis.total_time as f64) * 100.0;
        writeln!(writer, "**GC time:** {} ({:.1}%)", gc_time, gc_pct)?;
        writeln!(writer)?;
    }
    Self::write_recommendations(writer, profile, analysis)?;
    Ok(())
}
/// Writes the heap allocation report: totals, per-category breakdown,
/// a table of every recorded allocator, and simple recommendations.
///
/// # Errors
/// Returns `OutputError` if any write to `writer` fails.
#[expect(clippy::cast_precision_loss)]
fn write_heap_analysis(
    &self,
    profile: &ProfileIR,
    analysis: &HeapAnalysis,
    writer: &mut dyn Write,
) -> Result<(), OutputError> {
    writeln!(writer, "# Profile Inspect Report — Heap")?;
    writeln!(writer)?;
    if let Some(ref source) = profile.source_file {
        writeln!(writer, "**Profile:** `{source}`")?;
    }
    writeln!(
        writer,
        "**Total allocated:** {} | **Allocations:** {}",
        AllocationStats::format_size(analysis.total_size),
        analysis.total_allocations
    )?;
    writeln!(writer)?;
    writeln!(writer, "---")?;
    writeln!(writer)?;
    writeln!(writer, "## Allocation by Category")?;
    writeln!(writer)?;
    writeln!(writer, "| Category | Size | % |")?;
    writeln!(writer, "|----------|------|---|")?;
    let breakdown = &analysis.category_breakdown;
    let total = breakdown.total();
    Self::write_heap_category_row(writer, "App code", breakdown.app, total)?;
    Self::write_heap_category_row(writer, "Dependencies", breakdown.deps, total)?;
    Self::write_heap_category_row(writer, "Node.js internals", breakdown.node_internal, total)?;
    // V8 and native allocations are reported as one combined row.
    Self::write_heap_category_row(
        writer,
        "V8/Native",
        breakdown.v8_internal + breakdown.native,
        total,
    )?;
    writeln!(writer)?;
    writeln!(writer, "## Top Allocations by Size")?;
    writeln!(writer)?;
    writeln!(
        writer,
        "| # | Self | % | Allocs | Total | Function | Location | Category |"
    )?;
    writeln!(
        writer,
        "|---|------|---|--------|-----------|----------|----------|----------|"
    )?;
    // Note: unlike the CPU tables, this intentionally lists every recorded
    // function (no `.take(n)` truncation).
    for (i, func) in analysis.functions.iter().enumerate() {
        let self_pct = func.self_percent(analysis.total_size);
        let self_str = AllocationStats::format_size(func.self_size);
        let total_str = AllocationStats::format_size(func.total_size);
        let category = Self::category_badge(func.category);
        writeln!(
            writer,
            "| {} | {} | {:.1}% | {} | {} | `{}` | `{}` | {} |",
            i + 1,
            self_str,
            self_pct,
            func.allocation_count,
            total_str,
            Self::escape_markdown(&func.name),
            Self::escape_markdown(&func.location),
            category
        )?;
    }
    writeln!(writer)?;
    writeln!(writer, "## Recommendations")?;
    writeln!(writer)?;
    // Call out any function responsible for >=10% of all allocated bytes.
    let large_allocators: Vec<_> = analysis
        .functions
        .iter()
        .filter(|f| f.self_percent(analysis.total_size) >= 10.0)
        .collect();
    if !large_allocators.is_empty() {
        writeln!(writer, "**Large allocators (>=10% of total):**")?;
        writeln!(writer)?;
        for func in &large_allocators {
            let pct = func.self_percent(analysis.total_size);
            writeln!(
                writer,
                "- `{}` — {:.1}% ({})",
                func.name,
                pct,
                AllocationStats::format_size(func.self_size)
            )?;
        }
    } else {
        writeln!(writer, "No single function dominates allocations.")?;
        writeln!(writer)?;
        writeln!(writer, "Memory is well-distributed across the codebase.")?;
    }
    Ok(())
}
}
impl MarkdownFormatter {
/// Writes the "Quick Assessment" (TL;DR) section: verdict line, a small
/// per-category status table, a top-hotspot callout, and a hotspot
/// concentration summary. `_profile` is currently unused but kept for
/// signature symmetry with the other section writers.
#[expect(clippy::cast_precision_loss)]
fn write_tldr(
    writer: &mut dyn Write,
    _profile: &ProfileIR,
    analysis: &CpuAnalysis,
) -> Result<(), OutputError> {
    writeln!(writer, "## Quick Assessment")?;
    writeln!(writer)?;
    let filters = &analysis.metadata.filter_categories;
    let has_filter = !filters.is_empty();
    if has_filter {
        let filter_names = Self::format_category_list(filters);
        writeln!(
            writer,
            "> **Filter active:** Function lists, hot paths, and recommendations show only {} categories",
            filter_names
        )?;
        writeln!(writer)?;
    }
    let breakdown = &analysis.category_breakdown;
    let total = breakdown.total();
    // Category shares of the full (unfiltered) profile, guarded against an
    // empty profile (total == 0).
    let app_pct = if total > 0 {
        (breakdown.app as f64 / total as f64) * 100.0
    } else {
        0.0
    };
    let deps_pct = if total > 0 {
        (breakdown.deps as f64 / total as f64) * 100.0
    } else {
        0.0
    };
    let internal_pct = if total > 0 {
        ((breakdown.v8_internal + breakdown.native + breakdown.node_internal) as f64
            / total as f64)
            * 100.0
    } else {
        0.0
    };
    let visible_total = Self::visible_total_time(analysis);
    // When any filter narrows the view, percentages are shown against both
    // the full total and the filtered total.
    let show_filtered_pct = has_filter
        || analysis.metadata.internals_filtered
        || analysis.metadata.focus_package.is_some();
    let verdict = Self::generate_verdict(app_pct, deps_pct, internal_pct, analysis);
    writeln!(writer, "**{}**", verdict)?;
    writeln!(writer)?;
    writeln!(writer, "| Category | Time | Status |")?;
    writeln!(writer, "|----------|------|--------|")?;
    // Only render rows whose category survives the active filters.
    let show_app = !has_filter || filters.contains(&FrameCategory::App);
    let show_deps = !has_filter || filters.contains(&FrameCategory::Deps);
    let show_internal = !has_filter
        || filters.contains(&FrameCategory::NodeInternal)
        || filters.contains(&FrameCategory::V8Internal)
        || filters.contains(&FrameCategory::Native);
    // Renders "X% total; Y% filtered" when the filtered total differs,
    // otherwise just "X%".
    let format_pct_label = |value: u64| -> String {
        let pct_total = if total > 0 {
            (value as f64 / total as f64) * 100.0
        } else {
            0.0
        };
        if show_filtered_pct {
            if let Some(filtered_total) = visible_total {
                if filtered_total > 0 && filtered_total != total {
                    let pct_filtered = (value as f64 / filtered_total as f64) * 100.0;
                    return format!("{:.0}% total; {:.0}% filtered", pct_total, pct_filtered);
                }
            }
        }
        format!("{:.0}%", pct_total)
    };
    if show_app {
        let app_status = if app_pct > 50.0 {
            "⚠️ Focus here"
        } else if app_pct > 20.0 {
            "👀 Worth checking"
        } else {
            "✅ Healthy"
        };
        writeln!(
            writer,
            "| App code | {} ({}) | {} |",
            format_time_us(breakdown.app),
            format_pct_label(breakdown.app),
            app_status
        )?;
    }
    if show_deps {
        let deps_status = if deps_pct > 40.0 {
            "⚠️ Heavy deps"
        } else if deps_pct > 20.0 {
            "👀 Review usage"
        } else {
            "✅ Normal"
        };
        writeln!(
            writer,
            "| Dependencies | {} ({}) | {} |",
            format_time_us(breakdown.deps),
            format_pct_label(breakdown.deps),
            deps_status
        )?;
    }
    if show_internal {
        let internal_status = if internal_pct > 70.0 {
            "ℹ️ Startup overhead"
        } else {
            "✅ Normal"
        };
        writeln!(
            writer,
            "| V8/Node internals | {} ({}) | {} |",
            format_time_us(breakdown.v8_internal + breakdown.native + breakdown.node_internal),
            format_pct_label(
                breakdown.v8_internal + breakdown.native + breakdown.node_internal
            ),
            internal_status
        )?;
    }
    writeln!(writer)?;
    // Top-hotspot callout: detailed only when it has at least 2% self time.
    if let Some(top) = analysis.functions.first() {
        let top_pct = top.self_percent(analysis.total_time);
        if top_pct >= 2.0 {
            // Savings estimate assumes the hotspot could be made 2x faster.
            let potential_savings = top.self_time / 2;
            let potential_pct = if analysis.total_time > 0 {
                (potential_savings as f64 / analysis.total_time as f64) * 100.0
            } else {
                0.0
            };
            writeln!(writer, "**Top hotspot:** `{}` at {:.1}%", top.name, top_pct)?;
            if top_pct >= 5.0 {
                writeln!(
                    writer,
                    "**If optimized 50%:** Save {} ({:.1}% faster)",
                    format_time_us(potential_savings),
                    potential_pct
                )?;
            }
            // Category-specific guidance for the hotspot.
            match top.category {
                FrameCategory::Deps => {
                    let pkg_hint = Self::extract_package_name(&top.location);
                    writeln!(
                        writer,
                        "**Note:** This is dependency code{}. Check if it's necessary or can be optimized.",
                        pkg_hint.map_or(String::new(), |p| format!(" ({})", p))
                    )?;
                }
                FrameCategory::App => {
                    writeln!(
                        writer,
                        "**Location:** `{}`",
                        Self::format_location(&top.location)
                    )?;
                }
                FrameCategory::NodeInternal
                | FrameCategory::V8Internal
                | FrameCategory::Native => {
                    writeln!(
                        writer,
                        "**Note:** This is runtime/engine code. Focus on what triggers it from your code."
                    )?;
                }
            }
        } else {
            writeln!(
                writer,
                "**Top function:** `{}` at {:.1}% (low impact — no dominant CPU hotspot)",
                top.name, top_pct
            )?;
        }
    }
    // Concentration: how much of the (filtered) total the top 5 account for.
    if !analysis.functions.is_empty() && analysis.total_time > 0 {
        let top_n = 5usize.min(analysis.functions.len());
        let top_sum: u64 = analysis
            .functions
            .iter()
            .take(top_n)
            .map(|f| f.self_time)
            .sum();
        let total_pct = (top_sum as f64 / analysis.total_time as f64) * 100.0;
        let (scope_pct, scope_label) = if let Some(filtered_total) = visible_total {
            if filtered_total > 0 && filtered_total != total {
                let filtered_pct = (top_sum as f64 / filtered_total as f64) * 100.0;
                (filtered_pct, "filtered")
            } else {
                (total_pct, "total")
            }
        } else {
            (total_pct, "total")
        };
        let concentration = if scope_pct < 10.0 {
            "very flat"
        } else if scope_pct < 25.0 {
            "moderately flat"
        } else {
            "concentrated"
        };
        if scope_label == "filtered" {
            writeln!(
                writer,
                "**Hotspot concentration:** Top {top_n} functions = {:.1}% of filtered ({total_pct:.1}% of total) — {concentration}",
                scope_pct
            )?;
        } else {
            writeln!(
                writer,
                "**Hotspot concentration:** Top {top_n} functions = {:.1}% of total — {concentration}",
                scope_pct
            )?;
        }
    }
    writeln!(writer)?;
    writeln!(writer, "---")?;
    writeln!(writer)?;
    Ok(())
}
/// Produces the one-line verdict shown at the top of the quick assessment.
///
/// Checks, in priority order: GC pressure, dominant app code (with a
/// single-function callout), heavy dependency usage, startup-heavy
/// internals, and finally a flat/healthy distribution.
fn generate_verdict(
    app_pct: f64,
    deps_pct: f64,
    internal_pct: f64,
    analysis: &CpuAnalysis,
) -> String {
    // GC pressure trumps everything else.
    if let Some(gc) = &analysis.gc_analysis {
        if analysis.total_time > 0 {
            let gc_pct = (gc.total_time as f64 / analysis.total_time as f64) * 100.0;
            if gc_pct > 10.0 {
                return format!(
                    "🔴 High GC pressure ({:.0}%) — reduce allocations to improve performance",
                    gc_pct
                );
            }
        }
    }
    if app_pct > 50.0 {
        // Call out a single dominant function when one exists.
        let dominant = analysis
            .functions
            .first()
            .map(|top| (top, top.self_percent(analysis.total_time)))
            .filter(|(_, pct)| *pct > 20.0);
        return match dominant {
            Some((top, pct)) => format!(
                "🔴 Single function dominates — `{}` uses {:.0}% of CPU",
                top.name, pct
            ),
            None => "🟡 App code dominates — optimization opportunities exist".to_string(),
        };
    }
    if deps_pct > 40.0 {
        return "🟡 Heavy dependency usage — review if all are necessary".to_string();
    }
    if internal_pct > 70.0 {
        return "ℹ️ Profile is startup-heavy (V8/Node internals dominate). Profile under sustained load for better signal.".to_string();
    }
    // A top function below 5% means no single bottleneck stands out.
    let flat = analysis
        .functions
        .first()
        .is_some_and(|top| top.self_percent(analysis.total_time) < 5.0);
    if flat {
        return "✅ No clear bottleneck — CPU time is well-distributed".to_string();
    }
    "✅ Profile looks healthy — no critical issues detected".to_string()
}
/// True when any scope-narrowing filter is active (hidden internals,
/// package focus, or an explicit category filter list).
fn has_filters(metadata: &crate::analysis::ProfileMetadata) -> bool {
    let category_filter_active = !metadata.filter_categories.is_empty();
    let package_focus_active = metadata.focus_package.is_some();
    metadata.internals_filtered || package_focus_active || category_filter_active
}
/// Builds the "**Scope:** Filtered view (…)" line describing the active
/// filters, or `None` when the view is unfiltered.
fn scope_line(metadata: &crate::analysis::ProfileMetadata) -> Option<String> {
    let mut parts: Vec<String> = Vec::new();
    // Order matters for readability: categories, then internals, then package.
    if !metadata.filter_categories.is_empty() {
        parts.push(format!(
            "Categories: {}",
            Self::format_category_list(&metadata.filter_categories)
        ));
    }
    if metadata.internals_filtered {
        parts.push("Internals hidden".to_string());
    }
    if let Some(pkg) = &metadata.focus_package {
        parts.push(format!("Package: `{pkg}`"));
    }
    (!parts.is_empty()).then(|| format!("**Scope:** Filtered view ({})", parts.join("; ")))
}
/// Returns data-quality caveats for the report header: a warning for
/// short/sparse profiles and one for coarse sampling intervals.
fn profile_quality_notes(metadata: &crate::analysis::ProfileMetadata) -> Vec<String> {
    // (condition, message) pairs; only messages whose condition holds survive.
    let checks = [
        (
            metadata.duration_ms < 1_000.0 || metadata.sample_count < 1_000,
            "Short profile (<1s or <1000 samples). Results may be noisy.",
        ),
        (
            metadata.sample_interval_ms > 5.0,
            "Coarse sampling interval (>5ms). Fine-grained hotspots may be missed.",
        ),
    ];
    checks
        .iter()
        .filter(|(triggered, _)| *triggered)
        .map(|(_, message)| (*message).to_string())
        .collect()
}
/// Formats a list of frame categories as backtick-quoted labels joined
/// with ", " (e.g. "`App`, `Dependencies`").
fn format_category_list(categories: &[FrameCategory]) -> String {
    let labels: Vec<String> = categories
        .iter()
        .map(|&category| format!("`{}`", Self::category_label(category)))
        .collect();
    labels.join(", ")
}
/// Human-readable label for a frame category, used in scope/filter lines.
fn category_label(category: FrameCategory) -> &'static str {
    // Exhaustive on purpose: adding a variant must force an update here.
    match category {
        FrameCategory::Native => "Native",
        FrameCategory::V8Internal => "V8 internals",
        FrameCategory::NodeInternal => "Node internals",
        FrameCategory::Deps => "Dependencies",
        FrameCategory::App => "App",
    }
}
fn visible_total_time(analysis: &CpuAnalysis) -> Option<u64> {
if analysis.metadata.focus_package.is_some() {
return None;
}
let mut categories = if analysis.metadata.filter_categories.is_empty() {
vec![
FrameCategory::App,
FrameCategory::Deps,
FrameCategory::NodeInternal,
FrameCategory::V8Internal,
FrameCategory::Native,
]
} else {
analysis.metadata.filter_categories.clone()
};
if analysis.metadata.internals_filtered {
categories.retain(|c| !c.is_internal());
}
let breakdown = &analysis.category_breakdown;
let mut total = 0;
for category in categories {
total += match category {
FrameCategory::App => breakdown.app,
FrameCategory::Deps => breakdown.deps,
FrameCategory::NodeInternal => breakdown.node_internal,
FrameCategory::V8Internal => breakdown.v8_internal,
FrameCategory::Native => breakdown.native,
};
}
Some(total)
}
/// Whether frames of `category` pass the active filters: not an internal
/// category while internals are hidden, and present in the category
/// filter list when one is set.
fn category_allowed(
    metadata: &crate::analysis::ProfileMetadata,
    category: FrameCategory,
) -> bool {
    let hidden_as_internal = metadata.internals_filtered && category.is_internal();
    let excluded_by_filter = !metadata.filter_categories.is_empty()
        && !metadata.filter_categories.contains(&category);
    !hidden_as_internal && !excluded_by_filter
}
/// Extracts the npm package name from a source location path.
///
/// Looks at the segment(s) after the *last* `node_modules/` in the path
/// (handles nested `node_modules`), including scoped packages
/// (`@scope/pkg`). Strips a leading `file://` scheme first.
///
/// Returns `None` when the path has no `node_modules/` component, when
/// the segment after it is empty (path ends in `node_modules/`), or when
/// a scoped package is missing its second segment.
fn extract_package_name(location: &str) -> Option<String> {
    const NODE_MODULES: &str = "node_modules/";
    let path = location.strip_prefix("file://").unwrap_or(location);
    // Use the last occurrence so nested node_modules resolve to the
    // innermost (actually executing) package.
    let nm_idx = path.rfind(NODE_MODULES)?;
    let after_nm = &path[nm_idx + NODE_MODULES.len()..];
    let mut segments = after_nm.split('/');
    let first = segments.next().filter(|s| !s.is_empty())?;
    if first.starts_with('@') {
        // Scoped package: needs both "@scope" and the package segment.
        let second = segments.next().filter(|s| !s.is_empty())?;
        Some(format!("{first}/{second}"))
    } else {
        Some(first.to_string())
    }
}
/// Writes one executive-summary table row with both exclusive ("Self")
/// and inclusive ("Stack") times, their percentages of `total`, and a
/// coarse assessment keyed off the self percentage.
#[expect(clippy::cast_precision_loss)]
fn write_summary_row_with_inclusive(
    writer: &mut dyn Write,
    name: &str,
    self_time: u64,
    inclusive_time: u64,
    total: u64,
) -> Result<(), OutputError> {
    // Percentage of the profile total; 0 for an empty profile.
    let pct_of_total = |value: u64| -> f64 {
        if total > 0 {
            (value as f64 / total as f64) * 100.0
        } else {
            0.0
        }
    };
    let self_pct = pct_of_total(self_time);
    let inclusive_pct = pct_of_total(inclusive_time);
    let assessment = match self_pct {
        p if p < 20.0 => "normal",
        p if p < 50.0 => "notable",
        _ => "dominant",
    };
    let self_str = format_time_us(self_time);
    let inclusive_str = format_time_us(inclusive_time);
    writeln!(
        writer,
        "| {name} | {self_str} | {self_pct:.1}% | {inclusive_str} | {inclusive_pct:.1}% | {assessment} |"
    )?;
    Ok(())
}
/// Writes one "Allocation by Category" table row: name, formatted size,
/// and percentage of the total allocated bytes (0% for an empty profile).
#[expect(clippy::cast_precision_loss)]
fn write_heap_category_row(
    writer: &mut dyn Write,
    name: &str,
    size: u64,
    total: u64,
) -> Result<(), OutputError> {
    let pct = match total {
        0 => 0.0,
        t => (size as f64 / t as f64) * 100.0,
    };
    let size_str = AllocationStats::format_size(size);
    writeln!(writer, "| {name} | {size_str} | {pct:.1}% |")?;
    Ok(())
}
/// Writes the "GC & Allocation Signals" section: an overhead summary,
/// a metrics table (with startup/steady split when phase data exists),
/// estimated improvement potential, allocation hotspots, and — for
/// noticeable pressure — optimization strategies.
#[expect(clippy::cast_precision_loss)]
fn write_gc_analysis(
    writer: &mut dyn Write,
    gc: &crate::analysis::GcAnalysis,
    total_time: u64,
    phase_analysis: Option<&crate::analysis::PhaseAnalysis>,
) -> Result<(), OutputError> {
    writeln!(writer, "## GC & Allocation Signals")?;
    writeln!(writer)?;
    let gc_time_str = format_time_us(gc.total_time);
    let gc_pct = if total_time > 0 {
        (gc.total_time as f64 / total_time as f64) * 100.0
    } else {
        0.0
    };
    let avg_pause_str = format_time_us(gc.avg_pause_us);
    // Severity bands: >10% red, >5% yellow, >2% green, else minimal.
    let (severity, assessment) = if gc_pct > 10.0 {
        ("🔴", "High GC pressure — likely allocation hotspot")
    } else if gc_pct > 5.0 {
        ("🟡", "Moderate GC — worth investigating")
    } else if gc_pct > 2.0 {
        ("🟢", "Normal GC overhead")
    } else {
        ("⚪", "Minimal GC activity")
    };
    writeln!(
        writer,
        "**{} GC overhead:** {} ({:.1}%) across {} samples — {}",
        severity, gc_time_str, gc_pct, gc.sample_count, assessment
    )?;
    writeln!(writer)?;
    // Savings model: assume GC can be brought down to ~2% of total time;
    // everything above that is treated as recoverable.
    let target_gc_pct = 2.0;
    let potential_savings_us = if gc_pct > target_gc_pct {
        let excess_pct = gc_pct - target_gc_pct;
        (excess_pct / 100.0 * total_time as f64) as u64
    } else {
        0
    };
    let potential_speedup_pct = if total_time > 0 {
        (potential_savings_us as f64 / total_time as f64) * 100.0
    } else {
        0.0
    };
    writeln!(writer, "| Metric | Value |")?;
    writeln!(writer, "|--------|-------|")?;
    writeln!(
        writer,
        "| Total GC time | {} ({:.1}%) |",
        gc_time_str, gc_pct
    )?;
    writeln!(writer, "| GC samples | {} |", gc.sample_count)?;
    writeln!(writer, "| Avg pause | {} |", avg_pause_str)?;
    // Startup/steady split only makes sense when phase data is present.
    // `startup_gc_time > 0` implies `gc.total_time > 0`, so the divisions
    // below are safe.
    if phase_analysis.is_some() && gc.startup_gc_time > 0 {
        let startup_pct = (gc.startup_gc_time as f64 / gc.total_time as f64) * 100.0;
        let steady_pct = (gc.steady_gc_time as f64 / gc.total_time as f64) * 100.0;
        writeln!(
            writer,
            "| Startup GC | {} ({:.0}%) |",
            format_time_us(gc.startup_gc_time),
            startup_pct
        )?;
        writeln!(
            writer,
            "| Steady-state GC | {} ({:.0}%) |",
            format_time_us(gc.steady_gc_time),
            steady_pct
        )?;
    }
    writeln!(writer)?;
    if gc_pct > 3.0 && potential_savings_us > 0 && potential_speedup_pct > 0.5 {
        writeln!(writer, "### 📈 Improvement Potential")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "Reducing GC from {:.0}% to ~{:.0}% could save approximately **{}** (~{:.0}% faster)",
            gc_pct,
            target_gc_pct,
            format_time_us(potential_savings_us),
            potential_speedup_pct
        )?;
        writeln!(writer)?;
        if !gc.allocation_hotspots.is_empty() {
            writeln!(writer, "| Optimize | Est. Savings | Impact |")?;
            writeln!(writer, "|----------|--------------|--------|")?;
            for hotspot in gc.allocation_hotspots.iter().take(5) {
                // Attribute savings proportionally to each hotspot's share
                // of GC-correlated samples.
                let hotspot_savings_us =
                    (hotspot.gc_correlation / 100.0 * potential_savings_us as f64) as u64;
                let hotspot_impact_pct = if total_time > 0 {
                    (hotspot_savings_us as f64 / total_time as f64) * 100.0
                } else {
                    0.0
                };
                if hotspot_savings_us > 0 {
                    writeln!(
                        writer,
                        "| `{}` | {} | {:.1}% faster |",
                        Self::escape_markdown(&hotspot.name),
                        format_time_us(hotspot_savings_us),
                        hotspot_impact_pct
                    )?;
                }
            }
            writeln!(writer)?;
            writeln!(
                writer,
                "> **Note:** Estimates assume optimizing each function eliminates its GC contribution."
            )?;
            writeln!(
                writer,
                "> Actual savings depend on allocation patterns and may overlap between functions."
            )?;
            writeln!(writer)?;
        }
    }
    if !gc.allocation_hotspots.is_empty() {
        writeln!(writer, "### Allocation Hotspots")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "> Functions frequently on the call stack during GC — likely allocating heavily."
        )?;
        writeln!(writer)?;
        writeln!(
            writer,
            "| Function | GC Correlation | GC Samples | Category |"
        )?;
        writeln!(
            writer,
            "|----------|----------------|------------|----------|"
        )?;
        for hotspot in &gc.allocation_hotspots {
            writeln!(
                writer,
                "| `{}` | {:.0}% | {} | {} |",
                Self::escape_markdown(&hotspot.name),
                hotspot.gc_correlation,
                hotspot.gc_samples,
                Self::category_badge(hotspot.category)
            )?;
        }
        writeln!(writer)?;
    }
    // Actionable advice only when GC is at least "moderate" (>5%).
    if gc_pct > 5.0 {
        writeln!(writer, "### Optimization Strategies")?;
        writeln!(writer)?;
        if !gc.allocation_hotspots.is_empty() {
            let top = &gc.allocation_hotspots[0];
            let top_savings = (top.gc_correlation / 100.0 * potential_savings_us as f64) as u64;
            writeln!(
                writer,
                "**Priority target: `{}`** ({:.0}% of GC events, ~{} potential savings)",
                top.name,
                top.gc_correlation,
                format_time_us(top_savings)
            )?;
            writeln!(writer)?;
        }
        writeln!(writer, "**Common fixes:**")?;
        writeln!(
            writer,
            "- **Object reuse**: Pool frequently created objects instead of allocating new ones"
        )?;
        writeln!(
            writer,
            "- **Avoid closures in loops**: Each closure allocates; move them outside hot paths"
        )?;
        writeln!(
            writer,
            "- **Use typed arrays**: `Float64Array` instead of `[]` for numeric data"
        )?;
        writeln!(
            writer,
            "- **Batch operations**: Reduce intermediate array/object creation"
        )?;
        writeln!(
            writer,
            "- **String concatenation**: Use array join or template literals instead of `+` in loops"
        )?;
        if gc_pct > 10.0 {
            writeln!(writer)?;
            writeln!(writer, "**For severe GC pressure (>10%):**")?;
            writeln!(
                writer,
                "- Increase heap with `node --max-old-space-size=4096` (if memory allows)"
            )?;
            writeln!(
                writer,
                "- Profile heap with `profile-inspect heap` to find large allocators"
            )?;
        }
        writeln!(writer)?;
    }
    Ok(())
}
/// Writes the "Timing Phase Analysis" section comparing the startup phase
/// against steady state, with per-phase top-function tables and insight
/// lines when the breakdowns are notable.
#[expect(clippy::cast_precision_loss)]
fn write_phase_analysis(
    writer: &mut dyn Write,
    phases: &crate::analysis::PhaseAnalysis,
) -> Result<(), OutputError> {
    writeln!(writer, "## Timing Phase Analysis")?;
    writeln!(writer)?;
    writeln!(
        writer,
        "> Separates startup overhead from steady-state performance."
    )?;
    writeln!(writer)?;
    let startup = &phases.startup;
    // NOTE(review): assumes end_us >= start_us for both phases; a reversed
    // range would underflow — confirm the analysis pass guarantees this.
    let startup_duration = format_time_us(startup.end_us - startup.start_us);
    let startup_pct = if phases.total_duration_us > 0 {
        ((startup.end_us - startup.start_us) as f64 / phases.total_duration_us as f64) * 100.0
    } else {
        0.0
    };
    writeln!(
        writer,
        "### Startup Phase ({}, {:.1}% of profile)",
        startup_duration, startup_pct
    )?;
    writeln!(writer)?;
    if !startup.top_functions.is_empty() {
        writeln!(writer, "| Function | Self Time | % | Category |")?;
        writeln!(writer, "|----------|-----------|---|----------|")?;
        for func in &startup.top_functions {
            writeln!(
                writer,
                "| `{}` | {} | {:.1}% | {} |",
                Self::escape_markdown(&func.name),
                format_time_us(func.self_time),
                func.percent,
                Self::category_badge(func.category)
            )?;
        }
        writeln!(writer)?;
    }
    // Startup insight: a heavy V8/Native share during startup usually means
    // module loading/compilation rather than app work.
    let total_startup = startup.category_breakdown.total();
    if total_startup > 0 {
        let v8_native =
            startup.category_breakdown.v8_internal + startup.category_breakdown.native;
        let v8_pct = (v8_native as f64 / total_startup as f64) * 100.0;
        if v8_pct > 50.0 {
            writeln!(
                writer,
                "**Startup insight:** {:.0}% V8/Native — typical for module loading/compilation",
                v8_pct
            )?;
            writeln!(writer)?;
        }
    }
    let steady = &phases.steady_state;
    let steady_duration = format_time_us(steady.end_us - steady.start_us);
    writeln!(writer, "### Steady State ({})", steady_duration)?;
    writeln!(writer)?;
    if !steady.top_functions.is_empty() {
        writeln!(writer, "| Function | Self Time | % | Category |")?;
        writeln!(writer, "|----------|-----------|---|----------|")?;
        for func in &steady.top_functions {
            writeln!(
                writer,
                "| `{}` | {} | {:.1}% | {} |",
                Self::escape_markdown(&func.name),
                format_time_us(func.self_time),
                func.percent,
                Self::category_badge(func.category)
            )?;
        }
        writeln!(writer)?;
    }
    // Steady-state insight: app share more than doubling vs. startup is a
    // healthy sign that the app itself dominates runtime.
    let total_steady = steady.category_breakdown.total();
    if total_startup > 0 && total_steady > 0 {
        let startup_app_pct =
            (startup.category_breakdown.app as f64 / total_startup as f64) * 100.0;
        let steady_app_pct =
            (steady.category_breakdown.app as f64 / total_steady as f64) * 100.0;
        if steady_app_pct > startup_app_pct * 2.0 {
            writeln!(
                writer,
                "**Steady state insight:** App code increases from {:.0}% to {:.0}% — good, your code dominates runtime",
                startup_app_pct, steady_app_pct
            )?;
            writeln!(writer)?;
        }
    }
    writeln!(writer, "---")?;
    writeln!(writer)?;
    Ok(())
}
/// Writes the "## Recursive Functions" section: a table of self-recursive
/// functions (max observed depth, how often recursion appeared, location)
/// followed by generic optimization tips.
///
/// Emits nothing when no recursive functions were detected, so the report
/// never contains a header with an empty table.
fn write_recursive_functions(
    writer: &mut dyn Write,
    analysis: &CpuAnalysis,
) -> Result<(), OutputError> {
    // Robustness: skip the whole section (header, empty table, tips) when
    // there is nothing to report.
    if analysis.recursive_functions.is_empty() {
        return Ok(());
    }
    writeln!(writer, "## Recursive Functions")?;
    writeln!(writer)?;
    writeln!(
        writer,
        "> Functions that call themselves. Deep recursion can cause stack overflow and performance issues."
    )?;
    writeln!(writer)?;
    writeln!(
        writer,
        "| Function | Max Depth | Stacks with Recursion | Location |"
    )?;
    writeln!(
        writer,
        "|----------|-----------|----------------------|----------|"
    )?;
    for func in &analysis.recursive_functions {
        // Share of the function's appearances that were recursive; clamped
        // to 100% in case the counters drift, and guarded against /0.
        let rec_pct = if func.total_samples > 0 {
            ((func.recursive_samples as f64 / func.total_samples as f64) * 100.0).min(100.0)
        } else {
            0.0
        };
        writeln!(
            writer,
            "| `{}` | {} | {} ({:.0}% of appearances) | `{}` |",
            Self::escape_markdown(&func.name),
            func.max_depth,
            func.recursive_samples,
            rec_pct,
            Self::escape_markdown(&func.location)
        )?;
    }
    writeln!(writer)?;
    writeln!(writer, "**Optimization tips for recursive functions:**")?;
    writeln!(
        writer,
        "- Consider iterative alternatives using explicit stack"
    )?;
    writeln!(
        writer,
        "- Add memoization if computing same values repeatedly"
    )?;
    writeln!(writer, "- Check for accidental infinite recursion patterns")?;
    writeln!(writer)?;
    writeln!(writer, "---")?;
    writeln!(writer)?;
    Ok(())
}
/// Writes top-level takeaway bullets: which category dominates self time,
/// the single biggest hotspot, and any notable GC overhead.
///
/// At most one dominant-category bullet is emitted (branches are checked
/// in priority order). Returns early with no output on an empty profile.
#[expect(clippy::cast_precision_loss)]
fn write_key_takeaways(
    writer: &mut dyn Write,
    analysis: &CpuAnalysis,
) -> Result<(), OutputError> {
    let breakdown = &analysis.category_breakdown;
    let inclusive = &analysis.category_breakdown_inclusive;
    let flow = &analysis.category_call_flow;
    let total = breakdown.total();
    // No samples — nothing meaningful to summarize (also guards the /0 below).
    if total == 0 {
        return Ok(());
    }
    let app_pct = (breakdown.app as f64 / total as f64) * 100.0;
    let deps_pct = (breakdown.deps as f64 / total as f64) * 100.0;
    let native_pct = ((breakdown.v8_internal + breakdown.native) as f64 / total as f64) * 100.0;
    // Total time each category's frames spend calling into other categories.
    let app_triggers: u64 = flow
        .callees_for(FrameCategory::App)
        .iter()
        .map(|(_, t)| *t)
        .sum();
    let node_triggers: u64 = flow
        .callees_for(FrameCategory::NodeInternal)
        .iter()
        .map(|(_, t)| *t)
        .sum();
    if app_pct > 50.0 {
        writeln!(
            writer,
            "- App code dominates ({:.0}% self) — focus optimization on your code",
            app_pct
        )?;
    } else if deps_pct > 20.0 {
        // Inclusive (stack-presence) time can exceed the self-time total,
        // hence the clamp to 100% below.
        let inclusive_pct = (inclusive.deps as f64 / total as f64) * 100.0;
        writeln!(
            writer,
            "- Dependencies: {:.0}% self, {:.0}% stack presence — review which packages are expensive",
            deps_pct,
            inclusive_pct.min(100.0)
        )?;
    } else if native_pct > 70.0 {
        // Attribute the engine/native time to whichever side triggers more
        // of it (Node internals vs app code) to pick the better explanation.
        let node_to_native: u64 = flow
            .callees_for(FrameCategory::NodeInternal)
            .iter()
            .filter(|(cat, _)| {
                *cat == FrameCategory::Native || *cat == FrameCategory::V8Internal
            })
            .map(|(_, t)| *t)
            .sum();
        let app_to_native: u64 = flow
            .callees_for(FrameCategory::App)
            .iter()
            .filter(|(cat, _)| {
                *cat == FrameCategory::Native || *cat == FrameCategory::V8Internal
            })
            .map(|(_, t)| *t)
            .sum();
        if node_to_native > app_to_native {
            writeln!(
                writer,
                "- V8/Native dominates ({:.0}%) via Node.js internals — likely module loading/compilation",
                native_pct
            )?;
        } else {
            writeln!(
                writer,
                "- V8/Native dominates ({:.0}%) — check for native addon work or heavy compilation",
                native_pct
            )?;
        }
    } else if app_triggers > breakdown.app * 5 {
        // App frames spend far more time triggering other categories than
        // running themselves — the hot call sites matter more than app self time.
        writeln!(
            writer,
            "- App code ({:.0}% self) triggers {} in other categories — optimize hot call sites",
            app_pct,
            format_time_us(app_triggers)
        )?;
    } else if node_triggers > total / 3 {
        writeln!(
            writer,
            "- Node.js internals trigger {} — likely I/O or module loading",
            format_time_us(node_triggers)
        )?;
    }
    // Call out the single hottest function when it is significant (>5% self).
    if let Some(top) = analysis.functions.first() {
        let pct = top.self_percent(analysis.total_time);
        if pct > 5.0 {
            writeln!(
                writer,
                "- Top hotspot: `{}` at {:.1}% self time",
                top.name, pct
            )?;
        }
    }
    // GC bullet: prefer the detailed analysis (with allocation correlation)
    // when available; otherwise fall back to the raw GC time counter.
    if let Some(ref gc) = analysis.gc_analysis {
        let gc_pct = (gc.total_time as f64 / analysis.total_time as f64) * 100.0;
        if gc_pct > 5.0 {
            if let Some(top) = gc.allocation_hotspots.first() {
                writeln!(
                    writer,
                    "- GC overhead at {:.1}% — `{}` may be allocating heavily ({:.0}% correlation)",
                    gc_pct, top.name, top.gc_correlation
                )?;
            } else {
                writeln!(
                    writer,
                    "- GC overhead at {:.1}% — investigate allocation patterns",
                    gc_pct
                )?;
            }
        }
    } else if analysis.gc_time > 0 {
        let gc_pct = (analysis.gc_time as f64 / analysis.total_time as f64) * 100.0;
        if gc_pct > 5.0 {
            writeln!(
                writer,
                "- GC overhead at {:.1}% — may indicate allocation pressure",
                gc_pct
            )?;
        }
    }
    Ok(())
}
/// Short human-readable label for a frame category, used in table cells.
fn category_badge(category: FrameCategory) -> &'static str {
    match category {
        FrameCategory::Native => "Native",
        FrameCategory::V8Internal => "V8",
        FrameCategory::NodeInternal => "Node",
        FrameCategory::Deps => "Deps",
        FrameCategory::App => "App",
    }
}
/// Classifies the workload from CPU utilization: >=80% is CPU-bound,
/// <=50% is I/O/wait-bound, anything between is mixed.
///
/// When samples from several processes were merged, the description notes
/// that the utilization figure is an aggregate.
fn classify_workload(cpu_util_pct: f64, profiles_merged: usize) -> String {
    let merged_note = if profiles_merged > 1 {
        ", aggregated across processes"
    } else {
        ""
    };
    if cpu_util_pct >= 80.0 {
        return format!("CPU-bound (~{cpu_util_pct:.0}% utilization{merged_note})");
    }
    if cpu_util_pct <= 50.0 {
        // Low utilization means CPU samples alone cannot explain the runtime.
        return format!(
            "I/O or wait-bound (~{cpu_util_pct:.0}% CPU utilization{merged_note}). CPU profiling may miss the full picture."
        );
    }
    format!("Mixed (~{cpu_util_pct:.0}% CPU utilization{merged_note})")
}
/// One-line summary of the category with the most self (exclusive) time,
/// plus a short interpretation hint for common situations.
#[expect(clippy::cast_precision_loss)]
fn top_category_summary(breakdown: &crate::analysis::CategoryBreakdown, total: u64) -> String {
    // V8 and native frames are reported together as one engine/runtime bucket.
    let v8_native = breakdown.v8_internal + breakdown.native;
    let categories = [
        ("V8/Native", v8_native),
        ("App", breakdown.app),
        ("Dependencies", breakdown.deps),
        ("Node internals", breakdown.node_internal),
    ];
    // `max_by_key` returns the *last* maximum on ties; the array order above
    // therefore decides the winner for equal times.
    let (top_name, top_time) = categories
        .iter()
        .max_by_key(|(_, t)| *t)
        .unwrap_or(&("Unknown", 0));
    let top_pct = (*top_time as f64 / total as f64) * 100.0;
    // Interpretation hint keyed off the winning category and its share.
    let insight = if *top_name == "V8/Native" {
        if top_pct > 70.0 {
            " — engine/runtime frames dominate (often startup/GC/JIT)"
        } else {
            " — engine/runtime frames (not necessarily native code)"
        }
    } else if *top_name == "Dependencies" {
        if top_pct > 40.0 { " — heavy library usage" } else { "" }
    } else if *top_name == "App" {
        if top_pct > 50.0 {
            " — your code dominates, good optimization target"
        } else {
            " — your code"
        }
    } else if *top_name == "Node internals" {
        " — module loading/runtime setup"
    } else {
        ""
    };
    format!("{top_name} at {top_pct:.0}% self (exclusive){insight}")
}
/// Backslash-escapes `|` and `` ` `` so values render safely inside
/// Markdown table cells and inline code spans.
fn escape_markdown(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len());
    for ch in s.chars() {
        if matches!(ch, '|' | '`') {
            escaped.push('\\');
        }
        escaped.push(ch);
    }
    escaped
}
/// Formats a percentage with precision that scales down with magnitude:
/// one decimal at >=1%, two at >=0.1%, "<0.1%" for tiny positives, "0%"
/// for zero or negative values.
fn format_percent(pct: f64) -> String {
    match pct {
        p if p >= 1.0 => format!("{p:.1}%"),
        p if p >= 0.1 => format!("{p:.2}%"),
        p if p > 0.0 => "<0.1%".to_string(),
        _ => "0%".to_string(),
    }
}
/// Shortens a source location for display: `node_modules` paths become
/// `package » file:line`, `node:` builtins are kept verbatim, and other
/// paths are trimmed at a well-known source-directory marker.
fn format_location(location: &str) -> String {
    const NODE_MODULES: &str = "node_modules/";
    let path = location.strip_prefix("file://").unwrap_or(location);
    // `rfind` so nested node_modules resolve to the innermost package.
    if let Some(nm_idx) = path.rfind(NODE_MODULES) {
        let after_nm = &path[nm_idx + NODE_MODULES.len()..];
        // Scoped packages (`@scope/name`) span two path segments.
        let (pkg_name, rest) = if after_nm.starts_with('@') {
            let scoped = after_nm.split_once('/').and_then(|(scope, tail)| {
                tail.split_once('/')
                    .map(|(name, rest)| (format!("{scope}/{name}"), rest.to_string()))
            });
            match scoped {
                Some(pair) => pair,
                None => (after_nm.to_string(), String::new()),
            }
        } else {
            match after_nm.split_once('/') {
                Some((pkg, rest)) => (pkg.to_string(), rest.to_string()),
                None => (after_nm.to_string(), String::new()),
            }
        };
        let file_part = Self::extract_file_and_line(&rest);
        return if file_part.is_empty() {
            pkg_name
        } else {
            format!("{pkg_name} » {file_part}")
        };
    }
    // Node builtins (e.g. `node:fs`) are already short and meaningful.
    if path.starts_with("node:") {
        return path.to_string();
    }
    // Trim everything before a conventional source-root directory.
    const SOURCE_MARKERS: [&str; 6] = [
        "/src/",
        "/lib/",
        "/dist/",
        "/build/",
        "/apps/",
        "/packages/",
    ];
    for marker in SOURCE_MARKERS {
        if let Some(idx) = path.find(marker) {
            return Self::extract_file_and_line(&path[idx + 1..]);
        }
    }
    Self::extract_file_and_line(path)
}
/// Reduces a path (optionally carrying a `:line[:col]` suffix) to a short
/// `parent/file:line` display form.
fn extract_file_and_line(path: &str) -> String {
    let (path_part, line_col) = Self::split_line_col(path);
    // Include the immediate parent directory, but only when it is short
    // enough (< 20 chars) to stay readable in a table cell.
    let display_path = match path_part.rsplit_once('/') {
        Some((parents, file)) => {
            let parent = parents.rsplit_once('/').map_or(parents, |(_, p)| p);
            if parent.len() < 20 {
                format!("{parent}/{file}")
            } else {
                file.to_string()
            }
        }
        None => path_part.to_string(),
    };
    match line_col {
        Some(line) => format!("{display_path}:{line}"),
        None => display_path,
    }
}
/// Splits a trailing `:line` or `:line:col` suffix off a source location.
///
/// Returns the path portion and the parsed line number, if any. Paths whose
/// final `:`-separated segment is not numeric (e.g. `node:fs`,
/// `webpack://src/a.js`) are returned whole with no line number — the
/// previous implementation truncated such paths at their first colon
/// (`"node:fs"` became `"node"`, `"C:/x.js:42"` became `"C"`).
fn split_line_col(path: &str) -> (&str, Option<u32>) {
    let Some((head, last)) = path.rsplit_once(':') else {
        // No colon at all — plain path, no line info.
        return (path, None);
    };
    // A non-numeric (or empty) final segment means there is no line/col
    // suffix; keep the full path intact.
    if last.is_empty() || !last.chars().all(|c| c.is_ascii_digit()) {
        return (path, None);
    }
    // `path:line:col` — the middle segment is the line number.
    if let Some((head2, line)) = head.rsplit_once(':') {
        if let Ok(line) = line.parse() {
            return (head2, Some(line));
        }
        // Middle segment is not a number (e.g. a drive letter or URL scheme
        // lives in `head`), so treat the numeric suffix as `path:line`.
    }
    (head, last.parse().ok())
}
/// Renders a hot path as an indented call tree, skipping leading internal
/// frames and marking the deepest displayed frame with "← HOTSPOT".
fn write_hot_path_visualization(
    writer: &mut dyn Write,
    profile: &ProfileIR,
    path: &HotPath,
) -> Result<(), OutputError> {
    // Resolve frame ids; unresolvable ids are silently dropped.
    let frames: Vec<_> = path
        .frames
        .iter()
        .filter_map(|&fid| profile.get_frame(fid))
        .collect();
    // Start at the first non-internal frame so the tree begins with code the
    // user recognizes; fall back to the root if every frame is internal.
    let start_idx = frames
        .iter()
        .position(|f| !f.category.is_internal())
        .unwrap_or(0);
    // Cap at 8 frames to keep the visualization compact.
    let display_frames: Vec<_> = frames.iter().skip(start_idx).take(8).collect();
    for (i, frame) in display_frames.iter().enumerate() {
        let indent = "  ".repeat(i);
        let arrow = if i > 0 { "└─ " } else { "" };
        // The last displayed frame is the leaf, i.e. where time is spent.
        let hotspot = if i == display_frames.len() - 1 {
            " ← HOTSPOT"
        } else {
            ""
        };
        let location = Self::format_location(&frame.location());
        writeln!(
            writer,
            "{indent}{arrow}{} ({location}){hotspot}",
            frame.display_name()
        )?;
    }
    // Note how many deeper frames were truncated from the display.
    if frames.len() > display_frames.len() + start_idx {
        writeln!(
            writer,
            "  ... ({} frames omitted)",
            frames.len() - display_frames.len() - start_idx
        )?;
    }
    Ok(())
}
/// Writes a "Why this path is hot" bullet list for one hot path, built from
/// several heuristics: leaf self time, sample frequency, filesystem/network
/// frames on the stack, and native-addon vs native-runtime frames.
///
/// Always emits at least one bullet (a generic fallback).
#[expect(clippy::cast_precision_loss)]
fn write_path_explanation(
    writer: &mut dyn Write,
    profile: &ProfileIR,
    path: &HotPath,
    analysis: &CpuAnalysis,
) -> Result<(), OutputError> {
    let mut reasons = Vec::new();
    // 1) Leaf function with noticeable self time (>1% of the profile).
    if let Some(&leaf_id) = path.frames.last() {
        if let Some(func) = analysis.functions.iter().find(|f| f.frame_id == leaf_id) {
            let self_pct = func.self_percent(analysis.total_time);
            if self_pct > 1.0 {
                reasons.push(format!(
                    "Leaf function `{}` has {:.1}% self time",
                    func.name, self_pct
                ));
            }
        }
    }
    // 2) The path itself shows up in a noticeable fraction of samples.
    let total_samples = analysis.total_samples;
    if total_samples > 0 {
        let path_sample_pct = (path.sample_count as f64 / total_samples as f64) * 100.0;
        if path_sample_pct > 1.0 {
            reasons.push(format!("Appears in {:.1}% of samples", path_sample_pct));
        }
    }
    // 3) Filesystem activity, detected by substring match on frame
    //    names/locations against common fs API names.
    let fs_keywords = [
        "fs:",
        "readFile",
        "writeFile",
        "stat",
        "readdir",
        "createReadStream",
        "createWriteStream",
        "readdirSync",
        "statSync",
        "readFileSync",
        "existsSync",
        "accessSync",
    ];
    let has_fs = path.frames.iter().any(|&fid| {
        profile.get_frame(fid).is_some_and(|f| {
            let name = f.display_name();
            let location = f.location();
            fs_keywords
                .iter()
                .any(|kw| name.contains(kw) || location.contains(kw))
        })
    });
    // 4) Network activity, detected via Node builtin module locations.
    let has_net = path.frames.iter().any(|&fid| {
        profile.get_frame(fid).is_some_and(|f| {
            let location = f.location();
            location.contains("node:net")
                || location.contains("node:dns")
                || location.contains("node:http")
                || location.contains("node:https")
                || location.contains("node:tls")
                || location.contains("node:dgram")
        })
    });
    if has_fs && has_net {
        reasons.push("File system and network activity on stack".to_string());
    } else if has_fs {
        reasons.push("File system activity on stack (stat/readdir/path ops)".to_string());
    } else if has_net {
        reasons.push("Network activity on stack".to_string());
    }
    // 5) Native frames: distinguish user addons (N-API symbols, `.node`
    //    binaries, binding files) from the runtime's own native code.
    let mut has_native_addon = false;
    let mut has_native_runtime = false;
    for &fid in &path.frames {
        if let Some(f) = profile.get_frame(fid) {
            if f.kind == FrameKind::Native {
                let name = f.display_name();
                let location = f.location();
                if name.contains("napi_")
                    || location.ends_with(".node")
                    || location.contains("/binding.")
                {
                    has_native_addon = true;
                } else if f.category == FrameCategory::Native && !name.starts_with('(') {
                    // Names like "(garbage collector)" are excluded here.
                    has_native_runtime = true;
                }
            }
        }
    }
    if has_native_addon {
        reasons.push("Calls native addon (C++/Rust via N-API)".to_string());
    } else if has_native_runtime {
        reasons.push("Includes Node/V8 native operations".to_string());
    }
    // Fallback so the section is never empty.
    if reasons.is_empty() {
        reasons.push("This call sequence accumulates time across samples".to_string());
    }
    writeln!(writer, "**Why this path is hot:**")?;
    for reason in reasons {
        writeln!(writer, "- {reason}")?;
    }
    Ok(())
}
/// Writes a per-function drill-down: heading with self time and share,
/// location, top-5 caller and callee tables, and a heuristic note about
/// the function's call pattern.
#[expect(clippy::cast_precision_loss)]
fn write_hot_function_detail(
    writer: &mut dyn Write,
    detail: &HotFunctionDetail,
    analysis: &CpuAnalysis,
) -> Result<(), OutputError> {
    let profile_total_time = analysis.total_time;
    let self_time = format_time_us(detail.self_time);
    // Guard against an empty profile to avoid dividing by zero.
    let self_pct = if profile_total_time > 0 {
        (detail.self_time as f64 / profile_total_time as f64) * 100.0
    } else {
        0.0
    };
    writeln!(
        writer,
        "### `{}` ({} self, {:.1}%)",
        detail.name, self_time, self_pct
    )?;
    writeln!(writer, "Location: `{}`", detail.location)?;
    writeln!(writer)?;
    if !detail.callers.is_empty() {
        writeln!(writer, "**Top callers:**")?;
        writeln!(writer, "| Caller | Time | Calls |")?;
        writeln!(writer, "|--------|------|-------|")?;
        for caller in detail.callers.iter().take(5) {
            writeln!(
                writer,
                "| `{}` | {} | {} |",
                Self::escape_markdown(&caller.name),
                format_time_us(caller.time),
                caller.call_count
            )?;
        }
        writeln!(writer)?;
    }
    if !detail.callees.is_empty() {
        writeln!(writer, "**Top callees inside:**")?;
        writeln!(writer, "| Callee | Self | Total | Calls |")?;
        writeln!(writer, "|--------|------|-------|-------|")?;
        for callee in detail.callees.iter().take(5) {
            writeln!(
                writer,
                "| `{}` | {} | {} | {} |",
                Self::escape_markdown(&callee.name),
                format_time_us(callee.self_time),
                format_time_us(callee.total_time),
                callee.call_count
            )?;
        }
        writeln!(writer)?;
    }
    // Heuristics: a single caller on a non-trivial function (>1% of total
    // time, via integer division) suggests memoization could pay off;
    // many call sites (>3) suggest a shared hot utility.
    if detail.callers.len() == 1 && detail.self_time > profile_total_time / 100 {
        writeln!(
            writer,
            "**Call pattern signal:** Single caller — if result is deterministic, consider memoization."
        )?;
        writeln!(writer)?;
    } else if detail.callers.len() > 3 {
        writeln!(
            writer,
            "**Call pattern signal:** Called from {} different sites — hot utility function.",
            detail.callers.len()
        )?;
        writeln!(writer)?;
    }
    Ok(())
}
/// Writes the "## Action Items" section from the recommendation engine's
/// report: key insights, quick wins, recommendations grouped by priority
/// (quick wins are not repeated in the priority groups), and open
/// investigation items.
///
/// When the engine produced no recommendations, emits general guidance
/// tailored to whether the workload looks CPU-bound or wait-bound.
#[expect(clippy::cast_precision_loss)]
fn write_recommendations(
    writer: &mut dyn Write,
    profile: &ProfileIR,
    analysis: &CpuAnalysis,
) -> Result<(), OutputError> {
    let report = RecommendationEngine::analyze(profile, analysis);
    writeln!(writer, "## Action Items")?;
    writeln!(writer)?;
    // No concrete recommendations: fall back to generic advice and return.
    if report.recommendations.is_empty() {
        // Missing utilization is treated as fully CPU-bound (1.0) here.
        let cpu_util = analysis.metadata.cpu_utilization().unwrap_or(1.0) * 100.0;
        let is_cpu_bound = cpu_util >= 80.0;
        writeln!(writer, "**No dominant CPU hotspot detected in App code.**")?;
        writeln!(writer)?;
        if is_cpu_bound {
            writeln!(
                writer,
                "CPU usage is high ({:.0}%) but distributed across dependencies and runtime. To improve performance:",
                cpu_util
            )?;
            writeln!(
                writer,
                "- **Reduce filesystem CPU cost:** cache config/path resolution, avoid repeated `stat`/`readdir`"
            )?;
            writeln!(
                writer,
                "- **Minimize parser/transform passes:** batch operations, reuse AST where possible"
            )?;
            writeln!(
                writer,
                "- **Review dependency usage:** check if heavy deps can be replaced or lazily loaded"
            )?;
            writeln!(
                writer,
                "- **Profile under sustained load:** startup overhead may dominate short runs"
            )?;
        } else {
            writeln!(
                writer,
                "CPU utilization is low ({:.0}%), indicating the process spent time waiting. Consider:",
                cpu_util
            )?;
            writeln!(
                writer,
                "- **I/O latency:** check file system, network, or database wait times"
            )?;
            writeln!(
                writer,
                "- **Async bottlenecks:** look for sequential awaits that could be parallelized"
            )?;
            writeln!(
                writer,
                "- **Tool orchestration:** time spent in `npx`, package managers, or build tools"
            )?;
            writeln!(
                writer,
                "- **Use tracing:** CPU profiles can't measure wait time; consider `--trace-event-categories`"
            )?;
        }
        writeln!(writer)?;
        return Ok(());
    }
    if !report.insights.is_empty() {
        writeln!(writer, "### Key Insights")?;
        writeln!(writer)?;
        for insight in &report.insights {
            writeln!(writer, "- {insight}")?;
        }
        writeln!(writer)?;
    }
    // Quick wins are referenced by index into `report.recommendations`.
    if !report.quick_wins.is_empty() {
        writeln!(writer, "### Quick Wins")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "> High-impact improvements that are easy to implement"
        )?;
        writeln!(writer)?;
        for &idx in &report.quick_wins {
            if let Some(rec) = report.recommendations.get(idx) {
                Self::write_recommendation_summary(writer, rec, analysis.total_time)?;
            }
        }
        writeln!(writer)?;
    }
    // Exclude quick wins from the per-priority groups so each recommendation
    // appears at most once in the report.
    let quick_win_set: std::collections::HashSet<_> = report.quick_wins.iter().collect();
    let critical: Vec<_> = report
        .recommendations
        .iter()
        .enumerate()
        .filter(|(i, r)| r.priority == Priority::Critical && !quick_win_set.contains(i))
        .map(|(_, r)| r)
        .collect();
    let high: Vec<_> = report
        .recommendations
        .iter()
        .enumerate()
        .filter(|(i, r)| r.priority == Priority::High && !quick_win_set.contains(i))
        .map(|(_, r)| r)
        .collect();
    let medium: Vec<_> = report
        .recommendations
        .iter()
        .enumerate()
        .filter(|(i, r)| r.priority == Priority::Medium && !quick_win_set.contains(i))
        .map(|(_, r)| r)
        .collect();
    if !critical.is_empty() {
        writeln!(writer, "### Critical Priority")?;
        writeln!(writer)?;
        for rec in critical {
            Self::write_recommendation_detail(writer, rec, analysis.total_time)?;
        }
    }
    if !high.is_empty() {
        writeln!(writer, "### High Priority")?;
        writeln!(writer)?;
        for rec in high {
            Self::write_recommendation_detail(writer, rec, analysis.total_time)?;
        }
    }
    // Medium items get the compact summary form, capped at five.
    if !medium.is_empty() {
        writeln!(writer, "### Medium Priority")?;
        writeln!(writer)?;
        for rec in &medium[..medium.len().min(5)] {
            Self::write_recommendation_summary(writer, rec, analysis.total_time)?;
        }
        if medium.len() > 5 {
            writeln!(
                writer,
                "*...and {} more medium-priority items*",
                medium.len() - 5
            )?;
        }
        writeln!(writer)?;
    }
    if !report.investigations.is_empty() {
        writeln!(writer, "### Needs Investigation")?;
        writeln!(writer)?;
        for item in &report.investigations {
            writeln!(writer, "- {item}")?;
        }
        writeln!(writer)?;
    }
    Ok(())
}
/// Writes the compact, one-bullet form of a recommendation: title with
/// estimated savings, root cause, and implementation effort.
#[expect(clippy::cast_precision_loss)]
fn write_recommendation_summary(
    writer: &mut dyn Write,
    rec: &Recommendation,
    total_time: u64,
) -> Result<(), OutputError> {
    let savings_str = format_time_us(rec.estimated_savings_us);
    let savings_pct = rec.savings_percent(total_time);
    writeln!(
        writer,
        "- **{}** — *{} potential savings ({:.1}% faster)*",
        rec.title, savings_str, savings_pct
    )?;
    writeln!(writer, "  - {}", rec.root_cause)?;
    writeln!(writer, "  - Effort: {}", rec.effort)?;
    writeln!(writer)?;
    Ok(())
}
/// Writes the full detail form of a recommendation: heading with priority
/// icon, a metrics table, location, root cause, action list, code patterns
/// to look for, and collapsible profile evidence.
#[expect(clippy::cast_precision_loss)]
fn write_recommendation_detail(
    writer: &mut dyn Write,
    rec: &Recommendation,
    total_time: u64,
) -> Result<(), OutputError> {
    let savings_str = format_time_us(rec.estimated_savings_us);
    let savings_pct = rec.savings_percent(total_time);
    let current_str = format_time_us(rec.current_time_us);
    writeln!(
        writer,
        "#### {} `{}`",
        Self::priority_icon(rec.priority),
        rec.title
    )?;
    writeln!(writer)?;
    writeln!(writer, "| Metric | Value |")?;
    writeln!(writer, "|--------|-------|")?;
    writeln!(writer, "| Current time | {} |", current_str)?;
    writeln!(
        writer,
        "| Potential savings | {} ({:.1}% faster) |",
        savings_str, savings_pct
    )?;
    writeln!(writer, "| Effort | {} |", rec.effort)?;
    writeln!(writer, "| Type | {} |", rec.issue_type)?;
    writeln!(writer)?;
    writeln!(
        writer,
        "**Location:** `{}`",
        Self::format_location(&rec.location)
    )?;
    writeln!(writer)?;
    writeln!(writer, "**Why:** {}", rec.root_cause)?;
    writeln!(writer)?;
    writeln!(writer, "**Actions:**")?;
    for action in &rec.actions {
        writeln!(writer, "- {action}")?;
    }
    writeln!(writer)?;
    if !rec.code_patterns.is_empty() {
        writeln!(writer, "**Look for:**")?;
        for pattern in &rec.code_patterns {
            writeln!(writer, "- `{pattern}`")?;
        }
        writeln!(writer)?;
    }
    // Evidence goes in a <details> block so the report stays scannable.
    if !rec.evidence.is_empty() {
        writeln!(writer, "<details>")?;
        writeln!(writer, "<summary>Evidence from profile</summary>")?;
        writeln!(writer)?;
        for evidence in &rec.evidence {
            writeln!(writer, "- {evidence}")?;
        }
        writeln!(writer)?;
        writeln!(writer, "</details>")?;
        writeln!(writer)?;
    }
    Ok(())
}
/// Colored-circle emoji for a recommendation priority level.
fn priority_icon(priority: Priority) -> &'static str {
    match priority {
        Priority::Low => "🟢",
        Priority::Medium => "🟡",
        Priority::High => "🟠",
        Priority::Critical => "🔴",
    }
}
}