use std::io::Write;
use serde::Serialize;
use crate::analysis::{AllocationStats, CpuAnalysis, HeapAnalysis};
use crate::ir::ProfileIR;
use super::{Formatter, OutputError};
/// Formatter that emits analysis results as pretty-printed JSON,
/// intended for machine consumption rather than terminal display.
pub struct JsonFormatter;
/// Top-level JSON document produced for a CPU-profile analysis.
///
/// Field order here is the serialization order of the JSON keys.
#[derive(Serialize)]
struct JsonOutput<'a> {
    /// Profile provenance and sampling parameters.
    metadata: JsonMetadata<'a>,
    /// High-level percentages plus heuristic takeaway strings.
    executive_summary: JsonExecutiveSummary,
    /// Time split across app / deps / node / v8 / native categories.
    category_breakdown: JsonCategoryBreakdown,
    /// Functions ranked by self time.
    functions_by_self_time: Vec<JsonFunction>,
    /// Functions ranked by inclusive (total) time.
    functions_by_inclusive_time: Vec<JsonFunction>,
    /// Most expensive call paths, with per-path explanations.
    hot_paths: Vec<JsonHotPath>,
    /// Caller/callee detail for the hottest functions.
    hot_function_details: Vec<JsonHotFunctionDetail>,
    /// Per-source-file aggregates.
    file_stats: Vec<JsonFileStats>,
    /// Per-dependency-package aggregates.
    package_stats: Vec<JsonPackageStats>,
    /// GC and native-time signals.
    signals: JsonSignals,
    /// Tiered optimization recommendations.
    recommendations: JsonRecommendations,
}
/// Provenance and sampling metadata for the analyzed profile.
#[derive(Serialize)]
struct JsonMetadata<'a> {
    /// Path of the profile file, when known.
    source_file: Option<&'a str>,
    /// Total profiled time in microseconds.
    total_time_us: u64,
    /// Total profiled time in milliseconds (us / 1000).
    total_time_ms: f64,
    /// Number of samples in the profile.
    total_samples: usize,
    /// Sampling interval in milliseconds.
    sample_interval_ms: f64,
    /// Whether internal (node/v8) frames were filtered out of the analysis.
    internals_filtered: bool,
    /// Package name the analysis was focused on, if any.
    focus_package: Option<&'a str>,
}
/// High-level summary: category percentages plus heuristic takeaways.
#[derive(Serialize)]
struct JsonExecutiveSummary {
    /// Percent of time in application code.
    app_percent: f64,
    /// Percent of time in dependency code.
    deps_percent: f64,
    /// Percent of time in Node.js internals.
    node_internal_percent: f64,
    /// Combined percent of time in V8 internals and native code.
    v8_native_percent: f64,
    /// Human-readable headline findings (dominant category, top bottleneck, GC pressure).
    key_takeaways: Vec<String>,
}
/// Time attributed to each frame category, in three units:
/// raw microseconds (`_us`), milliseconds (`_ms`), and percent of total (`_percent`).
#[derive(Serialize)]
struct JsonCategoryBreakdown {
    // Application code.
    app_us: u64,
    app_ms: f64,
    app_percent: f64,
    // Dependency (node_modules) code.
    deps_us: u64,
    deps_ms: f64,
    deps_percent: f64,
    // Node.js internal modules.
    node_internal_us: u64,
    node_internal_ms: f64,
    node_internal_percent: f64,
    // V8 engine internals.
    v8_internal_us: u64,
    v8_internal_ms: f64,
    v8_internal_percent: f64,
    // Native (non-JS) frames.
    native_us: u64,
    native_ms: f64,
    native_percent: f64,
}
/// One ranked function entry, used by both the self-time and inclusive-time lists.
#[derive(Serialize)]
struct JsonFunction {
    /// 1-based position in the ranking.
    rank: usize,
    name: String,
    /// Source location string (file:line style).
    location: String,
    /// Frame category rendered via its `Display` impl.
    category: String,
    // Self time: time spent in the function body itself.
    self_time_us: u64,
    self_time_ms: f64,
    self_percent: f64,
    self_samples: u32,
    // Inclusive time: function body plus everything it calls.
    inclusive_time_us: u64,
    inclusive_time_ms: f64,
    inclusive_percent: f64,
    total_samples: u32,
}
/// One hot call path: a frame chain plus its cost and heuristic explanations.
#[derive(Serialize)]
struct JsonHotPath {
    /// 1-based position in the hot-path ranking.
    rank: usize,
    /// Frames along the path; the last frame is flagged as the hotspot (leaf).
    frames: Vec<JsonPathFrame>,
    time_us: u64,
    time_ms: f64,
    /// Percent of total profile time attributed to this path.
    percent: f64,
    /// Number of samples in which this path appeared.
    sample_count: u32,
    /// Why this path matters (self-heavy leaf, sample frequency).
    explanation: Vec<String>,
}
/// A single frame within a hot path.
#[derive(Serialize)]
struct JsonPathFrame {
    name: String,
    location: String,
    /// True only for the final (leaf) frame of the path.
    is_hotspot: bool,
}
/// Detailed caller/callee view of one hot function.
#[derive(Serialize)]
struct JsonHotFunctionDetail {
    name: String,
    location: String,
    self_time_ms: f64,
    self_percent: f64,
    inclusive_time_ms: f64,
    /// Functions that call this one.
    callers: Vec<JsonCallerCallee>,
    /// Functions this one calls.
    callees: Vec<JsonCallerCallee>,
    /// Optional heuristic note derived from the caller count
    /// (single caller → memoization hint; many callers → hot utility).
    call_pattern_signal: Option<String>,
}
/// One edge in a hot function's caller or callee list.
#[derive(Serialize)]
struct JsonCallerCallee {
    name: String,
    location: String,
    /// Time attributed to this edge, in milliseconds.
    time_ms: f64,
    call_count: u32,
}
/// Aggregated time for one source file.
#[derive(Serialize)]
struct JsonFileStats {
    file: String,
    self_time_ms: f64,
    total_time_ms: f64,
    call_count: u32,
    /// Frame category rendered via its `Display` impl.
    category: String,
}
/// Aggregated time for one dependency package.
#[derive(Serialize)]
struct JsonPackageStats {
    package: String,
    time_ms: f64,
    /// Share of all dependency time spent in this package.
    percent_of_deps: f64,
    /// The package's most expensive function, with its location.
    top_function: String,
    top_function_location: String,
}
/// GC and native-time signals with a textual GC assessment.
#[derive(Serialize)]
struct JsonSignals {
    gc_time_us: u64,
    gc_time_ms: f64,
    /// GC time as a percent of total time.
    gc_percent: f64,
    /// Textual severity tier derived from `gc_percent` (>10% high, >5% moderate, else normal).
    gc_assessment: String,
    native_time_us: u64,
    native_time_ms: f64,
    native_percent: f64,
}
/// Recommendations bucketed by severity tier.
#[derive(Serialize)]
struct JsonRecommendations {
    /// Functions with >=20% self time or >=35% inclusive time.
    critical: Vec<JsonRecommendation>,
    /// Functions in the band below critical (10-20% self or 20-35% inclusive).
    high: Vec<JsonRecommendation>,
}
/// One function flagged for optimization attention.
#[derive(Serialize)]
struct JsonRecommendation {
    /// Function name (serialized under the key `function`).
    function: String,
    location: String,
    self_percent: f64,
    inclusive_percent: f64,
    /// Frame category rendered via its `Display` impl.
    category: String,
}
impl Formatter for JsonFormatter {
#[expect(clippy::cast_precision_loss)]
fn write_cpu_analysis(
&self,
profile: &ProfileIR,
analysis: &CpuAnalysis,
writer: &mut dyn Write,
) -> Result<(), OutputError> {
let breakdown = &analysis.category_breakdown;
let total = breakdown.total();
let mut key_takeaways = Vec::new();
let flow = &analysis.category_call_flow;
let app_pct = breakdown.percent(crate::ir::FrameCategory::App);
let deps_pct = breakdown.percent(crate::ir::FrameCategory::Deps);
let native_pct_total = breakdown.percent(crate::ir::FrameCategory::V8Internal)
+ breakdown.percent(crate::ir::FrameCategory::Native);
let deps_triggers: u64 = flow
.callees_for(crate::ir::FrameCategory::Deps)
.iter()
.map(|(_, t)| *t)
.sum();
if app_pct > 50.0 {
key_takeaways.push(format!(
"App code dominates ({:.0}%) — focus optimization efforts on your code",
app_pct
));
} else if deps_pct > 20.0 || (total > 0 && deps_triggers > total / 2) {
let deps_total_pct = if total > 0 {
((breakdown.deps + deps_triggers) as f64 / total as f64) * 100.0
} else {
0.0
};
key_takeaways.push(format!(
"Dependencies drive {:.0}% of work — check which packages are expensive",
deps_total_pct.min(100.0)
));
} else if native_pct_total > 70.0 {
let node_to_native: u64 = flow
.callees_for(crate::ir::FrameCategory::NodeInternal)
.iter()
.filter(|(cat, _)| {
*cat == crate::ir::FrameCategory::Native
|| *cat == crate::ir::FrameCategory::V8Internal
})
.map(|(_, t)| *t)
.sum();
let app_to_native: u64 = flow
.callees_for(crate::ir::FrameCategory::App)
.iter()
.filter(|(cat, _)| {
*cat == crate::ir::FrameCategory::Native
|| *cat == crate::ir::FrameCategory::V8Internal
})
.map(|(_, t)| *t)
.sum();
if node_to_native > app_to_native {
key_takeaways.push(format!(
"V8/Native dominates ({:.0}%) via Node.js — likely module loading/compilation",
native_pct_total
));
} else {
key_takeaways.push(format!(
"V8/Native dominates ({:.0}%) — check for native addon work or compilation",
native_pct_total
));
}
}
if let Some(top) = analysis.functions.first() {
let pct = top.self_percent(analysis.total_time);
if pct > 5.0 {
key_takeaways.push(format!(
"Top bottleneck: {} at {:.1}% self time",
top.name, pct
));
}
}
if analysis.gc_time > 0 {
let gc_pct = (analysis.gc_time as f64 / analysis.total_time as f64) * 100.0;
if gc_pct > 5.0 {
key_takeaways.push(format!(
"GC overhead at {:.1}% — may indicate allocation pressure",
gc_pct
));
}
}
let gc_pct = if analysis.total_time > 0 {
(analysis.gc_time as f64 / analysis.total_time as f64) * 100.0
} else {
0.0
};
let gc_assessment = if gc_pct > 10.0 {
"High GC pressure — investigate allocation patterns"
} else if gc_pct > 5.0 {
"Moderate GC activity — may warrant investigation"
} else {
"Normal GC overhead"
}
.to_string();
let native_pct = if analysis.total_time > 0 {
(analysis.native_time as f64 / analysis.total_time as f64) * 100.0
} else {
0.0
};
let critical: Vec<JsonRecommendation> = analysis
.functions
.iter()
.filter(|f| {
f.self_percent(analysis.total_time) >= 20.0
|| f.total_percent(analysis.total_time) >= 35.0
})
.map(|f| JsonRecommendation {
function: f.name.clone(),
location: f.location.clone(),
self_percent: f.self_percent(analysis.total_time),
inclusive_percent: f.total_percent(analysis.total_time),
category: format!("{}", f.category),
})
.collect();
let high: Vec<JsonRecommendation> = analysis
.functions
.iter()
.filter(|f| {
let self_pct = f.self_percent(analysis.total_time);
let total_pct = f.total_percent(analysis.total_time);
(self_pct >= 10.0 && self_pct < 20.0) || (total_pct >= 20.0 && total_pct < 35.0)
})
.map(|f| JsonRecommendation {
function: f.name.clone(),
location: f.location.clone(),
self_percent: f.self_percent(analysis.total_time),
inclusive_percent: f.total_percent(analysis.total_time),
category: format!("{}", f.category),
})
.collect();
let output = JsonOutput {
metadata: JsonMetadata {
source_file: profile.source_file.as_deref(),
total_time_us: analysis.total_time,
total_time_ms: analysis.total_time as f64 / 1000.0,
total_samples: analysis.total_samples,
sample_interval_ms: analysis.metadata.sample_interval_ms,
internals_filtered: analysis.metadata.internals_filtered,
focus_package: analysis.metadata.focus_package.as_deref(),
},
executive_summary: JsonExecutiveSummary {
app_percent: app_pct,
deps_percent: deps_pct,
node_internal_percent: breakdown.percent(crate::ir::FrameCategory::NodeInternal),
v8_native_percent: breakdown.percent(crate::ir::FrameCategory::V8Internal)
+ breakdown.percent(crate::ir::FrameCategory::Native),
key_takeaways,
},
category_breakdown: JsonCategoryBreakdown {
app_us: breakdown.app,
app_ms: breakdown.app as f64 / 1000.0,
app_percent: if total > 0 {
(breakdown.app as f64 / total as f64) * 100.0
} else {
0.0
},
deps_us: breakdown.deps,
deps_ms: breakdown.deps as f64 / 1000.0,
deps_percent: if total > 0 {
(breakdown.deps as f64 / total as f64) * 100.0
} else {
0.0
},
node_internal_us: breakdown.node_internal,
node_internal_ms: breakdown.node_internal as f64 / 1000.0,
node_internal_percent: if total > 0 {
(breakdown.node_internal as f64 / total as f64) * 100.0
} else {
0.0
},
v8_internal_us: breakdown.v8_internal,
v8_internal_ms: breakdown.v8_internal as f64 / 1000.0,
v8_internal_percent: if total > 0 {
(breakdown.v8_internal as f64 / total as f64) * 100.0
} else {
0.0
},
native_us: breakdown.native,
native_ms: breakdown.native as f64 / 1000.0,
native_percent: if total > 0 {
(breakdown.native as f64 / total as f64) * 100.0
} else {
0.0
},
},
functions_by_self_time: analysis
.functions
.iter()
.enumerate()
.map(|(i, f)| JsonFunction {
rank: i + 1,
name: f.name.clone(),
location: f.location.clone(),
category: format!("{}", f.category),
self_time_us: f.self_time,
self_time_ms: f.self_time as f64 / 1000.0,
self_percent: f.self_percent(analysis.total_time),
self_samples: f.self_samples,
inclusive_time_us: f.total_time,
inclusive_time_ms: f.total_time as f64 / 1000.0,
inclusive_percent: f.total_percent(analysis.total_time),
total_samples: f.total_samples,
})
.collect(),
functions_by_inclusive_time: analysis
.functions_by_total
.iter()
.enumerate()
.map(|(i, f)| JsonFunction {
rank: i + 1,
name: f.name.clone(),
location: f.location.clone(),
category: format!("{}", f.category),
self_time_us: f.self_time,
self_time_ms: f.self_time as f64 / 1000.0,
self_percent: f.self_percent(analysis.total_time),
self_samples: f.self_samples,
inclusive_time_us: f.total_time,
inclusive_time_ms: f.total_time as f64 / 1000.0,
inclusive_percent: f.total_percent(analysis.total_time),
total_samples: f.total_samples,
})
.collect(),
hot_paths: analysis
.hot_paths
.iter()
.enumerate()
.map(|(i, p)| {
let frames: Vec<JsonPathFrame> = p
.frames
.iter()
.enumerate()
.filter_map(|(idx, fid)| {
profile.get_frame(*fid).map(|f| JsonPathFrame {
name: f.display_name(),
location: f.location(),
is_hotspot: idx == p.frames.len() - 1,
})
})
.collect();
let mut explanation = Vec::new();
if let Some(&leaf_id) = p.frames.last() {
if let Some(func) =
analysis.functions.iter().find(|f| f.frame_id == leaf_id)
{
let self_pct = func.self_percent(analysis.total_time);
if self_pct > 1.0 {
explanation.push(format!(
"Leaf function has {:.1}% self time (self-heavy)",
self_pct
));
}
}
}
if analysis.total_samples > 0 {
let path_pct =
(p.sample_count as f64 / analysis.total_samples as f64) * 100.0;
if path_pct > 1.0 {
explanation.push(format!(
"Appears in {:.1}% of samples (frequently executed)",
path_pct
));
}
}
JsonHotPath {
rank: i + 1,
frames,
time_us: p.time,
time_ms: p.time as f64 / 1000.0,
percent: p.percent,
sample_count: p.sample_count,
explanation,
}
})
.collect(),
hot_function_details: analysis
.hot_function_details
.iter()
.map(|d| {
let call_pattern_signal =
if d.callers.len() == 1 && d.self_time > analysis.total_time / 100 {
Some(
"Single caller — if result is deterministic, consider memoization"
.to_string(),
)
} else if d.callers.len() > 3 {
Some(format!(
"Called from {} different sites — hot utility function",
d.callers.len()
))
} else {
None
};
JsonHotFunctionDetail {
name: d.name.clone(),
location: d.location.clone(),
self_time_ms: d.self_time as f64 / 1000.0,
self_percent: if analysis.total_time > 0 {
(d.self_time as f64 / analysis.total_time as f64) * 100.0
} else {
0.0
},
inclusive_time_ms: d.total_time as f64 / 1000.0,
callers: d
.callers
.iter()
.map(|c| JsonCallerCallee {
name: c.name.clone(),
location: c.location.clone(),
time_ms: c.time as f64 / 1000.0,
call_count: c.call_count,
})
.collect(),
callees: d
.callees
.iter()
.map(|c| JsonCallerCallee {
name: c.name.clone(),
location: c.location.clone(),
time_ms: c.self_time as f64 / 1000.0,
call_count: c.call_count,
})
.collect(),
call_pattern_signal,
}
})
.collect(),
file_stats: analysis
.file_stats
.iter()
.map(|f| JsonFileStats {
file: f.file.clone(),
self_time_ms: f.self_time as f64 / 1000.0,
total_time_ms: f.total_time as f64 / 1000.0,
call_count: f.call_count,
category: format!("{}", f.category),
})
.collect(),
package_stats: analysis
.package_stats
.iter()
.map(|p| JsonPackageStats {
package: p.package.clone(),
time_ms: p.time as f64 / 1000.0,
percent_of_deps: p.percent_of_deps,
top_function: p.top_function.clone(),
top_function_location: p.top_function_location.clone(),
})
.collect(),
signals: JsonSignals {
gc_time_us: analysis.gc_time,
gc_time_ms: analysis.gc_time as f64 / 1000.0,
gc_percent: gc_pct,
gc_assessment,
native_time_us: analysis.native_time,
native_time_ms: analysis.native_time as f64 / 1000.0,
native_percent: native_pct,
},
recommendations: JsonRecommendations { critical, high },
};
serde_json::to_writer_pretty(writer, &output)?;
Ok(())
}
#[expect(clippy::cast_precision_loss)]
fn write_heap_analysis(
&self,
profile: &ProfileIR,
analysis: &HeapAnalysis,
writer: &mut dyn Write,
) -> Result<(), OutputError> {
#[derive(Serialize)]
struct HeapOutput<'a> {
metadata: HeapMetadata<'a>,
category_breakdown: HeapCategoryBreakdown,
allocations: Vec<HeapAllocation>,
}
#[derive(Serialize)]
struct HeapMetadata<'a> {
source_file: Option<&'a str>,
total_size_bytes: u64,
total_size_formatted: String,
total_allocations: usize,
}
#[derive(Serialize)]
struct HeapCategoryBreakdown {
app_bytes: u64,
app_percent: f64,
deps_bytes: u64,
deps_percent: f64,
node_internal_bytes: u64,
node_internal_percent: f64,
v8_native_bytes: u64,
v8_native_percent: f64,
}
#[derive(Serialize)]
struct HeapAllocation {
name: String,
location: String,
category: String,
self_bytes: u64,
self_formatted: String,
self_percent: f64,
total_bytes: u64,
total_formatted: String,
allocation_count: u32,
}
let breakdown = &analysis.category_breakdown;
let total = breakdown.total();
let output = HeapOutput {
metadata: HeapMetadata {
source_file: profile.source_file.as_deref(),
total_size_bytes: analysis.total_size,
total_size_formatted: AllocationStats::format_size(analysis.total_size),
total_allocations: analysis.total_allocations,
},
category_breakdown: HeapCategoryBreakdown {
app_bytes: breakdown.app,
app_percent: if total > 0 {
(breakdown.app as f64 / total as f64) * 100.0
} else {
0.0
},
deps_bytes: breakdown.deps,
deps_percent: if total > 0 {
(breakdown.deps as f64 / total as f64) * 100.0
} else {
0.0
},
node_internal_bytes: breakdown.node_internal,
node_internal_percent: if total > 0 {
(breakdown.node_internal as f64 / total as f64) * 100.0
} else {
0.0
},
v8_native_bytes: breakdown.v8_internal + breakdown.native,
v8_native_percent: if total > 0 {
((breakdown.v8_internal + breakdown.native) as f64 / total as f64) * 100.0
} else {
0.0
},
},
allocations: analysis
.functions
.iter()
.map(|f| HeapAllocation {
name: f.name.clone(),
location: f.location.clone(),
category: format!("{:?}", f.category),
self_bytes: f.self_size,
self_formatted: AllocationStats::format_size(f.self_size),
self_percent: f.self_percent(analysis.total_size),
total_bytes: f.total_size,
total_formatted: AllocationStats::format_size(f.total_size),
allocation_count: f.allocation_count,
})
.collect(),
};
serde_json::to_writer_pretty(writer, &output)?;
Ok(())
}
}