use std::collections::HashMap;
use std::path::{Component, Path, PathBuf};

use serde::Serialize;

use super::{FrameCategory, FrameId, StackId};
use crate::sourcemap::SourceMapResolver;
/// Collapse `.` and `..` segments of `path` lexically (no filesystem access).
///
/// A `..` cancels the most recent `Normal` segment when one exists; leading
/// `..` segments (nothing left to pop) are preserved, as are root/prefix
/// components. Returns the normalized path rendered lossily as a `String`.
///
/// Accepts any `&Path` (previously `&PathBuf`; callers passing `&PathBuf`
/// still work via deref coercion).
fn normalize_path_components(path: &Path) -> String {
    let mut components: Vec<Component> = Vec::new();
    for component in path.components() {
        match component {
            Component::ParentDir => {
                // `..` cancels the previous normal segment, if any;
                // otherwise it must be kept (path escapes its root).
                if let Some(Component::Normal(_)) = components.last() {
                    components.pop();
                } else {
                    components.push(component);
                }
            }
            // `.` is a no-op segment; drop it.
            Component::CurDir => {}
            _ => {
                components.push(component);
            }
        }
    }
    components
        .iter()
        .collect::<PathBuf>()
        .to_string_lossy()
        .into_owned()
}
/// A single profiling sample: one observed stack plus a weight.
#[derive(Debug, Clone, Serialize)]
pub struct Sample {
    /// Timestamp of the sample in microseconds.
    pub timestamp_us: u64,
    /// Id of the stack captured for this sample; resolve via
    /// `ProfileIR::get_stack`.
    pub stack_id: StackId,
    /// Sample weight — presumably time units for CPU profiles and bytes for
    /// heap profiles; confirm against the parsers that build samples.
    pub weight: u64,
}
impl Sample {
pub fn new(timestamp_us: u64, stack_id: StackId, weight: u64) -> Self {
Self {
timestamp_us,
stack_id,
weight,
}
}
}
/// Normalized in-memory representation of a parsed profile.
#[derive(Debug, Clone)]
pub struct ProfileIR {
    /// Frame table; looked up by id via `get_frame`.
    pub frames: Vec<super::Frame>,
    /// Stack table; looked up by id via `get_stack`.
    pub stacks: Vec<super::Stack>,
    /// Samples, each referencing an entry in `stacks` by id.
    pub samples: Vec<Sample>,
    /// Whether this is a CPU or heap profile.
    pub profile_type: ProfileType,
    /// Total duration in microseconds; `Some` for CPU profiles, `None` for
    /// heap profiles (see `new_cpu`/`new_heap`).
    pub duration_us: Option<u64>,
    /// Path/name of the file this profile came from, when known.
    pub source_file: Option<String>,
    /// Number of frames rewritten through source maps
    /// (set by `resolve_sourcemaps`).
    pub sourcemaps_resolved: usize,
    /// How many input profiles were combined into this one
    /// (1 unless produced by `merge`).
    pub profiles_merged: usize,
}
/// The kind of measurement a profile's samples represent.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProfileType {
    /// CPU-time profile (constructed with a duration).
    Cpu,
    /// Heap/allocation profile (no duration).
    Heap,
}
impl ProfileIR {
pub fn new_cpu(
frames: Vec<super::Frame>,
stacks: Vec<super::Stack>,
samples: Vec<Sample>,
duration_us: u64,
source_file: Option<String>,
) -> Self {
Self {
frames,
stacks,
samples,
profile_type: ProfileType::Cpu,
duration_us: Some(duration_us),
source_file,
sourcemaps_resolved: 0,
profiles_merged: 1,
}
}
pub fn new_heap(
frames: Vec<super::Frame>,
stacks: Vec<super::Stack>,
samples: Vec<Sample>,
source_file: Option<String>,
) -> Self {
Self {
frames,
stacks,
samples,
profile_type: ProfileType::Heap,
duration_us: None,
source_file,
sourcemaps_resolved: 0,
profiles_merged: 1,
}
}
pub fn total_weight(&self) -> u64 {
self.samples.iter().map(|s| s.weight).sum()
}
pub fn sample_count(&self) -> usize {
self.samples.len()
}
pub fn get_frame(&self, id: super::FrameId) -> Option<&super::Frame> {
self.frames.iter().find(|f| f.id == id)
}
pub fn get_stack(&self, id: StackId) -> Option<&super::Stack> {
self.stacks.iter().find(|s| s.id == id)
}
pub fn resolve_sourcemaps(&mut self, sourcemap_dirs: Vec<PathBuf>) -> usize {
if sourcemap_dirs.is_empty() {
return 0;
}
let mut resolver = SourceMapResolver::new(sourcemap_dirs.clone());
let mut resolved_count = 0;
let base_dir = sourcemap_dirs.first().cloned();
let mut location_counts: std::collections::HashMap<String, usize> =
std::collections::HashMap::new();
for frame in &mut self.frames {
let (Some(file), Some(line), Some(col)) = (&frame.file, frame.line, frame.col) else {
continue;
};
if let Some(resolved) = resolver.resolve(file, line, col) {
frame.minified_name = Some(frame.name.clone());
frame.minified_location = Some(frame.location());
if let Some(name) = resolved.name {
if !name.is_empty() {
frame.name = name;
}
}
let normalized_path = Self::normalize_sourcemap_path(&resolved.file, &base_dir);
frame.file = Some(normalized_path.clone());
frame.line = Some(resolved.line);
frame.col = Some(resolved.col);
let loc_key = format!("{}:{}", normalized_path, resolved.line);
*location_counts.entry(loc_key).or_insert(0) += 1;
frame.category = Self::classify_category_from_path(&normalized_path);
resolved_count += 1;
}
}
if resolved_count > 10 {
for (loc, count) in &location_counts {
let pct = (*count as f64 / resolved_count as f64) * 100.0;
if pct > 50.0 && *count > 20 {
eprintln!(
" ⚠️ Warning: {} frames ({:.0}%) resolved to same location: {}",
count, pct, loc
);
eprintln!(
" This usually means the source map points to a bundled file, not original sources."
);
eprintln!(
" Try pointing --sourcemap-dir to a directory with maps to original .ts files."
);
break;
}
}
}
self.sourcemaps_resolved = resolved_count;
resolved_count
}
fn normalize_sourcemap_path(path: &str, base_dir: &Option<PathBuf>) -> String {
let path = path
.strip_prefix("webpack://")
.or_else(|| path.strip_prefix("webpack:///"))
.or_else(|| path.strip_prefix("file://"))
.unwrap_or(path);
let path = if path.contains('/') && !path.starts_with('.') && !path.starts_with('/') {
let first_segment = path.split('/').next().unwrap_or("");
if !first_segment.contains('.') {
path.split_once('/').map(|(_, rest)| rest).unwrap_or(path)
} else {
path
}
} else {
path
};
if let Some(base) = base_dir {
let path_buf = PathBuf::from(path);
if path_buf.is_relative() {
let resolved = base.join(&path_buf);
if let Ok(canonical) = resolved.canonicalize() {
return canonical.to_string_lossy().to_string();
}
return normalize_path_components(&resolved);
}
}
path.to_string()
}
fn classify_category_from_path(path: &str) -> FrameCategory {
if path.contains("node_modules") {
return FrameCategory::Deps;
}
if path.starts_with("node:") || path.contains("internal/") {
return FrameCategory::NodeInternal;
}
FrameCategory::App
}
    /// Combine several profiles into one, deduplicating frames and stacks.
    ///
    /// Frames are merged by the key (name, file, line, col); stacks by their
    /// remapped frame-id sequence; samples are carried over with stack ids
    /// rewritten into the merged id space. Durations are summed over inputs
    /// that have one. Returns `None` for empty input and the single profile
    /// unchanged for a one-element input.
    ///
    /// NOTE(review): the merged `profile_type` is taken from the first
    /// profile; inputs of mixed types are not rejected here — confirm that
    /// callers guarantee homogeneity.
    pub fn merge(profiles: Vec<Self>) -> Option<Self> {
        if profiles.is_empty() {
            return None;
        }
        if profiles.len() == 1 {
            return profiles.into_iter().next();
        }
        let profile_type = profiles[0].profile_type;
        // Global dedup table: frame identity key -> merged frame id.
        let mut frame_key_to_id: HashMap<
            (String, Option<String>, Option<u32>, Option<u32>),
            FrameId,
        > = HashMap::new();
        let mut merged_frames: Vec<super::Frame> = Vec::new();
        // Global dedup table: remapped frame-id sequence -> merged stack id.
        let mut stack_key_to_id: HashMap<Vec<FrameId>, StackId> = HashMap::new();
        let mut merged_stacks: Vec<super::Stack> = Vec::new();
        let mut merged_samples: Vec<Sample> = Vec::new();
        let mut total_duration: u64 = 0;
        let mut total_sourcemaps_resolved: usize = 0;
        let profiles_count = profiles.len();
        for profile in profiles {
            // Per-profile map: this profile's frame id -> merged frame id.
            // Must be fully built before stacks are remapped below.
            let mut frame_id_map: HashMap<FrameId, FrameId> = HashMap::new();
            for frame in &profile.frames {
                let key = (
                    frame.name.clone(),
                    frame.file.clone(),
                    frame.line,
                    frame.col,
                );
                // First sighting of a key allocates the next merged id and
                // stores a copy of the frame under it; later sightings
                // (including from other profiles) reuse that id.
                let new_id = *frame_key_to_id.entry(key).or_insert_with(|| {
                    let id = FrameId(merged_frames.len() as u32);
                    let mut new_frame = frame.clone();
                    new_frame.id = id;
                    merged_frames.push(new_frame);
                    id
                });
                frame_id_map.insert(frame.id, new_id);
            }
            // Per-profile map: this profile's stack id -> merged stack id.
            let mut stack_id_map: HashMap<StackId, StackId> = HashMap::new();
            for stack in &profile.stacks {
                // Rewrite the stack's frame ids into the merged id space;
                // ids with no mapping are silently dropped (defensive —
                // every frame seen above was mapped).
                let remapped_frames: Vec<FrameId> = stack
                    .frames
                    .iter()
                    .filter_map(|fid| frame_id_map.get(fid).copied())
                    .collect();
                let new_id = *stack_key_to_id
                    .entry(remapped_frames.clone())
                    .or_insert_with(|| {
                        let id = StackId(merged_stacks.len() as u32);
                        merged_stacks.push(super::Stack::new(id, remapped_frames));
                        id
                    });
                stack_id_map.insert(stack.id, new_id);
            }
            for sample in &profile.samples {
                // Samples keep their original timestamps — timelines are not
                // re-based, so samples from different inputs may interleave.
                // Samples whose stack id is unknown are dropped (defensive).
                if let Some(&new_stack_id) = stack_id_map.get(&sample.stack_id) {
                    merged_samples.push(Sample {
                        timestamp_us: sample.timestamp_us,
                        stack_id: new_stack_id,
                        weight: sample.weight,
                    });
                }
            }
            if let Some(dur) = profile.duration_us {
                total_duration += dur;
            }
            total_sourcemaps_resolved += profile.sourcemaps_resolved;
        }
        Some(Self {
            frames: merged_frames,
            stacks: merged_stacks,
            samples: merged_samples,
            profile_type,
            // No input carried a positive duration (e.g. heap profiles)
            // => merged profile has no duration either.
            duration_us: if total_duration > 0 {
                Some(total_duration)
            } else {
                None
            },
            source_file: Some("(merged)".to_string()),
            sourcemaps_resolved: total_sourcemaps_resolved,
            profiles_merged: profiles_count,
        })
    }
}