1use std::io::Write;
2
3use crate::analysis::{
4 AllocationStats, CpuAnalysis, HeapAnalysis, HotFunctionDetail, HotPath, Priority,
5 Recommendation, RecommendationEngine,
6};
7use crate::ir::{FrameCategory, FrameKind, ProfileIR};
8
9use super::{Formatter, OutputError, format_time_ms, format_time_us};
10
/// Renders profiler analyses as human-readable Markdown reports.
///
/// Stateless unit struct: all behavior lives in the `Formatter` impl and
/// in associated helper functions.
pub struct MarkdownFormatter;
13
impl Formatter for MarkdownFormatter {
    // Emits the full CPU report: header/metadata, quick assessment,
    // executive summary, hotspot tables, hot paths, caller/callee
    // attribution, per-file and per-package tables, native-frame and GC
    // sections, and recommendations — in that fixed order.
    #[expect(clippy::cast_precision_loss)]
    #[expect(clippy::too_many_lines)]
    fn write_cpu_analysis(
        &self,
        profile: &ProfileIR,
        analysis: &CpuAnalysis,
        writer: &mut dyn Write,
    ) -> Result<(), OutputError> {
        // --- Title: package-focused variant when a focus filter is set ---
        if let Some(ref pkg) = analysis.metadata.focus_package {
            writeln!(writer, "# Profile Inspect Report — CPU (Package: `{pkg}`)")?;
        } else {
            writeln!(writer, "# Profile Inspect Report — CPU")?;
        }
        writeln!(writer)?;

        // --- Provenance: source file, noting how many profiles were merged ---
        if let Some(ref source) = analysis.metadata.source_file {
            if analysis.metadata.profiles_merged > 1 {
                writeln!(
                    writer,
                    "**Profile:** `{source}` ({} profiles merged)",
                    analysis.metadata.profiles_merged
                )?;
            } else {
                writeln!(writer, "**Profile:** `{source}`")?;
            }
        }

        // Human-readable summaries of the internals filter and sourcemap
        // resolution state, used in the metadata line further below.
        let internals_str = if analysis.metadata.internals_filtered {
            "hidden"
        } else {
            "shown"
        };
        let sourcemaps_total =
            analysis.metadata.sourcemaps_loaded + analysis.metadata.sourcemaps_inline;
        let sourcemaps_str = if sourcemaps_total > 0 {
            // Distinguish "all/some inline" from externally loaded maps.
            if analysis.metadata.sourcemaps_inline > 0 && analysis.metadata.sourcemaps_loaded > 0 {
                format!(
                    "{} frames resolved ({} inline)",
                    sourcemaps_total, analysis.metadata.sourcemaps_inline
                )
            } else if analysis.metadata.sourcemaps_inline > 0 {
                format!("{} frames resolved (inline)", sourcemaps_total)
            } else {
                format!("{} frames resolved", sourcemaps_total)
            }
        } else {
            "OFF".to_string()
        };

        let cpu_time_str = format_time_ms(analysis.metadata.duration_ms);

        // --- Timing line: richer output when wall time is known ---
        if let Some(wall_ms) = analysis.metadata.wall_time_ms {
            let wall_str = format_time_ms(wall_ms);
            let util = analysis.metadata.cpu_utilization().unwrap_or(0.0);
            let util_pct = util * 100.0;

            // Merged profiles sum CPU across processes, so mark the
            // utilization figure as aggregated.
            let util_note = if analysis.metadata.profiles_merged > 1 {
                " (aggregated)"
            } else {
                ""
            };

            writeln!(
                writer,
                "**Wall time:** {} | **CPU time:** {} | **CPU utilization:** ~{:.0}%{}",
                wall_str, cpu_time_str, util_pct, util_note
            )?;
            writeln!(
                writer,
                "**Samples:** {} | **Interval:** ~{:.2} ms",
                analysis.metadata.sample_count, analysis.metadata.sample_interval_ms
            )?;

            let workload_class =
                Self::classify_workload(util_pct, analysis.metadata.profiles_merged);
            writeln!(writer, "**Workload:** {}", workload_class)?;

            let breakdown = &analysis.category_breakdown;
            let total = breakdown.total();
            if total > 0 {
                let top_category = Self::top_category_summary(breakdown, total);
                writeln!(writer, "**Top category (self time):** {}", top_category)?;
            }

            // >100% utilization is expected with merged multi-process
            // profiles; explain it instead of letting it look like a bug.
            if analysis.metadata.profiles_merged > 1 && util_pct > 100.0 {
                writeln!(writer)?;
                writeln!(
                    writer,
                    "> ℹ️ CPU utilization exceeds 100% because {} profiles were merged (multiple processes running in parallel).",
                    analysis.metadata.profiles_merged
                )?;
            }
        } else {
            // No wall clock available — fall back to a compact duration line.
            writeln!(
                writer,
                "**Duration:** {} | **Samples:** {} | **Interval:** ~{:.2} ms",
                cpu_time_str, analysis.metadata.sample_count, analysis.metadata.sample_interval_ms
            )?;
        }

        // --- Filter/sourcemap status line ---
        if let Some(ref pkg) = analysis.metadata.focus_package {
            writeln!(writer, "**Package filter:** `{pkg}`")?;
        } else {
            writeln!(
                writer,
                "**Node/V8 internals:** {internals_str} | **Sourcemaps:** {sourcemaps_str}"
            )?;
        }

        // One-line summary of every active scope-narrowing option, if any.
        if let Some(scope_line) = Self::scope_line(&analysis.metadata) {
            writeln!(writer, "{scope_line}")?;
        }

        // Data-quality caveats (short profile, coarse sampling interval).
        let quality_notes = Self::profile_quality_notes(&analysis.metadata);
        if !quality_notes.is_empty() {
            for note in quality_notes {
                writeln!(writer, "> ℹ️ {note}")?;
            }
        }

        // Explain why multiple profiles exist at all (npx, workers, …).
        if analysis.metadata.profiles_merged > 1 {
            writeln!(writer)?;
            writeln!(
                writer,
                "> **Note:** {} profiles were merged. This happens when Node.js spawns multiple processes",
                analysis.metadata.profiles_merged
            )?;
            writeln!(
                writer,
                "> (e.g., `npx` launching your script, worker threads, or child processes)."
            )?;
            writeln!(
                writer,
                "> Each process generates its own `.cpuprofile` file."
            )?;
        }

        writeln!(writer)?;
        writeln!(writer, "---")?;
        writeln!(writer)?;

        // --- Quick Assessment (TL;DR) section ---
        Self::write_tldr(writer, profile, analysis)?;

        // --- Executive Summary: category breakdown table ---
        // Category totals always come from the full profile, so flag the
        // scope mismatch when filters are active.
        if Self::has_filters(&analysis.metadata) {
            writeln!(writer, "## Executive Summary (Full Profile)")?;
            writeln!(writer)?;
            writeln!(
                writer,
                "> **Scope note:** Function tables, hot paths, and recommendations honor filters. Category totals below reflect the full profile."
            )?;
            writeln!(writer)?;
        } else {
            writeln!(writer, "## Executive Summary")?;
            writeln!(writer)?;
        }
        writeln!(writer, "| Category | Self | % | Stack | % | Assessment |")?;
        writeln!(
            writer,
            "|----------|-----------|---|-----------|---|------------|"
        )?;

        let breakdown = &analysis.category_breakdown;
        let inclusive = &analysis.category_breakdown_inclusive;
        let total = breakdown.total();

        // One row per category; V8 and Native are folded into a single row.
        Self::write_summary_row_with_inclusive(
            writer,
            "App code",
            breakdown.app,
            inclusive.app,
            total,
        )?;
        Self::write_summary_row_with_inclusive(
            writer,
            "Dependencies",
            breakdown.deps,
            inclusive.deps,
            total,
        )?;
        Self::write_summary_row_with_inclusive(
            writer,
            "Node.js internals",
            breakdown.node_internal,
            inclusive.node_internal,
            total,
        )?;
        Self::write_summary_row_with_inclusive(
            writer,
            "V8/Native",
            breakdown.v8_internal + breakdown.native,
            inclusive.v8_internal + inclusive.native,
            total,
        )?;
        writeln!(writer)?;

        // Legend: clarify exclusive vs inclusive semantics of the table.
        writeln!(
            writer,
            "> **Self:** CPU time spent directly executing this category's code (exclusive, sums to 100%)."
        )?;
        writeln!(
            writer,
            "> **Stack:** CPU time when this category appears anywhere in the call stack (inclusive)."
        )?;
        writeln!(
            writer,
            "> Stack percentages can exceed 100% because categories overlap (e.g., App calls Deps)."
        )?;
        writeln!(writer)?;

        writeln!(writer, "**Key takeaways:**")?;
        Self::write_key_takeaways(writer, analysis)?;
        writeln!(writer)?;
        writeln!(writer, "---")?;
        writeln!(writer)?;

        // --- Optional phase (startup vs steady-state) breakdown ---
        if let Some(ref phases) = analysis.phase_analysis {
            Self::write_phase_analysis(writer, phases)?;
        }

        // --- Top hotspots by self (exclusive) time, up to 25 rows ---
        writeln!(writer, "## Top Hotspots by Self Time")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "> Self time = CPU time spent directly in this function."
        )?;
        writeln!(writer)?;
        writeln!(
            writer,
            "| # | Self | % | Samples | Total | Function | Location | Category |"
        )?;
        writeln!(
            writer,
            "|---|------|---|---------|-------|----------|----------|----------|"
        )?;

        for (i, func) in analysis.functions.iter().take(25).enumerate() {
            let self_time = format_time_us(func.self_time);
            let self_pct = func.self_percent(analysis.total_time);
            let total_time = format_time_us(func.total_time);
            let category_badge = Self::category_badge(func.category);

            writeln!(
                writer,
                "| {} | {} | {} | {} | {} | `{}` | `{}` | {} |",
                i + 1,
                self_time,
                Self::format_percent(self_pct),
                func.self_samples,
                total_time,
                Self::escape_markdown(&func.name),
                Self::format_location(&func.location),
                category_badge
            )?;
        }
        writeln!(writer)?;

        // --- Top hotspots by total (inclusive) time, up to 15 rows ---
        writeln!(writer, "## Top Hotspots by Total Time")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "> Total time = CPU time when this function is on the stack (including callees)."
        )?;
        writeln!(writer)?;
        writeln!(
            writer,
            "| # | Total | % | Self | Function | Location | Category |"
        )?;
        writeln!(
            writer,
            "|---|-------|---|------|----------|----------|----------|"
        )?;

        for (i, func) in analysis.functions_by_total.iter().take(15).enumerate() {
            let self_time = format_time_us(func.self_time);
            let total_time = format_time_us(func.total_time);
            let total_pct = func.total_percent(analysis.total_time);
            let category_badge = Self::category_badge(func.category);

            writeln!(
                writer,
                "| {} | {} | {} | {} | `{}` | `{}` | {} |",
                i + 1,
                total_time,
                Self::format_percent(total_pct),
                self_time,
                Self::escape_markdown(&func.name),
                Self::format_location(&func.location),
                category_badge
            )?;
        }
        writeln!(writer)?;

        // --- Hot call paths (ASCII visualization + explanation) ---
        if !analysis.hot_paths.is_empty() {
            writeln!(writer, "## Hot Paths")?;
            writeln!(writer)?;
            writeln!(writer, "> Sorted by CPU time descending.")?;
            writeln!(writer)?;
            // Keep only paths with meaningful weight (>=0.2%, >=10ms, or
            // >=10 samples); fall back to the single top path so the
            // section is never empty once entered.
            let mut significant_paths: Vec<&HotPath> = analysis
                .hot_paths
                .iter()
                .filter(|path| {
                    path.percent >= 0.2 || path.time >= 10_000 || path.sample_count >= 10
                })
                .collect();

            if significant_paths.is_empty() {
                significant_paths = analysis.hot_paths.iter().take(1).collect();
            }

            for (i, path) in significant_paths.iter().take(5).enumerate() {
                let cpu_time_str = format_time_us(path.time);
                let path_pct = Self::format_percent(path.percent);
                // Flag statistically weak paths rather than hiding them.
                let low_signal = path.percent < 0.1 || path.sample_count < 10;
                let signal_note = if low_signal { " — low signal" } else { "" };

                writeln!(
                    writer,
                    "### Path #{} — {} ({}, {} samples){}",
                    i + 1,
                    path_pct,
                    cpu_time_str,
                    path.sample_count,
                    signal_note
                )?;
                writeln!(writer)?;
                writeln!(writer, "```")?;
                Self::write_hot_path_visualization(writer, profile, path)?;
                writeln!(writer, "```")?;
                writeln!(writer)?;

                Self::write_path_explanation(writer, profile, path, analysis)?;
                writeln!(writer)?;
            }
        }

        // --- Per-function caller/callee attribution ---
        if !analysis.hot_function_details.is_empty() {
            writeln!(writer, "## Caller & Callee Attribution")?;
            writeln!(writer)?;

            for detail in &analysis.hot_function_details {
                Self::write_hot_function_detail(writer, detail, analysis)?;
            }
        }

        if !analysis.recursive_functions.is_empty() {
            Self::write_recursive_functions(writer, analysis)?;
        }

        // --- Per-source-file table ---
        if !analysis.file_stats.is_empty() {
            if analysis.metadata.focus_package.is_some() {
                // File stats are full-profile numbers, so showing them under
                // a package filter would be misleading — explain the omission.
                writeln!(writer, "## By Source File")?;
                writeln!(writer)?;
                writeln!(
                    writer,
                    "> Omitted under package filter — file stats are computed from the full profile."
                )?;
                writeln!(writer)?;
            } else {
                let file_stats: Vec<_> = analysis
                    .file_stats
                    .iter()
                    .filter(|fs| Self::category_allowed(&analysis.metadata, fs.category))
                    .collect();

                if !file_stats.is_empty() {
                    writeln!(writer, "## By Source File")?;
                    writeln!(writer)?;
                    writeln!(writer, "| File | Self | Total | Samples | Category |")?;
                    writeln!(writer, "|------|------|-------|---------|----------|")?;

                    for fs in file_stats.iter().take(15) {
                        let self_time = format_time_us(fs.self_time);
                        let total_time = format_time_us(fs.total_time);
                        let category_badge = Self::category_badge(fs.category);

                        writeln!(
                            writer,
                            "| `{}` | {} | {} | {} | {} |",
                            Self::format_location(&fs.file),
                            self_time,
                            total_time,
                            fs.call_count,
                            category_badge
                        )?;
                    }
                    writeln!(writer)?;
                }
            }
        }

        // --- Per-dependency-package table ---
        // Shown only when no package focus is set and the Deps category is
        // not excluded by a category filter.
        let show_package_stats = analysis.metadata.focus_package.is_none()
            && (analysis.metadata.filter_categories.is_empty()
                || analysis
                    .metadata
                    .filter_categories
                    .contains(&FrameCategory::Deps));

        if !analysis.package_stats.is_empty() && show_package_stats {
            writeln!(writer, "## By Dependency Package")?;
            writeln!(writer)?;
            writeln!(writer, "| Package | Time | % of Deps | Top Function |")?;
            writeln!(writer, "|---------|------|-----------|--------------|")?;

            for pkg in &analysis.package_stats {
                let time_str = format_time_us(pkg.time);
                writeln!(
                    writer,
                    "| `{}` | {} | {:.1}% | `{}` |",
                    pkg.package,
                    time_str,
                    pkg.percent_of_deps,
                    Self::escape_markdown(&pkg.top_function)
                )?;
            }
            writeln!(writer)?;
        }

        // --- Native (compiled) leaf-frame section ---
        if analysis.native_time > 0 {
            writeln!(writer, "## Native/Runtime Frames (Leaf)")?;
            writeln!(writer)?;
            writeln!(
                writer,
                "> Time in frames explicitly marked `Native` (leaf frames only)."
            )?;
            writeln!(
                writer,
                "> V8/Node JS internals still appear in the category breakdown above."
            )?;
            writeln!(writer)?;

            let native_time = format_time_us(analysis.native_time);
            let native_pct = if analysis.total_time > 0 {
                (analysis.native_time as f64 / analysis.total_time as f64) * 100.0
            } else {
                0.0
            };

            writeln!(writer, "**Total:** {} ({:.1}%)", native_time, native_pct)?;
            writeln!(writer)?;

            // Up to 5 named native frames from the self-time ranking.
            let native_entries: Vec<_> = analysis
                .functions
                .iter()
                .filter(|f| {
                    profile
                        .get_frame(f.frame_id)
                        .is_some_and(|frame| frame.kind == FrameKind::Native)
                })
                .take(5)
                .collect();

            if !native_entries.is_empty() {
                writeln!(writer, "**Visible native frames:**")?;
                for func in native_entries {
                    let time_str = format_time_us(func.self_time);
                    writeln!(writer, "- `{}` — {}", func.name, time_str)?;
                }
                writeln!(writer)?;
            }

            writeln!(writer, "**What this means:**")?;
            writeln!(
                writer,
                "- This time is spent in compiled code (V8/Node native internals, syscalls, or addons)"
            )?;
            writeln!(
                writer,
                "- To attribute to specific libraries, capture a native profile (Instruments/perf)"
            )?;
            writeln!(
                writer,
                "- Focus optimization on reducing how often your JS code triggers native operations"
            )?;
            writeln!(writer)?;
        }

        // --- GC section: detailed when full analysis exists, else a
        // minimal one-line summary from the raw gc_time counter ---
        if let Some(ref gc) = analysis.gc_analysis {
            Self::write_gc_analysis(
                writer,
                gc,
                analysis.total_time,
                analysis.phase_analysis.as_ref(),
            )?;
        } else if analysis.gc_time > 0 {
            writeln!(writer, "## GC & Allocation Signals")?;
            writeln!(writer)?;
            let gc_time = format_time_us(analysis.gc_time);
            // NOTE(review): no total_time > 0 guard here, unlike the other
            // percentage computations in this method — total_time == 0 would
            // yield NaN/inf in the output. Confirm total_time > 0 is
            // guaranteed whenever gc_time > 0.
            let gc_pct = (analysis.gc_time as f64 / analysis.total_time as f64) * 100.0;
            writeln!(writer, "**GC time:** {} ({:.1}%)", gc_time, gc_pct)?;
            writeln!(writer)?;
        }

        Self::write_recommendations(writer, profile, analysis)?;

        Ok(())
    }

    // Emits the heap report: header, allocation-by-category table, full
    // per-function allocation table, and simple recommendations.
    #[expect(clippy::cast_precision_loss)]
    fn write_heap_analysis(
        &self,
        profile: &ProfileIR,
        analysis: &HeapAnalysis,
        writer: &mut dyn Write,
    ) -> Result<(), OutputError> {
        writeln!(writer, "# Profile Inspect Report — Heap")?;
        writeln!(writer)?;

        if let Some(ref source) = profile.source_file {
            writeln!(writer, "**Profile:** `{source}`")?;
        }

        writeln!(
            writer,
            "**Total allocated:** {} | **Allocations:** {}",
            AllocationStats::format_size(analysis.total_size),
            analysis.total_allocations
        )?;
        writeln!(writer)?;
        writeln!(writer, "---")?;
        writeln!(writer)?;

        // --- Allocation size by frame category ---
        writeln!(writer, "## Allocation by Category")?;
        writeln!(writer)?;
        writeln!(writer, "| Category | Size | % |")?;
        writeln!(writer, "|----------|------|---|")?;

        let breakdown = &analysis.category_breakdown;
        let total = breakdown.total();

        Self::write_heap_category_row(writer, "App code", breakdown.app, total)?;
        Self::write_heap_category_row(writer, "Dependencies", breakdown.deps, total)?;
        Self::write_heap_category_row(writer, "Node.js internals", breakdown.node_internal, total)?;
        Self::write_heap_category_row(
            writer,
            "V8/Native",
            breakdown.v8_internal + breakdown.native,
            total,
        )?;
        writeln!(writer)?;

        // --- Per-function allocation table (all functions, no cap) ---
        writeln!(writer, "## Top Allocations by Size")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "| # | Self | % | Allocs | Total | Function | Location | Category |"
        )?;
        writeln!(
            writer,
            "|---|------|---|--------|-----------|----------|----------|----------|"
        )?;

        for (i, func) in analysis.functions.iter().enumerate() {
            let self_pct = func.self_percent(analysis.total_size);
            let self_str = AllocationStats::format_size(func.self_size);
            let total_str = AllocationStats::format_size(func.total_size);
            let category = Self::category_badge(func.category);

            writeln!(
                writer,
                "| {} | {} | {:.1}% | {} | {} | `{}` | `{}` | {} |",
                i + 1,
                self_str,
                self_pct,
                func.allocation_count,
                total_str,
                Self::escape_markdown(&func.name),
                Self::escape_markdown(&func.location),
                category
            )?;
        }
        writeln!(writer)?;

        // --- Recommendations: call out functions allocating >=10% ---
        writeln!(writer, "## Recommendations")?;
        writeln!(writer)?;

        let large_allocators: Vec<_> = analysis
            .functions
            .iter()
            .filter(|f| f.self_percent(analysis.total_size) >= 10.0)
            .collect();

        if !large_allocators.is_empty() {
            writeln!(writer, "**Large allocators (>=10% of total):**")?;
            writeln!(writer)?;
            for func in &large_allocators {
                let pct = func.self_percent(analysis.total_size);
                writeln!(
                    writer,
                    "- `{}` — {:.1}% ({})",
                    func.name,
                    pct,
                    AllocationStats::format_size(func.self_size)
                )?;
            }
        } else {
            writeln!(writer, "No single function dominates allocations.")?;
            writeln!(writer)?;
            writeln!(writer, "Memory is well-distributed across the codebase.")?;
        }

        Ok(())
    }
}
648
649impl MarkdownFormatter {
    // Emits the "Quick Assessment" (TL;DR) section: verdict line, a compact
    // per-category status table, the top hotspot with an estimated payoff,
    // and a hotspot-concentration summary. `_profile` is currently unused.
    #[expect(clippy::cast_precision_loss)]
    fn write_tldr(
        writer: &mut dyn Write,
        _profile: &ProfileIR,
        analysis: &CpuAnalysis,
    ) -> Result<(), OutputError> {
        writeln!(writer, "## Quick Assessment")?;
        writeln!(writer)?;

        let filters = &analysis.metadata.filter_categories;
        let has_filter = !filters.is_empty();

        // Warn up front when a category filter narrows the rest of the report.
        if has_filter {
            let filter_names = Self::format_category_list(filters);
            writeln!(
                writer,
                "> **Filter active:** Function lists, hot paths, and recommendations show only {} categories",
                filter_names
            )?;
            writeln!(writer)?;
        }

        let breakdown = &analysis.category_breakdown;
        let total = breakdown.total();

        // Full-profile percentages per category (guarded against total == 0).
        let app_pct = if total > 0 {
            (breakdown.app as f64 / total as f64) * 100.0
        } else {
            0.0
        };
        let deps_pct = if total > 0 {
            (breakdown.deps as f64 / total as f64) * 100.0
        } else {
            0.0
        };
        let internal_pct = if total > 0 {
            ((breakdown.v8_internal + breakdown.native + breakdown.node_internal) as f64
                / total as f64)
                * 100.0
        } else {
            0.0
        };

        // Sum of the categories actually visible under the active filters;
        // used to show "% filtered" alongside "% total".
        let visible_total = Self::visible_total_time(analysis);
        let show_filtered_pct = has_filter
            || analysis.metadata.internals_filtered
            || analysis.metadata.focus_package.is_some();

        let verdict = Self::generate_verdict(app_pct, deps_pct, internal_pct, analysis);
        writeln!(writer, "**{}**", verdict)?;
        writeln!(writer)?;

        writeln!(writer, "| Category | Time | Status |")?;
        writeln!(writer, "|----------|------|--------|")?;

        // With a category filter active, only show rows for visible categories.
        let show_app = !has_filter || filters.contains(&FrameCategory::App);
        let show_deps = !has_filter || filters.contains(&FrameCategory::Deps);
        let show_internal = !has_filter
            || filters.contains(&FrameCategory::NodeInternal)
            || filters.contains(&FrameCategory::V8Internal)
            || filters.contains(&FrameCategory::Native);

        // Formats "N% total; M% filtered" when a filtered view differs from
        // the full profile, else just "N%".
        let format_pct_label = |value: u64| -> String {
            let pct_total = if total > 0 {
                (value as f64 / total as f64) * 100.0
            } else {
                0.0
            };
            if show_filtered_pct {
                if let Some(filtered_total) = visible_total {
                    if filtered_total > 0 && filtered_total != total {
                        let pct_filtered = (value as f64 / filtered_total as f64) * 100.0;
                        return format!("{:.0}% total; {:.0}% filtered", pct_total, pct_filtered);
                    }
                }
            }
            format!("{:.0}%", pct_total)
        };

        if show_app {
            let app_status = if app_pct > 50.0 {
                "⚠️ Focus here"
            } else if app_pct > 20.0 {
                "👀 Worth checking"
            } else {
                "✅ Healthy"
            };
            writeln!(
                writer,
                "| App code | {} ({}) | {} |",
                format_time_us(breakdown.app),
                format_pct_label(breakdown.app),
                app_status
            )?;
        }

        if show_deps {
            let deps_status = if deps_pct > 40.0 {
                "⚠️ Heavy deps"
            } else if deps_pct > 20.0 {
                "👀 Review usage"
            } else {
                "✅ Normal"
            };
            writeln!(
                writer,
                "| Dependencies | {} ({}) | {} |",
                format_time_us(breakdown.deps),
                format_pct_label(breakdown.deps),
                deps_status
            )?;
        }

        if show_internal {
            let internal_status = if internal_pct > 70.0 {
                "ℹ️ Startup overhead"
            } else {
                "✅ Normal"
            };
            writeln!(
                writer,
                "| V8/Node internals | {} ({}) | {} |",
                format_time_us(breakdown.v8_internal + breakdown.native + breakdown.node_internal),
                format_pct_label(
                    breakdown.v8_internal + breakdown.native + breakdown.node_internal
                ),
                internal_status
            )?;
        }
        writeln!(writer)?;

        // --- Top hotspot callout (functions are sorted by self time) ---
        if let Some(top) = analysis.functions.first() {
            let top_pct = top.self_percent(analysis.total_time);

            if top_pct >= 2.0 {
                // Model a hypothetical 50% optimization of the hotspot.
                let potential_savings = top.self_time / 2;
                let potential_pct = if analysis.total_time > 0 {
                    (potential_savings as f64 / analysis.total_time as f64) * 100.0
                } else {
                    0.0
                };

                writeln!(writer, "**Top hotspot:** `{}` at {:.1}%", top.name, top_pct)?;

                // Only quantify the payoff when the hotspot is substantial.
                if top_pct >= 5.0 {
                    writeln!(
                        writer,
                        "**If optimized 50%:** Save {} ({:.1}% faster)",
                        format_time_us(potential_savings),
                        potential_pct
                    )?;
                }

                // Category-specific guidance for the hotspot.
                match top.category {
                    FrameCategory::Deps => {
                        let pkg_hint = Self::extract_package_name(&top.location);
                        writeln!(
                            writer,
                            "**Note:** This is dependency code{}. Check if it's necessary or can be optimized.",
                            pkg_hint.map_or(String::new(), |p| format!(" ({})", p))
                        )?;
                    }
                    FrameCategory::App => {
                        writeln!(
                            writer,
                            "**Location:** `{}`",
                            Self::format_location(&top.location)
                        )?;
                    }
                    FrameCategory::NodeInternal
                    | FrameCategory::V8Internal
                    | FrameCategory::Native => {
                        writeln!(
                            writer,
                            "**Note:** This is runtime/engine code. Focus on what triggers it from your code."
                        )?;
                    }
                }
            } else {
                writeln!(
                    writer,
                    "**Top function:** `{}` at {:.1}% (low impact — no dominant CPU hotspot)",
                    top.name, top_pct
                )?;
            }
        }

        // --- Hotspot concentration: how much of the time the top 5 cover ---
        if !analysis.functions.is_empty() && analysis.total_time > 0 {
            let top_n = 5usize.min(analysis.functions.len());
            let top_sum: u64 = analysis
                .functions
                .iter()
                .take(top_n)
                .map(|f| f.self_time)
                .sum();
            let total_pct = (top_sum as f64 / analysis.total_time as f64) * 100.0;
            // Prefer the filtered denominator when it differs from the full
            // profile, and label which scope the percentage refers to.
            let (scope_pct, scope_label) = if let Some(filtered_total) = visible_total {
                if filtered_total > 0 && filtered_total != total {
                    let filtered_pct = (top_sum as f64 / filtered_total as f64) * 100.0;
                    (filtered_pct, "filtered")
                } else {
                    (total_pct, "total")
                }
            } else {
                (total_pct, "total")
            };
            let concentration = if scope_pct < 10.0 {
                "very flat"
            } else if scope_pct < 25.0 {
                "moderately flat"
            } else {
                "concentrated"
            };

            if scope_label == "filtered" {
                writeln!(
                    writer,
                    "**Hotspot concentration:** Top {top_n} functions = {:.1}% of filtered ({total_pct:.1}% of total) — {concentration}",
                    scope_pct
                )?;
            } else {
                writeln!(
                    writer,
                    "**Hotspot concentration:** Top {top_n} functions = {:.1}% of total — {concentration}",
                    scope_pct
                )?;
            }
        }

        writeln!(writer)?;
        writeln!(writer, "---")?;
        writeln!(writer)?;

        Ok(())
    }
896
897 fn generate_verdict(
899 app_pct: f64,
900 deps_pct: f64,
901 internal_pct: f64,
902 analysis: &CpuAnalysis,
903 ) -> String {
904 if let Some(gc) = &analysis.gc_analysis {
906 let gc_pct = if analysis.total_time > 0 {
907 (gc.total_time as f64 / analysis.total_time as f64) * 100.0
908 } else {
909 0.0
910 };
911 if gc_pct > 10.0 {
912 return format!(
913 "🔴 High GC pressure ({:.0}%) — reduce allocations to improve performance",
914 gc_pct
915 );
916 }
917 }
918
919 if app_pct > 50.0 {
921 if let Some(top) = analysis.functions.first() {
922 if top.self_percent(analysis.total_time) > 20.0 {
923 return format!(
924 "🔴 Single function dominates — `{}` uses {:.0}% of CPU",
925 top.name,
926 top.self_percent(analysis.total_time)
927 );
928 }
929 }
930 return "🟡 App code dominates — optimization opportunities exist".to_string();
931 }
932
933 if deps_pct > 40.0 {
935 return "🟡 Heavy dependency usage — review if all are necessary".to_string();
936 }
937
938 if internal_pct > 70.0 {
940 return "ℹ️ Profile is startup-heavy (V8/Node internals dominate). Profile under sustained load for better signal.".to_string();
941 }
942
943 if let Some(top) = analysis.functions.first() {
945 let top_pct = top.self_percent(analysis.total_time);
946 if top_pct < 5.0 {
947 return "✅ No clear bottleneck — CPU time is well-distributed".to_string();
948 }
949 }
950
951 "✅ Profile looks healthy — no critical issues detected".to_string()
952 }
953
954 fn has_filters(metadata: &crate::analysis::ProfileMetadata) -> bool {
955 metadata.internals_filtered
956 || metadata.focus_package.is_some()
957 || !metadata.filter_categories.is_empty()
958 }
959
960 fn scope_line(metadata: &crate::analysis::ProfileMetadata) -> Option<String> {
961 let mut parts = Vec::new();
962
963 if !metadata.filter_categories.is_empty() {
964 parts.push(format!(
965 "Categories: {}",
966 Self::format_category_list(&metadata.filter_categories)
967 ));
968 }
969
970 if metadata.internals_filtered {
971 parts.push("Internals hidden".to_string());
972 }
973
974 if let Some(pkg) = &metadata.focus_package {
975 parts.push(format!("Package: `{pkg}`"));
976 }
977
978 if parts.is_empty() {
979 None
980 } else {
981 Some(format!("**Scope:** Filtered view ({})", parts.join("; ")))
982 }
983 }
984
985 fn profile_quality_notes(metadata: &crate::analysis::ProfileMetadata) -> Vec<String> {
986 let mut notes = Vec::new();
987
988 if metadata.duration_ms < 1_000.0 || metadata.sample_count < 1_000 {
989 notes.push("Short profile (<1s or <1000 samples). Results may be noisy.".to_string());
990 }
991
992 if metadata.sample_interval_ms > 5.0 {
993 notes.push(
994 "Coarse sampling interval (>5ms). Fine-grained hotspots may be missed.".to_string(),
995 );
996 }
997
998 notes
999 }
1000
1001 fn format_category_list(categories: &[FrameCategory]) -> String {
1002 categories
1003 .iter()
1004 .map(|c| format!("`{}`", Self::category_label(*c)))
1005 .collect::<Vec<_>>()
1006 .join(", ")
1007 }
1008
1009 fn category_label(category: FrameCategory) -> &'static str {
1010 match category {
1011 FrameCategory::App => "App",
1012 FrameCategory::Deps => "Dependencies",
1013 FrameCategory::NodeInternal => "Node internals",
1014 FrameCategory::V8Internal => "V8 internals",
1015 FrameCategory::Native => "Native",
1016 }
1017 }
1018
1019 fn visible_total_time(analysis: &CpuAnalysis) -> Option<u64> {
1020 if analysis.metadata.focus_package.is_some() {
1021 return None;
1022 }
1023
1024 let mut categories = if analysis.metadata.filter_categories.is_empty() {
1025 vec![
1026 FrameCategory::App,
1027 FrameCategory::Deps,
1028 FrameCategory::NodeInternal,
1029 FrameCategory::V8Internal,
1030 FrameCategory::Native,
1031 ]
1032 } else {
1033 analysis.metadata.filter_categories.clone()
1034 };
1035
1036 if analysis.metadata.internals_filtered {
1037 categories.retain(|c| !c.is_internal());
1038 }
1039
1040 let breakdown = &analysis.category_breakdown;
1041 let mut total = 0;
1042 for category in categories {
1043 total += match category {
1044 FrameCategory::App => breakdown.app,
1045 FrameCategory::Deps => breakdown.deps,
1046 FrameCategory::NodeInternal => breakdown.node_internal,
1047 FrameCategory::V8Internal => breakdown.v8_internal,
1048 FrameCategory::Native => breakdown.native,
1049 };
1050 }
1051
1052 Some(total)
1053 }
1054
1055 fn category_allowed(
1056 metadata: &crate::analysis::ProfileMetadata,
1057 category: FrameCategory,
1058 ) -> bool {
1059 if metadata.internals_filtered && category.is_internal() {
1060 return false;
1061 }
1062
1063 if !metadata.filter_categories.is_empty() && !metadata.filter_categories.contains(&category)
1064 {
1065 return false;
1066 }
1067
1068 true
1069 }
1070
1071 fn extract_package_name(location: &str) -> Option<String> {
1073 let path = location.strip_prefix("file://").unwrap_or(location);
1074
1075 if let Some(nm_idx) = path.rfind("node_modules/") {
1076 let after_nm = &path[nm_idx + 13..];
1077
1078 if after_nm.starts_with('@') {
1080 let parts: Vec<&str> = after_nm.splitn(3, '/').collect();
1081 if parts.len() >= 2 {
1082 return Some(format!("{}/{}", parts[0], parts[1]));
1083 }
1084 } else {
1085 let parts: Vec<&str> = after_nm.splitn(2, '/').collect();
1086 if !parts.is_empty() {
1087 return Some(parts[0].to_string());
1088 }
1089 }
1090 }
1091
1092 None
1093 }
1094
1095 #[expect(clippy::cast_precision_loss)]
1096 fn write_summary_row_with_inclusive(
1097 writer: &mut dyn Write,
1098 name: &str,
1099 self_time: u64,
1100 inclusive_time: u64,
1101 total: u64,
1102 ) -> Result<(), OutputError> {
1103 let self_str = format_time_us(self_time);
1104 let inclusive_str = format_time_us(inclusive_time);
1105 let self_pct = if total > 0 {
1106 (self_time as f64 / total as f64) * 100.0
1107 } else {
1108 0.0
1109 };
1110 let inclusive_pct = if total > 0 {
1111 (inclusive_time as f64 / total as f64) * 100.0
1112 } else {
1113 0.0
1114 };
1115
1116 let assessment = if self_pct < 20.0 {
1117 "normal"
1118 } else if self_pct < 50.0 {
1119 "notable"
1120 } else {
1121 "dominant"
1122 };
1123
1124 writeln!(
1125 writer,
1126 "| {name} | {self_str} | {self_pct:.1}% | {inclusive_str} | {inclusive_pct:.1}% | {assessment} |"
1127 )?;
1128 Ok(())
1129 }
1130
1131 #[expect(clippy::cast_precision_loss)]
1132 fn write_heap_category_row(
1133 writer: &mut dyn Write,
1134 name: &str,
1135 size: u64,
1136 total: u64,
1137 ) -> Result<(), OutputError> {
1138 let size_str = AllocationStats::format_size(size);
1139 let pct = if total > 0 {
1140 (size as f64 / total as f64) * 100.0
1141 } else {
1142 0.0
1143 };
1144 writeln!(writer, "| {name} | {size_str} | {pct:.1}% |")?;
1145 Ok(())
1146 }
1147
    /// Writes the "GC & Allocation Signals" section.
    ///
    /// Covers overall GC overhead with a severity badge, a startup vs
    /// steady-state split (when phase data is available), an estimated
    /// "Improvement Potential" table attributed to allocation hotspots, the
    /// hotspot list itself, and — above 5% GC — optimization strategies.
    #[expect(clippy::cast_precision_loss)]
    fn write_gc_analysis(
        writer: &mut dyn Write,
        gc: &crate::analysis::GcAnalysis,
        total_time: u64,
        phase_analysis: Option<&crate::analysis::PhaseAnalysis>,
    ) -> Result<(), OutputError> {
        writeln!(writer, "## GC & Allocation Signals")?;
        writeln!(writer)?;

        let gc_time_str = format_time_us(gc.total_time);
        // Guard total_time == 0 so the share never becomes NaN.
        let gc_pct = if total_time > 0 {
            (gc.total_time as f64 / total_time as f64) * 100.0
        } else {
            0.0
        };
        let avg_pause_str = format_time_us(gc.avg_pause_us);

        // Severity ladder: >10% red, >5% yellow, >2% green, else minimal.
        let (severity, assessment) = if gc_pct > 10.0 {
            ("🔴", "High GC pressure — likely allocation hotspot")
        } else if gc_pct > 5.0 {
            ("🟡", "Moderate GC — worth investigating")
        } else if gc_pct > 2.0 {
            ("🟢", "Normal GC overhead")
        } else {
            ("⚪", "Minimal GC activity")
        };

        writeln!(
            writer,
            "**{} GC overhead:** {} ({:.1}%) across {} samples — {}",
            severity, gc_time_str, gc_pct, gc.sample_count, assessment
        )?;
        writeln!(writer)?;

        // Savings model: assume GC can realistically be reduced to ~2% of
        // total time; everything above that target counts as recoverable.
        let target_gc_pct = 2.0;
        let potential_savings_us = if gc_pct > target_gc_pct {
            let excess_pct = gc_pct - target_gc_pct;
            (excess_pct / 100.0 * total_time as f64) as u64
        } else {
            0
        };
        let potential_speedup_pct = if total_time > 0 {
            (potential_savings_us as f64 / total_time as f64) * 100.0
        } else {
            0.0
        };

        writeln!(writer, "| Metric | Value |")?;
        writeln!(writer, "|--------|-------|")?;
        writeln!(
            writer,
            "| Total GC time | {} ({:.1}%) |",
            gc_time_str, gc_pct
        )?;
        writeln!(writer, "| GC samples | {} |", gc.sample_count)?;
        writeln!(writer, "| Avg pause | {} |", avg_pause_str)?;

        // Phase rows only when phase data exists and some startup GC was
        // observed; startup_gc_time > 0 also keeps the divisor non-zero.
        if phase_analysis.is_some() && gc.startup_gc_time > 0 {
            let startup_pct = (gc.startup_gc_time as f64 / gc.total_time as f64) * 100.0;
            let steady_pct = (gc.steady_gc_time as f64 / gc.total_time as f64) * 100.0;
            writeln!(
                writer,
                "| Startup GC | {} ({:.0}%) |",
                format_time_us(gc.startup_gc_time),
                startup_pct
            )?;
            writeln!(
                writer,
                "| Steady-state GC | {} ({:.0}%) |",
                format_time_us(gc.steady_gc_time),
                steady_pct
            )?;
        }
        writeln!(writer)?;

        // Surface the savings section only when the win is material:
        // GC above 3% and an estimated speedup beyond 0.5%.
        if gc_pct > 3.0 && potential_savings_us > 0 && potential_speedup_pct > 0.5 {
            writeln!(writer, "### 📈 Improvement Potential")?;
            writeln!(writer)?;
            writeln!(
                writer,
                "Reducing GC from {:.0}% to ~{:.0}% could save approximately **{}** (~{:.0}% faster)",
                gc_pct,
                target_gc_pct,
                format_time_us(potential_savings_us),
                potential_speedup_pct
            )?;
            writeln!(writer)?;

            if !gc.allocation_hotspots.is_empty() {
                writeln!(writer, "| Optimize | Est. Savings | Impact |")?;
                writeln!(writer, "|----------|--------------|--------|")?;

                for hotspot in gc.allocation_hotspots.iter().take(5) {
                    // Attribute savings proportionally to how often this
                    // function co-occurred with GC samples.
                    let hotspot_savings_us =
                        (hotspot.gc_correlation / 100.0 * potential_savings_us as f64) as u64;
                    let hotspot_impact_pct = if total_time > 0 {
                        (hotspot_savings_us as f64 / total_time as f64) * 100.0
                    } else {
                        0.0
                    };

                    if hotspot_savings_us > 0 {
                        writeln!(
                            writer,
                            "| `{}` | {} | {:.1}% faster |",
                            Self::escape_markdown(&hotspot.name),
                            format_time_us(hotspot_savings_us),
                            hotspot_impact_pct
                        )?;
                    }
                }
                writeln!(writer)?;

                writeln!(
                    writer,
                    "> **Note:** Estimates assume optimizing each function eliminates its GC contribution."
                )?;
                writeln!(
                    writer,
                    "> Actual savings depend on allocation patterns and may overlap between functions."
                )?;
                writeln!(writer)?;
            }
        }

        if !gc.allocation_hotspots.is_empty() {
            writeln!(writer, "### Allocation Hotspots")?;
            writeln!(writer)?;
            writeln!(
                writer,
                "> Functions frequently on the call stack during GC — likely allocating heavily."
            )?;
            writeln!(writer)?;

            writeln!(
                writer,
                "| Function | GC Correlation | GC Samples | Category |"
            )?;
            writeln!(
                writer,
                "|----------|----------------|------------|----------|"
            )?;

            for hotspot in &gc.allocation_hotspots {
                writeln!(
                    writer,
                    "| `{}` | {:.0}% | {} | {} |",
                    Self::escape_markdown(&hotspot.name),
                    hotspot.gc_correlation,
                    hotspot.gc_samples,
                    Self::category_badge(hotspot.category)
                )?;
            }
            writeln!(writer)?;
        }

        if gc_pct > 5.0 {
            writeln!(writer, "### Optimization Strategies")?;
            writeln!(writer)?;

            if !gc.allocation_hotspots.is_empty() {
                let top = &gc.allocation_hotspots[0];
                let top_savings = (top.gc_correlation / 100.0 * potential_savings_us as f64) as u64;
                // NOTE(review): top.name is not passed through escape_markdown
                // here, unlike the tables above — confirm intentional.
                writeln!(
                    writer,
                    "**Priority target: `{}`** ({:.0}% of GC events, ~{} potential savings)",
                    top.name,
                    top.gc_correlation,
                    format_time_us(top_savings)
                )?;
                writeln!(writer)?;
            }

            writeln!(writer, "**Common fixes:**")?;
            writeln!(
                writer,
                "- **Object reuse**: Pool frequently created objects instead of allocating new ones"
            )?;
            writeln!(
                writer,
                "- **Avoid closures in loops**: Each closure allocates; move them outside hot paths"
            )?;
            writeln!(
                writer,
                "- **Use typed arrays**: `Float64Array` instead of `[]` for numeric data"
            )?;
            writeln!(
                writer,
                "- **Batch operations**: Reduce intermediate array/object creation"
            )?;
            writeln!(
                writer,
                "- **String concatenation**: Use array join or template literals instead of `+` in loops"
            )?;

            if gc_pct > 10.0 {
                writeln!(writer)?;
                writeln!(writer, "**For severe GC pressure (>10%):**")?;
                writeln!(
                    writer,
                    "- Increase heap with `node --max-old-space-size=4096` (if memory allows)"
                )?;
                writeln!(
                    writer,
                    "- Profile heap with `profile-inspect heap` to find large allocators"
                )?;
            }
            writeln!(writer)?;
        }

        Ok(())
    }
1372
    /// Writes the "Timing Phase Analysis" section, contrasting the startup
    /// phase against steady state: a top-functions table per phase plus
    /// category-based insights (V8-heavy startup, app-dominated steady state).
    #[expect(clippy::cast_precision_loss)]
    fn write_phase_analysis(
        writer: &mut dyn Write,
        phases: &crate::analysis::PhaseAnalysis,
    ) -> Result<(), OutputError> {
        writeln!(writer, "## Timing Phase Analysis")?;
        writeln!(writer)?;
        writeln!(
            writer,
            "> Separates startup overhead from steady-state performance."
        )?;
        writeln!(writer)?;

        let startup = &phases.startup;
        // Assumes end_us >= start_us for each phase window (u64 subtraction
        // would underflow otherwise) — TODO confirm invariant upstream.
        let startup_duration = format_time_us(startup.end_us - startup.start_us);
        let startup_pct = if phases.total_duration_us > 0 {
            ((startup.end_us - startup.start_us) as f64 / phases.total_duration_us as f64) * 100.0
        } else {
            0.0
        };

        writeln!(
            writer,
            "### Startup Phase ({}, {:.1}% of profile)",
            startup_duration, startup_pct
        )?;
        writeln!(writer)?;

        if !startup.top_functions.is_empty() {
            writeln!(writer, "| Function | Self Time | % | Category |")?;
            writeln!(writer, "|----------|-----------|---|----------|")?;
            for func in &startup.top_functions {
                writeln!(
                    writer,
                    "| `{}` | {} | {:.1}% | {} |",
                    Self::escape_markdown(&func.name),
                    format_time_us(func.self_time),
                    func.percent,
                    Self::category_badge(func.category)
                )?;
            }
            writeln!(writer)?;
        }

        // Startup insight: a mostly V8/native startup usually reflects module
        // loading/compilation rather than app work.
        let total_startup = startup.category_breakdown.total();
        if total_startup > 0 {
            let v8_native =
                startup.category_breakdown.v8_internal + startup.category_breakdown.native;
            let v8_pct = (v8_native as f64 / total_startup as f64) * 100.0;
            if v8_pct > 50.0 {
                writeln!(
                    writer,
                    "**Startup insight:** {:.0}% V8/Native — typical for module loading/compilation",
                    v8_pct
                )?;
                writeln!(writer)?;
            }
        }

        let steady = &phases.steady_state;
        let steady_duration = format_time_us(steady.end_us - steady.start_us);

        writeln!(writer, "### Steady State ({})", steady_duration)?;
        writeln!(writer)?;

        if !steady.top_functions.is_empty() {
            writeln!(writer, "| Function | Self Time | % | Category |")?;
            writeln!(writer, "|----------|-----------|---|----------|")?;
            for func in &steady.top_functions {
                writeln!(
                    writer,
                    "| `{}` | {} | {:.1}% | {} |",
                    Self::escape_markdown(&func.name),
                    format_time_us(func.self_time),
                    func.percent,
                    Self::category_badge(func.category)
                )?;
            }
            writeln!(writer)?;
        }

        // Steady-state insight: app share at least doubling vs startup means
        // user code dominates once warm-up is done.
        let total_steady = steady.category_breakdown.total();
        if total_startup > 0 && total_steady > 0 {
            let startup_app_pct =
                (startup.category_breakdown.app as f64 / total_startup as f64) * 100.0;
            let steady_app_pct =
                (steady.category_breakdown.app as f64 / total_steady as f64) * 100.0;

            if steady_app_pct > startup_app_pct * 2.0 {
                writeln!(
                    writer,
                    "**Steady state insight:** App code increases from {:.0}% to {:.0}% — good, your code dominates runtime",
                    startup_app_pct, steady_app_pct
                )?;
                writeln!(writer)?;
            }
        }

        writeln!(writer, "---")?;
        writeln!(writer)?;

        Ok(())
    }
1480
1481 fn write_recursive_functions(
1482 writer: &mut dyn Write,
1483 analysis: &CpuAnalysis,
1484 ) -> Result<(), OutputError> {
1485 writeln!(writer, "## Recursive Functions")?;
1486 writeln!(writer)?;
1487 writeln!(
1488 writer,
1489 "> Functions that call themselves. Deep recursion can cause stack overflow and performance issues."
1490 )?;
1491 writeln!(writer)?;
1492
1493 writeln!(
1494 writer,
1495 "| Function | Max Depth | Stacks with Recursion | Location |"
1496 )?;
1497 writeln!(
1498 writer,
1499 "|----------|-----------|----------------------|----------|"
1500 )?;
1501
1502 for func in &analysis.recursive_functions {
1503 let rec_pct = if func.total_samples > 0 {
1506 ((func.recursive_samples as f64 / func.total_samples as f64) * 100.0).min(100.0)
1507 } else {
1508 0.0
1509 };
1510
1511 writeln!(
1512 writer,
1513 "| `{}` | {} | {} ({:.0}% of appearances) | `{}` |",
1514 Self::escape_markdown(&func.name),
1515 func.max_depth,
1516 func.recursive_samples,
1517 rec_pct,
1518 Self::escape_markdown(&func.location)
1519 )?;
1520 }
1521
1522 writeln!(writer)?;
1523 writeln!(writer, "**Optimization tips for recursive functions:**")?;
1524 writeln!(
1525 writer,
1526 "- Consider iterative alternatives using explicit stack"
1527 )?;
1528 writeln!(
1529 writer,
1530 "- Add memoization if computing same values repeatedly"
1531 )?;
1532 writeln!(writer, "- Check for accidental infinite recursion patterns")?;
1533 writeln!(writer)?;
1534 writeln!(writer, "---")?;
1535 writeln!(writer)?;
1536
1537 Ok(())
1538 }
1539
    /// Writes the key-takeaway bullet list: which category dominates self
    /// time (with call-flow context), the single top hotspot, and a GC
    /// overhead note when GC exceeds 5%.
    #[expect(clippy::cast_precision_loss)]
    fn write_key_takeaways(
        writer: &mut dyn Write,
        analysis: &CpuAnalysis,
    ) -> Result<(), OutputError> {
        let breakdown = &analysis.category_breakdown;
        let inclusive = &analysis.category_breakdown_inclusive;
        let flow = &analysis.category_call_flow;
        let total = breakdown.total();

        // Nothing sampled: no takeaways to write.
        if total == 0 {
            return Ok(());
        }

        let app_pct = (breakdown.app as f64 / total as f64) * 100.0;
        let deps_pct = (breakdown.deps as f64 / total as f64) * 100.0;
        let native_pct = ((breakdown.v8_internal + breakdown.native) as f64 / total as f64) * 100.0;

        // Time each category triggers in *other* categories (call-flow out-edges).
        let app_triggers: u64 = flow
            .callees_for(FrameCategory::App)
            .iter()
            .map(|(_, t)| *t)
            .sum();
        let node_triggers: u64 = flow
            .callees_for(FrameCategory::NodeInternal)
            .iter()
            .map(|(_, t)| *t)
            .sum();

        // First matching takeaway wins; branches are ordered most-specific first.
        if app_pct > 50.0 {
            writeln!(
                writer,
                "- App code dominates ({:.0}% self) — focus optimization on your code",
                app_pct
            )?;
        } else if deps_pct > 20.0 {
            // Inclusive (stack-presence) share can exceed 100%, hence the clamp.
            let inclusive_pct = (inclusive.deps as f64 / total as f64) * 100.0;
            writeln!(
                writer,
                "- Dependencies: {:.0}% self, {:.0}% stack presence — review which packages are expensive",
                deps_pct,
                inclusive_pct.min(100.0)
            )?;
        } else if native_pct > 70.0 {
            // Disambiguate V8/native dominance by who calls into it most:
            // Node internals (module loading) vs app code (addons/compilation).
            let node_to_native: u64 = flow
                .callees_for(FrameCategory::NodeInternal)
                .iter()
                .filter(|(cat, _)| {
                    *cat == FrameCategory::Native || *cat == FrameCategory::V8Internal
                })
                .map(|(_, t)| *t)
                .sum();
            let app_to_native: u64 = flow
                .callees_for(FrameCategory::App)
                .iter()
                .filter(|(cat, _)| {
                    *cat == FrameCategory::Native || *cat == FrameCategory::V8Internal
                })
                .map(|(_, t)| *t)
                .sum();

            if node_to_native > app_to_native {
                writeln!(
                    writer,
                    "- V8/Native dominates ({:.0}%) via Node.js internals — likely module loading/compilation",
                    native_pct
                )?;
            } else {
                writeln!(
                    writer,
                    "- V8/Native dominates ({:.0}%) — check for native addon work or heavy compilation",
                    native_pct
                )?;
            }
        } else if app_triggers > breakdown.app * 5 {
            // App self time is small but it fans out heavily into other categories.
            writeln!(
                writer,
                "- App code ({:.0}% self) triggers {} in other categories — optimize hot call sites",
                app_pct,
                format_time_us(app_triggers)
            )?;
        } else if node_triggers > total / 3 {
            writeln!(
                writer,
                "- Node.js internals trigger {} — likely I/O or module loading",
                format_time_us(node_triggers)
            )?;
        }

        // Call out the single hottest function when it is above 5% self time.
        // Assumes analysis.functions is sorted hottest-first — TODO confirm.
        if let Some(top) = analysis.functions.first() {
            let pct = top.self_percent(analysis.total_time);
            if pct > 5.0 {
                writeln!(
                    writer,
                    "- Top hotspot: `{}` at {:.1}% self time",
                    top.name, pct
                )?;
            }
        }

        // GC note: prefer the detailed analysis (with hotspot attribution);
        // fall back to the raw gc_time counter when it is absent.
        if let Some(ref gc) = analysis.gc_analysis {
            let gc_pct = (gc.total_time as f64 / analysis.total_time as f64) * 100.0;
            if gc_pct > 5.0 {
                if let Some(top) = gc.allocation_hotspots.first() {
                    writeln!(
                        writer,
                        "- GC overhead at {:.1}% — `{}` may be allocating heavily ({:.0}% correlation)",
                        gc_pct, top.name, top.gc_correlation
                    )?;
                } else {
                    writeln!(
                        writer,
                        "- GC overhead at {:.1}% — investigate allocation patterns",
                        gc_pct
                    )?;
                }
            }
        } else if analysis.gc_time > 0 {
            let gc_pct = (analysis.gc_time as f64 / analysis.total_time as f64) * 100.0;
            if gc_pct > 5.0 {
                writeln!(
                    writer,
                    "- GC overhead at {:.1}% — may indicate allocation pressure",
                    gc_pct
                )?;
            }
        }

        Ok(())
    }
1679
1680 fn category_badge(category: FrameCategory) -> &'static str {
1681 match category {
1682 FrameCategory::App => "App",
1683 FrameCategory::Deps => "Deps",
1684 FrameCategory::NodeInternal => "Node",
1685 FrameCategory::V8Internal => "V8",
1686 FrameCategory::Native => "Native",
1687 }
1688 }
1689
1690 fn classify_workload(cpu_util_pct: f64, profiles_merged: usize) -> String {
1692 let merged_note = if profiles_merged > 1 {
1693 ", aggregated across processes"
1694 } else {
1695 ""
1696 };
1697
1698 if cpu_util_pct >= 80.0 {
1699 format!(
1700 "CPU-bound (~{:.0}% utilization{})",
1701 cpu_util_pct, merged_note
1702 )
1703 } else if cpu_util_pct <= 50.0 {
1704 format!(
1705 "I/O or wait-bound (~{:.0}% CPU utilization{}). CPU profiling may miss the full picture.",
1706 cpu_util_pct, merged_note
1707 )
1708 } else {
1709 format!(
1710 "Mixed (~{:.0}% CPU utilization{})",
1711 cpu_util_pct, merged_note
1712 )
1713 }
1714 }
1715
1716 #[expect(clippy::cast_precision_loss)]
1718 fn top_category_summary(breakdown: &crate::analysis::CategoryBreakdown, total: u64) -> String {
1719 let v8_native = breakdown.v8_internal + breakdown.native;
1720 let categories = [
1721 ("V8/Native", v8_native),
1722 ("App", breakdown.app),
1723 ("Dependencies", breakdown.deps),
1724 ("Node internals", breakdown.node_internal),
1725 ];
1726
1727 let (top_name, top_time) = categories
1728 .iter()
1729 .max_by_key(|(_, t)| *t)
1730 .unwrap_or(&("Unknown", 0));
1731
1732 let top_pct = (*top_time as f64 / total as f64) * 100.0;
1733
1734 let insight = match *top_name {
1735 "V8/Native" if top_pct > 70.0 => {
1736 " — engine/runtime frames dominate (often startup/GC/JIT)"
1737 }
1738 "V8/Native" => " — engine/runtime frames (not necessarily native code)",
1739 "Dependencies" if top_pct > 40.0 => " — heavy library usage",
1740 "Dependencies" => "",
1741 "App" if top_pct > 50.0 => " — your code dominates, good optimization target",
1742 "App" => " — your code",
1743 "Node internals" => " — module loading/runtime setup",
1744 _ => "",
1745 };
1746
1747 format!(
1748 "{} at {:.0}% self (exclusive){}",
1749 top_name, top_pct, insight
1750 )
1751 }
1752
1753 fn escape_markdown(s: &str) -> String {
1754 s.replace('|', "\\|").replace('`', "\\`")
1755 }
1756
1757 fn format_percent(pct: f64) -> String {
1762 if pct >= 1.0 {
1763 format!("{:.1}%", pct)
1764 } else if pct >= 0.1 {
1765 format!("{:.2}%", pct)
1766 } else if pct > 0.0 {
1767 "<0.1%".to_string()
1768 } else {
1769 "0%".to_string()
1770 }
1771 }
1772
1773 fn format_location(location: &str) -> String {
1781 let path = location.strip_prefix("file://").unwrap_or(location);
1783
1784 if let Some(nm_idx) = path.rfind("node_modules/") {
1786 let after_nm = &path[nm_idx + 13..]; let (pkg_name, rest) = if after_nm.starts_with('@') {
1790 let parts: Vec<&str> = after_nm.splitn(3, '/').collect();
1792 if parts.len() >= 3 {
1793 (format!("{}/{}", parts[0], parts[1]), parts[2].to_string())
1794 } else {
1795 (after_nm.to_string(), String::new())
1796 }
1797 } else {
1798 let parts: Vec<&str> = after_nm.splitn(2, '/').collect();
1800 if parts.len() >= 2 {
1801 (parts[0].to_string(), parts[1].to_string())
1802 } else {
1803 (after_nm.to_string(), String::new())
1804 }
1805 };
1806
1807 let file_part = Self::extract_file_and_line(&rest);
1809 if file_part.is_empty() {
1810 return pkg_name;
1811 }
1812 return format!("{pkg_name} » {file_part}");
1813 }
1814
1815 if path.starts_with("node:") {
1817 return path.to_string();
1818 }
1819
1820 for marker in &[
1823 "/src/",
1824 "/lib/",
1825 "/dist/",
1826 "/build/",
1827 "/apps/",
1828 "/packages/",
1829 ] {
1830 if let Some(idx) = path.find(marker) {
1831 return Self::extract_file_and_line(&path[idx + 1..]);
1832 }
1833 }
1834
1835 Self::extract_file_and_line(path)
1837 }
1838
1839 fn extract_file_and_line(path: &str) -> String {
1842 let (path_part, line_col) = Self::split_line_col(path);
1844
1845 let filename = path_part.rsplit('/').next().unwrap_or(path_part);
1847
1848 let display_path = if path_part.contains('/') {
1850 let parts: Vec<&str> = path_part.rsplitn(3, '/').collect();
1851 if parts.len() >= 2 && parts[1].len() < 20 {
1852 format!("{}/{}", parts[1], parts[0])
1853 } else {
1854 filename.to_string()
1855 }
1856 } else {
1857 filename.to_string()
1858 };
1859
1860 if let Some(line) = line_col {
1861 format!("{display_path}:{line}")
1862 } else {
1863 display_path
1864 }
1865 }
1866
1867 fn split_line_col(path: &str) -> (&str, Option<u32>) {
1870 let mut parts = path.rsplitn(3, ':');
1872 let last = parts.next();
1873 let second = parts.next();
1874 let rest = parts.next();
1875
1876 match (rest, second, last) {
1877 (Some(path), Some(line), Some(_col)) => {
1878 (path, line.parse().ok())
1880 }
1881 (None, Some(path), Some(line_or_col)) => {
1882 if line_or_col.chars().all(|c| c.is_ascii_digit()) {
1884 (path, line_or_col.parse().ok())
1885 } else {
1886 (path.rsplit_once(':').map_or(path, |(p, _)| p), None)
1888 }
1889 }
1890 _ => (path, None),
1891 }
1892 }
1893
    /// Renders one hot path as an indented call tree: leading internal
    /// frames are skipped, at most 8 frames are shown, and the deepest
    /// displayed frame is tagged as the hotspot.
    fn write_hot_path_visualization(
        writer: &mut dyn Write,
        profile: &ProfileIR,
        path: &HotPath,
    ) -> Result<(), OutputError> {
        // Resolve frame ids to frames; unknown ids are silently dropped.
        let frames: Vec<_> = path
            .frames
            .iter()
            .filter_map(|&fid| profile.get_frame(fid))
            .collect();

        // Start at the first non-internal frame so the tree opens with
        // user-meaningful code; fall back to the root if all are internal.
        let start_idx = frames
            .iter()
            .position(|f| !f.category.is_internal())
            .unwrap_or(0);

        let display_frames: Vec<_> = frames.iter().skip(start_idx).take(8).collect();

        for (i, frame) in display_frames.iter().enumerate() {
            // One space of indent per depth level; the root gets no arrow.
            let indent = " ".repeat(i);
            let arrow = if i > 0 { "└─ " } else { "" };
            // The last displayed frame is the leaf of the (possibly
            // truncated) path — mark it as the hotspot.
            let hotspot = if i == display_frames.len() - 1 {
                " ← HOTSPOT"
            } else {
                ""
            };
            let location = Self::format_location(&frame.location());
            writeln!(
                writer,
                "{indent}{arrow}{} ({location}){hotspot}",
                frame.display_name()
            )?;
        }

        // Note how many trailing frames the 8-frame cap cut off.
        if frames.len() > display_frames.len() + start_idx {
            writeln!(
                writer,
                " ... ({} frames omitted)",
                frames.len() - display_frames.len() - start_idx
            )?;
        }

        Ok(())
    }
1941
    /// Writes the "Why this path is hot" bullet list for one hot path,
    /// collecting heuristic reasons: leaf self time, share of samples,
    /// file-system/network markers on the stack, and native-addon vs
    /// native-runtime involvement.
    #[expect(clippy::cast_precision_loss)]
    fn write_path_explanation(
        writer: &mut dyn Write,
        profile: &ProfileIR,
        path: &HotPath,
        analysis: &CpuAnalysis,
    ) -> Result<(), OutputError> {
        let mut reasons = Vec::new();

        // Reason 1: the leaf frame itself burns noticeable (>1%) self time.
        if let Some(&leaf_id) = path.frames.last() {
            if let Some(func) = analysis.functions.iter().find(|f| f.frame_id == leaf_id) {
                let self_pct = func.self_percent(analysis.total_time);
                if self_pct > 1.0 {
                    reasons.push(format!(
                        "Leaf function `{}` has {:.1}% self time",
                        func.name, self_pct
                    ));
                }
            }
        }

        // Reason 2: the path appears in a meaningful share of samples.
        let total_samples = analysis.total_samples;
        if total_samples > 0 {
            let path_sample_pct = (path.sample_count as f64 / total_samples as f64) * 100.0;
            if path_sample_pct > 1.0 {
                reasons.push(format!("Appears in {:.1}% of samples", path_sample_pct));
            }
        }

        // Reason 3: file-system markers, matched against both frame names
        // and locations (substring match, so e.g. "stat" also hits
        // "statSync" — intentionally broad).
        let fs_keywords = [
            "fs:",
            "readFile",
            "writeFile",
            "stat",
            "readdir",
            "createReadStream",
            "createWriteStream",
            "readdirSync",
            "statSync",
            "readFileSync",
            "existsSync",
            "accessSync",
        ];
        let has_fs = path.frames.iter().any(|&fid| {
            profile.get_frame(fid).is_some_and(|f| {
                let name = f.display_name();
                let location = f.location();
                fs_keywords
                    .iter()
                    .any(|kw| name.contains(kw) || location.contains(kw))
            })
        });

        // Network detection keys off Node builtin module locations only.
        let has_net = path.frames.iter().any(|&fid| {
            profile.get_frame(fid).is_some_and(|f| {
                let location = f.location();
                location.contains("node:net")
                    || location.contains("node:dns")
                    || location.contains("node:http")
                    || location.contains("node:https")
                    || location.contains("node:tls")
                    || location.contains("node:dgram")
            })
        });

        if has_fs && has_net {
            reasons.push("File system and network activity on stack".to_string());
        } else if has_fs {
            reasons.push("File system activity on stack (stat/readdir/path ops)".to_string());
        } else if has_net {
            reasons.push("Network activity on stack".to_string());
        }

        // Reason 4: native frames — distinguish N-API addons (napi_* symbols,
        // .node binaries, /binding. paths) from the runtime's own native work.
        let mut has_native_addon = false;
        let mut has_native_runtime = false;

        for &fid in &path.frames {
            if let Some(f) = profile.get_frame(fid) {
                if f.kind == FrameKind::Native {
                    let name = f.display_name();
                    let location = f.location();

                    if name.contains("napi_")
                        || location.ends_with(".node")
                        || location.contains("/binding.")
                    {
                        has_native_addon = true;
                    } else if f.category == FrameCategory::Native && !name.starts_with('(') {
                        // Parenthesized names are excluded — presumably
                        // synthetic marker frames; confirm against the IR.
                        has_native_runtime = true;
                    }
                }
            }
        }

        if has_native_addon {
            reasons.push("Calls native addon (C++/Rust via N-API)".to_string());
        } else if has_native_runtime {
            reasons.push("Includes Node/V8 native operations".to_string());
        }

        // Fallback so the section never renders empty.
        if reasons.is_empty() {
            reasons.push("This call sequence accumulates time across samples".to_string());
        }

        writeln!(writer, "**Why this path is hot:**")?;
        for reason in reasons {
            writeln!(writer, "- {reason}")?;
        }

        Ok(())
    }
2063
2064 #[expect(clippy::cast_precision_loss)]
2065 fn write_hot_function_detail(
2066 writer: &mut dyn Write,
2067 detail: &HotFunctionDetail,
2068 analysis: &CpuAnalysis,
2069 ) -> Result<(), OutputError> {
2070 let profile_total_time = analysis.total_time;
2071 let self_time = format_time_us(detail.self_time);
2072 let self_pct = if profile_total_time > 0 {
2073 (detail.self_time as f64 / profile_total_time as f64) * 100.0
2074 } else {
2075 0.0
2076 };
2077
2078 writeln!(
2079 writer,
2080 "### `{}` ({} self, {:.1}%)",
2081 detail.name, self_time, self_pct
2082 )?;
2083 writeln!(writer, "Location: `{}`", detail.location)?;
2084 writeln!(writer)?;
2085
2086 if !detail.callers.is_empty() {
2088 writeln!(writer, "**Top callers:**")?;
2089 writeln!(writer, "| Caller | Time | Calls |")?;
2090 writeln!(writer, "|--------|------|-------|")?;
2091
2092 for caller in detail.callers.iter().take(5) {
2093 writeln!(
2094 writer,
2095 "| `{}` | {} | {} |",
2096 Self::escape_markdown(&caller.name),
2097 format_time_us(caller.time),
2098 caller.call_count
2099 )?;
2100 }
2101 writeln!(writer)?;
2102 }
2103
2104 if !detail.callees.is_empty() {
2106 writeln!(writer, "**Top callees inside:**")?;
2107 writeln!(writer, "| Callee | Self | Total | Calls |")?;
2108 writeln!(writer, "|--------|------|-------|-------|")?;
2109
2110 for callee in detail.callees.iter().take(5) {
2111 writeln!(
2112 writer,
2113 "| `{}` | {} | {} | {} |",
2114 Self::escape_markdown(&callee.name),
2115 format_time_us(callee.self_time),
2116 format_time_us(callee.total_time),
2117 callee.call_count
2118 )?;
2119 }
2120 writeln!(writer)?;
2121 }
2122
2123 if detail.callers.len() == 1 && detail.self_time > profile_total_time / 100 {
2125 writeln!(
2126 writer,
2127 "**Call pattern signal:** Single caller — if result is deterministic, consider memoization."
2128 )?;
2129 writeln!(writer)?;
2130 } else if detail.callers.len() > 3 {
2131 writeln!(
2132 writer,
2133 "**Call pattern signal:** Called from {} different sites — hot utility function.",
2134 detail.callers.len()
2135 )?;
2136 writeln!(writer)?;
2137 }
2138
2139 Ok(())
2140 }
2141
    /// Writes the "Action Items" section driven by the recommendation
    /// engine: key insights, quick wins, the remaining recommendations
    /// grouped by priority (critical/high in full detail, medium as
    /// summaries capped at 5), and open investigation items. When the
    /// engine produced nothing, falls back to generic guidance keyed on
    /// CPU utilization.
    #[expect(clippy::cast_precision_loss)]
    fn write_recommendations(
        writer: &mut dyn Write,
        profile: &ProfileIR,
        analysis: &CpuAnalysis,
    ) -> Result<(), OutputError> {
        let report = RecommendationEngine::analyze(profile, analysis);

        writeln!(writer, "## Action Items")?;
        writeln!(writer)?;

        if report.recommendations.is_empty() {
            // No concrete target found. Unknown utilization defaults to 100%
            // (unwrap_or(1.0)), i.e. the CPU-bound advice branch.
            let cpu_util = analysis.metadata.cpu_utilization().unwrap_or(1.0) * 100.0;
            let is_cpu_bound = cpu_util >= 80.0;

            writeln!(writer, "**No dominant CPU hotspot detected in App code.**")?;
            writeln!(writer)?;

            if is_cpu_bound {
                writeln!(
                    writer,
                    "CPU usage is high ({:.0}%) but distributed across dependencies and runtime. To improve performance:",
                    cpu_util
                )?;
                writeln!(
                    writer,
                    "- **Reduce filesystem CPU cost:** cache config/path resolution, avoid repeated `stat`/`readdir`"
                )?;
                writeln!(
                    writer,
                    "- **Minimize parser/transform passes:** batch operations, reuse AST where possible"
                )?;
                writeln!(
                    writer,
                    "- **Review dependency usage:** check if heavy deps can be replaced or lazily loaded"
                )?;
                writeln!(
                    writer,
                    "- **Profile under sustained load:** startup overhead may dominate short runs"
                )?;
            } else {
                writeln!(
                    writer,
                    "CPU utilization is low ({:.0}%), indicating the process spent time waiting. Consider:",
                    cpu_util
                )?;
                writeln!(
                    writer,
                    "- **I/O latency:** check file system, network, or database wait times"
                )?;
                writeln!(
                    writer,
                    "- **Async bottlenecks:** look for sequential awaits that could be parallelized"
                )?;
                writeln!(
                    writer,
                    "- **Tool orchestration:** time spent in `npx`, package managers, or build tools"
                )?;
                writeln!(
                    writer,
                    "- **Use tracing:** CPU profiles can't measure wait time; consider `--trace-event-categories`"
                )?;
            }
            writeln!(writer)?;
            return Ok(());
        }

        if !report.insights.is_empty() {
            writeln!(writer, "### Key Insights")?;
            writeln!(writer)?;
            for insight in &report.insights {
                writeln!(writer, "- {insight}")?;
            }
            writeln!(writer)?;
        }

        // Quick wins are rendered first, as summaries; quick_wins holds
        // indices into report.recommendations.
        if !report.quick_wins.is_empty() {
            writeln!(writer, "### Quick Wins")?;
            writeln!(writer)?;
            writeln!(
                writer,
                "> High-impact improvements that are easy to implement"
            )?;
            writeln!(writer)?;
            for &idx in &report.quick_wins {
                if let Some(rec) = report.recommendations.get(idx) {
                    Self::write_recommendation_summary(writer, rec, analysis.total_time)?;
                }
            }
            writeln!(writer)?;
        }

        // Group the rest by priority, skipping anything already shown
        // under Quick Wins.
        let quick_win_set: std::collections::HashSet<_> = report.quick_wins.iter().collect();
        let critical: Vec<_> = report
            .recommendations
            .iter()
            .enumerate()
            .filter(|(i, r)| r.priority == Priority::Critical && !quick_win_set.contains(i))
            .map(|(_, r)| r)
            .collect();
        let high: Vec<_> = report
            .recommendations
            .iter()
            .enumerate()
            .filter(|(i, r)| r.priority == Priority::High && !quick_win_set.contains(i))
            .map(|(_, r)| r)
            .collect();
        let medium: Vec<_> = report
            .recommendations
            .iter()
            .enumerate()
            .filter(|(i, r)| r.priority == Priority::Medium && !quick_win_set.contains(i))
            .map(|(_, r)| r)
            .collect();

        if !critical.is_empty() {
            writeln!(writer, "### Critical Priority")?;
            writeln!(writer)?;
            for rec in critical {
                Self::write_recommendation_detail(writer, rec, analysis.total_time)?;
            }
        }

        if !high.is_empty() {
            writeln!(writer, "### High Priority")?;
            writeln!(writer)?;
            for rec in high {
                Self::write_recommendation_detail(writer, rec, analysis.total_time)?;
            }
        }

        // Medium items are summarized and capped at 5 to keep the report short.
        if !medium.is_empty() {
            writeln!(writer, "### Medium Priority")?;
            writeln!(writer)?;
            for rec in &medium[..medium.len().min(5)] {
                Self::write_recommendation_summary(writer, rec, analysis.total_time)?;
            }
            if medium.len() > 5 {
                writeln!(
                    writer,
                    "*...and {} more medium-priority items*",
                    medium.len() - 5
                )?;
            }
            writeln!(writer)?;
        }

        if !report.investigations.is_empty() {
            writeln!(writer, "### Needs Investigation")?;
            writeln!(writer)?;
            for item in &report.investigations {
                writeln!(writer, "- {item}")?;
            }
            writeln!(writer)?;
        }

        Ok(())
    }
2308
2309 #[expect(clippy::cast_precision_loss)]
2311 fn write_recommendation_summary(
2312 writer: &mut dyn Write,
2313 rec: &Recommendation,
2314 total_time: u64,
2315 ) -> Result<(), OutputError> {
2316 let savings_str = format_time_us(rec.estimated_savings_us);
2317 let savings_pct = rec.savings_percent(total_time);
2318
2319 writeln!(
2320 writer,
2321 "- **{}** — *{} potential savings ({:.1}% faster)*",
2322 rec.title, savings_str, savings_pct
2323 )?;
2324 writeln!(writer, " - {}", rec.root_cause)?;
2325 writeln!(writer, " - Effort: {}", rec.effort)?;
2326 writeln!(writer)?;
2327
2328 Ok(())
2329 }
2330
2331 #[expect(clippy::cast_precision_loss)]
2333 fn write_recommendation_detail(
2334 writer: &mut dyn Write,
2335 rec: &Recommendation,
2336 total_time: u64,
2337 ) -> Result<(), OutputError> {
2338 let savings_str = format_time_us(rec.estimated_savings_us);
2339 let savings_pct = rec.savings_percent(total_time);
2340 let current_str = format_time_us(rec.current_time_us);
2341
2342 writeln!(
2344 writer,
2345 "#### {} `{}`",
2346 Self::priority_icon(rec.priority),
2347 rec.title
2348 )?;
2349 writeln!(writer)?;
2350
2351 writeln!(writer, "| Metric | Value |")?;
2353 writeln!(writer, "|--------|-------|")?;
2354 writeln!(writer, "| Current time | {} |", current_str)?;
2355 writeln!(
2356 writer,
2357 "| Potential savings | {} ({:.1}% faster) |",
2358 savings_str, savings_pct
2359 )?;
2360 writeln!(writer, "| Effort | {} |", rec.effort)?;
2361 writeln!(writer, "| Type | {} |", rec.issue_type)?;
2362 writeln!(writer)?;
2363
2364 writeln!(
2366 writer,
2367 "**Location:** `{}`",
2368 Self::format_location(&rec.location)
2369 )?;
2370 writeln!(writer)?;
2371
2372 writeln!(writer, "**Why:** {}", rec.root_cause)?;
2374 writeln!(writer)?;
2375
2376 writeln!(writer, "**Actions:**")?;
2378 for action in &rec.actions {
2379 writeln!(writer, "- {action}")?;
2380 }
2381 writeln!(writer)?;
2382
2383 if !rec.code_patterns.is_empty() {
2385 writeln!(writer, "**Look for:**")?;
2386 for pattern in &rec.code_patterns {
2387 writeln!(writer, "- `{pattern}`")?;
2388 }
2389 writeln!(writer)?;
2390 }
2391
2392 if !rec.evidence.is_empty() {
2394 writeln!(writer, "<details>")?;
2395 writeln!(writer, "<summary>Evidence from profile</summary>")?;
2396 writeln!(writer)?;
2397 for evidence in &rec.evidence {
2398 writeln!(writer, "- {evidence}")?;
2399 }
2400 writeln!(writer)?;
2401 writeln!(writer, "</details>")?;
2402 writeln!(writer)?;
2403 }
2404
2405 Ok(())
2406 }
2407
2408 fn priority_icon(priority: Priority) -> &'static str {
2409 match priority {
2410 Priority::Critical => "🔴",
2411 Priority::High => "🟠",
2412 Priority::Medium => "🟡",
2413 Priority::Low => "🟢",
2414 }
2415 }
2416}