use std::collections::HashMap;
use std::path::PathBuf;
/// One `sourcing <path>` line parsed from a `--startuptime` log.
#[derive(Debug, Clone, PartialEq)]
pub struct SourceEntry {
    /// Normalized (forward-slash, trimmed) path of the sourced script.
    pub path: String,
    /// Time spent in the script itself (third number on the log line, ms).
    pub self_ms: f64,
    /// Self + nested sourcing time (second number on the log line, ms).
    pub sourced_ms: f64,
}
/// Aggregated startup cost attributed to one plugin or pseudo-group.
#[derive(Debug, Clone)]
pub struct PluginStats {
    /// Plugin name, or one of the `GROUP_*` pseudo-group labels.
    pub name: String,
    /// Sum (after averaging: mean) of per-file self times (ms).
    pub total_self_ms: f64,
    /// Sum (after averaging: mean) of per-file self+sourced times (ms).
    pub total_sourced_ms: f64,
    /// Number of sourced files attributed to this owner in one run
    /// (after averaging: the max across runs).
    pub file_count: usize,
    /// Per-file stats, kept sorted by descending `self_ms`.
    pub top_files: Vec<FileStat>,
    /// True when the owner is a managed plugin root rather than a group.
    pub is_managed: bool,
    /// True when the plugin is configured as lazy-loaded.
    pub lazy: bool,
    /// Duration of the plugin's `init-<name>` marker span (ms).
    pub init_ms: f64,
    /// Load time; for eager managed plugins this mirrors `total_self_ms`.
    pub load_ms: f64,
    /// Duration of the plugin's `trig-<name>` marker span (ms).
    pub trig_ms: f64,
}
/// Timing for a single sourced file, relative to its owner's root.
#[derive(Debug, Clone)]
pub struct FileStat {
    /// Path relative to the owning plugin/group root (or a basename
    /// for runtime files).
    pub relative_path: String,
    /// Self time for this file (ms).
    pub self_ms: f64,
    /// Self + nested sourcing time for this file (ms).
    pub sourced_ms: f64,
}
/// Duration of one named startup phase (from begin/end marker pairs).
#[derive(Debug, Clone, Default)]
pub struct PhaseTime {
    /// Phase label, e.g. `phase-3`.
    pub name: String,
    /// Elapsed time between the begin and end markers (ms, clamped >= 0).
    pub duration_ms: f64,
}
/// Final result of a profiling session, averaged over `runs` runs.
#[derive(Debug, Clone)]
pub struct ProfileReport {
    /// Number of startup runs that were averaged.
    pub runs: usize,
    /// Mean total startup time across runs (ms).
    pub total_startup_ms: f64,
    /// Per-plugin/group stats, sorted by descending `total_self_ms`.
    pub plugins: Vec<PluginStats>,
    /// First line of `nvim --version` output, when probing succeeded.
    pub nvim_version: Option<String>,
    /// Averaged phase durations; `None` when no marker dir was configured.
    pub phase_timeline: Option<Vec<PhaseTime>>,
    /// Echoed from the run config.
    pub no_merge: bool,
    /// Echoed from the run config.
    pub no_instrument: bool,
}
pub fn parse_startuptime(content: &str) -> Vec<SourceEntry> {
let mut entries = Vec::new();
for line in content.lines() {
let Some(entry) = parse_startuptime_line(line) else {
continue;
};
entries.push(entry);
}
entries
}
/// Parse one log line of the shape
/// `<clock> <self+sourced> <self>: sourcing <path>` into a `SourceEntry`.
///
/// Returns `None` unless the text before the first `:` contains exactly
/// three parseable numbers and the tail is a `sourcing` line. Note the
/// column meanings: the second number is self+sourced, the third is self.
fn parse_startuptime_line(line: &str) -> Option<SourceEntry> {
    let (head, tail) = line.split_once(':')?;
    let mut nums = head
        .split_whitespace()
        .filter_map(|tok| tok.parse::<f64>().ok());
    let (_clock, sourced, self_time) = (nums.next()?, nums.next()?, nums.next()?);
    // A fourth number means this is not a well-formed sourcing line.
    if nums.next().is_some() {
        return None;
    }
    let path = extract_source_path(tail.trim_start())?;
    Some(SourceEntry {
        path: normalize_path(&path),
        self_ms: self_time,
        sourced_ms: sourced,
    })
}
/// Return the path portion of a `sourcing <path>` tail, or `None` for
/// any other tail (e.g. `require('...')` lines).
fn extract_source_path(tail: &str) -> Option<String> {
    tail.strip_prefix("sourcing ")
        .map(|path| path.trim().to_owned())
}
/// Trim surrounding whitespace and convert Windows backslashes so all
/// paths compare with forward slashes.
fn normalize_path(p: &str) -> String {
    p.trim()
        .chars()
        .map(|c| if c == '\\' { '/' } else { c })
        .collect()
}
/// A managed plugin and the on-disk root used to attribute sourced files.
#[derive(Debug, Clone)]
pub struct PluginPathEntry {
    /// Display name of the plugin.
    pub name: String,
    /// Filesystem root the plugin's files live under.
    pub root: String,
    /// Whether the plugin is lazy-loaded.
    pub lazy: bool,
}
// Pseudo-group labels used as the owner name for files that do not
// belong to a single managed plugin root.
/// Files sourced from the merged plugin directory.
pub const GROUP_MERGED: &str = "[merged]";
/// Fallback bucket for Neovim runtime / unattributed files.
pub const GROUP_RUNTIME: &str = "[runtime]";
/// The rvpm loader script itself.
pub const GROUP_LOADER: &str = "[rvpm loader]";
/// Files under the user's own configuration roots.
pub const GROUP_USER: &str = "[user config]";
/// Aggregate one run's source entries into per-owner `PluginStats`.
///
/// Ownership is resolved per entry (see `resolve_owner`): plugin roots
/// (deepest first), the merged dir, the loader file, user config roots,
/// then a `[runtime]` fallback. Matching is ASCII-case-insensitive.
/// Each owner's `top_files` list is sorted by descending self time.
pub fn aggregate_single_run(
    entries: &[SourceEntry],
    plugins: &[PluginPathEntry],
    merged_dir: &str,
    loader_path: &str,
    user_config_roots: &[String],
) -> HashMap<String, PluginStats> {
    // Normalize each plugin root once so the per-entry loop is cheap.
    let normalized_plugins: Vec<(String, PluginPathEntry)> = plugins
        .iter()
        .map(|p| (normalize_path(&p.root), p.clone()))
        .collect();
    // Longest root first, so a nested plugin wins over its parent dir.
    let mut sorted_plugins: Vec<&(String, PluginPathEntry)> = normalized_plugins.iter().collect();
    sorted_plugins.sort_by_key(|(root, _)| std::cmp::Reverse(root.len()));
    let merged = normalize_path(merged_dir);
    let loader = normalize_path(loader_path);
    let user_roots: Vec<String> = user_config_roots
        .iter()
        .map(|s| normalize_path(s))
        .filter(|s| !s.is_empty())
        .collect();
    let mut stats: HashMap<String, PluginStats> = HashMap::new();
    for entry in entries {
        let (owner_name, is_managed, lazy, rel) =
            resolve_owner(&entry.path, &sorted_plugins, &merged, &loader, &user_roots);
        // Lazily create the owner's stats entry, then accumulate.
        let s = stats
            .entry(owner_name.clone())
            .or_insert_with(|| PluginStats {
                name: owner_name,
                total_self_ms: 0.0,
                total_sourced_ms: 0.0,
                file_count: 0,
                top_files: Vec::new(),
                is_managed,
                init_ms: 0.0,
                load_ms: 0.0,
                trig_ms: 0.0,
                lazy,
            });
        s.total_self_ms += entry.self_ms;
        s.total_sourced_ms += entry.sourced_ms;
        s.file_count += 1;
        s.top_files.push(FileStat {
            relative_path: rel,
            self_ms: entry.self_ms,
            sourced_ms: entry.sourced_ms,
        });
    }
    // Sort each owner's files by descending self time (NaN treated equal).
    for s in stats.values_mut() {
        s.top_files.sort_by(|a, b| {
            b.self_ms
                .partial_cmp(&a.self_ms)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
    }
    stats
}
/// Decide which plugin or pseudo-group owns `path`.
///
/// Returns `(owner_name, is_managed, lazy, relative_path)`.
/// `sorted_plugins` must already be sorted longest-root-first so the
/// deepest matching root wins. All comparisons are ASCII-lowercased.
fn resolve_owner(
    path: &str,
    sorted_plugins: &[&(String, PluginPathEntry)],
    merged: &str,
    loader: &str,
    user_roots: &[String],
) -> (String, bool, bool, String) {
    let path_lc = path.to_ascii_lowercase();
    // 1) Managed plugin roots (deepest first).
    for (root, p) in sorted_plugins {
        let root_lc = root.to_ascii_lowercase();
        if path_starts_with(&path_lc, &root_lc) {
            let rel = strip_prefix_case_insensitive(path, root);
            return (p.name.clone(), true, p.lazy, rel);
        }
    }
    // 2) The merged plugin directory.
    let merged_lc = merged.to_ascii_lowercase();
    if path_starts_with(&path_lc, &merged_lc) {
        let rel = strip_prefix_case_insensitive(path, merged);
        return (GROUP_MERGED.to_string(), false, false, rel);
    }
    // 3) The loader script itself (exact path match only).
    if path_lc == loader.to_ascii_lowercase() {
        return (
            GROUP_LOADER.to_string(),
            false,
            false,
            "loader.lua".to_string(),
        );
    }
    // 4) User config roots, deepest first. Re-sorted on every call; the
    // list is expected to be tiny, so this is cheap.
    let mut sorted_user_roots: Vec<&String> = user_roots.iter().collect();
    sorted_user_roots.sort_by_key(|r| std::cmp::Reverse(r.len()));
    for user_root in sorted_user_roots {
        let user_lc = user_root.to_ascii_lowercase();
        if path_starts_with(&path_lc, &user_lc) {
            let rel = strip_prefix_case_insensitive(path, user_root);
            return (GROUP_USER.to_string(), false, false, rel);
        }
    }
    // 5) Everything else is bucketed as runtime, keyed by basename.
    let basename = path.rsplit('/').next().unwrap_or(path).to_string();
    (GROUP_RUNTIME.to_string(), false, false, basename)
}
/// Directory-aware prefix test: `prefix` must match a whole path
/// segment boundary, so `/foo/bar` matches `/foo/bar/x` and `/foo/bar`
/// but not `/foo/barbaz`.
fn path_starts_with(path: &str, prefix: &str) -> bool {
    match path.strip_prefix(prefix) {
        None => false,
        // A trailing slash on the prefix already guarantees a boundary.
        Some(_) if prefix.ends_with('/') => true,
        Some(rest) => rest.is_empty() || rest.starts_with('/'),
    }
}
/// Drop the first `prefix.len()` bytes of `path` (plus any leading
/// slashes of the remainder), falling back to the whole path when the
/// prefix is longer than the path.
///
/// Uses `str::get` instead of direct slicing: the caller matches on
/// ASCII-lowercased copies, so with non-ASCII paths `prefix.len()` may
/// not land on a char boundary of `path`; `get` returns `None` there
/// instead of panicking, and we fall back to the untouched path.
fn strip_prefix_case_insensitive(path: &str, prefix: &str) -> String {
    match path.get(prefix.len()..) {
        Some(rest) => rest.trim_start_matches('/').to_string(),
        None => path.to_string(),
    }
}
/// Merge per-run stats maps into one list averaged over `runs`.
///
/// Totals and phase times are divided by `runs`; `file_count` takes the
/// per-run maximum rather than an average. Per-file times are summed by
/// relative path across all runs, then averaged, so a file missing in
/// one run still divides by the full run count. Output is sorted by
/// descending `total_self_ms`. Returns an empty list when `runs == 0`.
pub fn average_stats(
    runs_stats: Vec<HashMap<String, PluginStats>>,
    runs: usize,
) -> Vec<PluginStats> {
    if runs == 0 {
        return Vec::new();
    }
    let mut merged: HashMap<String, PluginStats> = HashMap::new();
    // owner name -> relative path -> (self_ms sum, sourced_ms sum)
    let mut files_acc: HashMap<String, HashMap<String, (f64, f64)>> = HashMap::new();
    for single in runs_stats {
        for (name, s) in single {
            let entry = merged.entry(name.clone()).or_insert_with(|| PluginStats {
                name: s.name.clone(),
                total_self_ms: 0.0,
                total_sourced_ms: 0.0,
                file_count: 0,
                top_files: Vec::new(),
                is_managed: s.is_managed,
                lazy: s.lazy,
                init_ms: 0.0,
                load_ms: 0.0,
                trig_ms: 0.0,
            });
            entry.total_self_ms += s.total_self_ms;
            entry.total_sourced_ms += s.total_sourced_ms;
            entry.init_ms += s.init_ms;
            entry.load_ms += s.load_ms;
            entry.trig_ms += s.trig_ms;
            // Keep the max file count seen in any single run.
            entry.file_count = entry.file_count.max(s.file_count);
            let file_map = files_acc.entry(name).or_default();
            for f in &s.top_files {
                let e = file_map
                    .entry(f.relative_path.clone())
                    .or_insert((0.0, 0.0));
                e.0 += f.self_ms;
                e.1 += f.sourced_ms;
            }
        }
    }
    let mut out: Vec<PluginStats> = merged
        .into_iter()
        .map(|(name, mut s)| {
            s.total_self_ms /= runs as f64;
            s.total_sourced_ms /= runs as f64;
            s.init_ms /= runs as f64;
            s.load_ms /= runs as f64;
            s.trig_ms /= runs as f64;
            // Rebuild the averaged per-file list, sorted by self time.
            if let Some(file_map) = files_acc.remove(&name) {
                let mut files: Vec<FileStat> = file_map
                    .into_iter()
                    .map(|(path, (self_sum, sourced_sum))| FileStat {
                        relative_path: path,
                        self_ms: self_sum / runs as f64,
                        sourced_ms: sourced_sum / runs as f64,
                    })
                    .collect();
                files.sort_by(|a, b| {
                    b.self_ms
                        .partial_cmp(&a.self_ms)
                        .unwrap_or(std::cmp::Ordering::Equal)
                });
                s.top_files = files;
            }
            s
        })
        .collect();
    out.sort_by(|a, b| {
        b.total_self_ms
            .partial_cmp(&a.total_self_ms)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    out
}
/// A marker-stub sourcing event extracted from the startup log.
#[derive(Debug, Clone, PartialEq)]
pub struct MarkerEvent {
    /// Marker name (file path under the marker dir, `.vim` stripped).
    pub name: String,
    /// Absolute clock of the log line (first number, ms).
    pub clock_ms: f64,
}
/// Extract marker events: `sourcing` entries whose path lies under
/// `marker_dir_normalized`.
///
/// The event name is the path remainder with a trailing `.vim` removed;
/// `clock_ms` is the line's absolute clock (first number). Matching is
/// ASCII-case-insensitive and tolerant of a trailing slash on the dir.
pub fn parse_marker_events(content: &str, marker_dir_normalized: &str) -> Vec<MarkerEvent> {
    let prefix = normalize_path(marker_dir_normalized)
        .trim_end_matches('/')
        .to_string();
    let prefix_lc = prefix.to_ascii_lowercase();
    let mut events = Vec::new();
    for line in content.lines() {
        let Some((head, tail)) = line.split_once(':') else {
            continue;
        };
        // Same shape check as the main parser: exactly three numbers.
        let nums: Vec<f64> = head
            .split_whitespace()
            .filter_map(|s| s.parse::<f64>().ok())
            .collect();
        if nums.len() != 3 {
            continue;
        }
        let Some(rest) = tail.trim_start().strip_prefix("sourcing ") else {
            continue;
        };
        let path = normalize_path(rest.trim());
        let path_lc = path.to_ascii_lowercase();
        if !path_starts_with(&path_lc, &prefix_lc) {
            continue;
        }
        let rest_after = &path[prefix.len()..];
        let rest_after = rest_after.trim_start_matches('/');
        let name = rest_after.trim_end_matches(".vim").to_string();
        if name.is_empty() {
            continue;
        }
        events.push(MarkerEvent {
            name,
            clock_ms: nums[0],
        });
    }
    events
}
/// Pair `<phase>-begin` / `<phase>-end` markers into phase durations.
///
/// Only the known phases 3..=9 are reported, in that fixed order; a
/// phase missing either endpoint is skipped. Durations are clamped to
/// be non-negative, and a repeated begin/end for the same phase keeps
/// the last occurrence.
pub fn compute_phase_times(events: &[MarkerEvent]) -> Vec<PhaseTime> {
    use std::collections::HashMap;
    // phase -> (begin clock, end clock); later events overwrite earlier.
    let mut spans: HashMap<&str, (Option<f64>, Option<f64>)> = HashMap::new();
    for ev in events {
        if let Some(phase) = ev.name.strip_suffix("-begin") {
            spans.entry(phase).or_default().0 = Some(ev.clock_ms);
        } else if let Some(phase) = ev.name.strip_suffix("-end") {
            spans.entry(phase).or_default().1 = Some(ev.clock_ms);
        }
    }
    const ORDER: [&str; 7] = [
        "phase-3", "phase-4", "phase-5", "phase-6", "phase-7", "phase-8", "phase-9",
    ];
    ORDER
        .iter()
        .filter_map(|&phase| {
            let (begin, end) = spans.get(phase).copied()?;
            Some(PhaseTime {
                name: phase.to_string(),
                duration_ms: (end? - begin?).max(0.0),
            })
        })
        .collect()
}
/// Pair `init-<name>-begin/end` and `trig-<name>-begin/end` markers.
///
/// Returns `name -> (init_ms, trig_ms)`. An unpaired begin contributes
/// 0.0 for that component; names that only ever appear with an `-end`
/// marker are ignored entirely (the name set is built from begins).
pub fn compute_per_plugin_phase_times(
    events: &[MarkerEvent],
) -> std::collections::HashMap<String, (f64, f64)> {
    use std::collections::HashMap;
    let mut init_begin: HashMap<String, f64> = HashMap::new();
    let mut init_end: HashMap<String, f64> = HashMap::new();
    let mut trig_begin: HashMap<String, f64> = HashMap::new();
    let mut trig_end: HashMap<String, f64> = HashMap::new();
    for e in events {
        if let Some(rest) = e.name.strip_prefix("init-") {
            if let Some(name) = rest.strip_suffix("-begin") {
                init_begin.insert(name.to_string(), e.clock_ms);
            } else if let Some(name) = rest.strip_suffix("-end") {
                init_end.insert(name.to_string(), e.clock_ms);
            }
        } else if let Some(rest) = e.name.strip_prefix("trig-") {
            if let Some(name) = rest.strip_suffix("-begin") {
                trig_begin.insert(name.to_string(), e.clock_ms);
            } else if let Some(name) = rest.strip_suffix("-end") {
                trig_end.insert(name.to_string(), e.clock_ms);
            }
        }
    }
    let mut out: HashMap<String, (f64, f64)> = HashMap::new();
    // Every plugin that has at least one begin marker gets an entry.
    let mut names: std::collections::HashSet<String> = std::collections::HashSet::new();
    names.extend(init_begin.keys().cloned());
    names.extend(trig_begin.keys().cloned());
    for name in names {
        let i = match (init_begin.get(&name), init_end.get(&name)) {
            (Some(b), Some(e)) => (e - b).max(0.0),
            _ => 0.0,
        };
        let t = match (trig_begin.get(&name), trig_end.get(&name)) {
            (Some(b), Some(e)) => (e - b).max(0.0),
            _ => 0.0,
        };
        out.insert(name, (i, t));
    }
    out
}
/// Average several runs' phase timelines into one.
///
/// Output phases appear in first-seen order across the input timelines,
/// and each duration is the sum across runs divided by the number of
/// timelines (so a phase missing from some runs is diluted, not
/// re-normalized). Returns an empty timeline for empty input.
pub fn average_phase_timelines(timelines: Vec<Vec<PhaseTime>>) -> Vec<PhaseTime> {
    use std::collections::hash_map::Entry;
    use std::collections::HashMap;
    if timelines.is_empty() {
        return Vec::new();
    }
    let runs = timelines.len() as f64;
    let mut acc: HashMap<String, f64> = HashMap::new();
    let mut order: Vec<String> = Vec::new();
    for timeline in &timelines {
        for p in timeline {
            // Single map lookup: record first-seen order on vacancy,
            // accumulate on occupancy (was contains_key + entry).
            match acc.entry(p.name.clone()) {
                Entry::Vacant(slot) => {
                    order.push(p.name.clone());
                    slot.insert(p.duration_ms);
                }
                Entry::Occupied(mut slot) => *slot.get_mut() += p.duration_ms,
            }
        }
    }
    order
        .into_iter()
        .map(|name| {
            let total = acc.get(&name).copied().unwrap_or(0.0);
            PhaseTime {
                name,
                duration_ms: total / runs,
            }
        })
        .collect()
}
/// Return the largest clock value (first number before the `:`) seen on
/// any log line — i.e. the total startup time — or 0.0 when no line
/// carries a parseable clock.
pub fn extract_total_ms(content: &str) -> f64 {
    content
        .lines()
        .filter_map(|line| {
            let (head, _) = line.split_once(':')?;
            // First token in the head that parses as a number.
            head.split_whitespace()
                .find_map(|tok| tok.parse::<f64>().ok())
        })
        .fold(0.0_f64, f64::max)
}
/// Run `nvim --headless --startuptime <tmpfile> [extra_args] +qa` once
/// and return the raw log contents plus the total startup time
/// extracted from it.
///
/// # Errors
/// Fails when the tempfile cannot be created, nvim cannot be spawned or
/// exits non-zero, the log comes back empty, or the 30s timeout fires.
pub async fn run_single_startuptime(extra_args: &[&str]) -> anyhow::Result<(String, f64)> {
    let tmp = tempfile::Builder::new()
        .prefix("rvpm-profile-")
        .suffix(".log")
        .tempfile()
        .map_err(|e| anyhow::anyhow!("failed to create startuptime tempfile: {}", e))?;
    let tmp_path = tmp.path().to_path_buf();
    let mut cmd = tokio::process::Command::new("nvim");
    // Ensure a wedged nvim is killed if this future is dropped.
    cmd.kill_on_drop(true);
    cmd.arg("--headless")
        .arg("--startuptime")
        .arg(&tmp_path)
        .args(extra_args)
        .arg("+qa");
    let timeout = std::time::Duration::from_secs(30);
    let out_result = tokio::time::timeout(timeout, cmd.output()).await;
    match out_result {
        Ok(Ok(out)) => {
            if !out.status.success() {
                let stderr = String::from_utf8_lossy(&out.stderr);
                anyhow::bail!(
                    "nvim exited with {} (stderr: {})",
                    out.status,
                    stderr.trim()
                );
            }
            // NOTE(review): blocking read inside an async fn; the log is
            // small so this is likely harmless — consider tokio::fs if
            // this ever runs on a constrained runtime.
            let content = std::fs::read_to_string(&tmp_path).unwrap_or_default();
            if content.is_empty() {
                anyhow::bail!("nvim produced empty --startuptime output");
            }
            let total = extract_total_ms(&content);
            // Tempfile is deleted here, after we have read its contents.
            drop(tmp);
            Ok((content, total))
        }
        Ok(Err(e)) => Err(anyhow::anyhow!("failed to spawn nvim: {}", e)),
        Err(_) => {
            anyhow::bail!("nvim --startuptime timed out after {:?}", timeout)
        }
    }
}
/// Return the first line of `nvim --version` output, or `None` on any
/// failure (spawn error, non-UTF-8 output handled lossily, or timeout).
///
/// Uses a short 2s timeout so a wedged nvim cannot stall reporting.
pub async fn probe_nvim_version() -> Option<String> {
    let timeout = std::time::Duration::from_secs(2);
    let mut builder = tokio::process::Command::new("nvim");
    builder.kill_on_drop(true);
    let cmd = builder.arg("--version").output();
    // Outer `ok()?` swallows the timeout, inner one a spawn failure.
    let out = tokio::time::timeout(timeout, cmd).await.ok()?.ok()?;
    let stdout = String::from_utf8_lossy(&out.stdout);
    stdout.lines().next().map(|s| s.trim().to_string())
}
/// Inputs controlling a profiling session (consumed by `run_profile`).
pub struct ProfileRunConfig {
    /// Number of startup runs to average; must be >= 1.
    pub runs: usize,
    /// Managed plugins whose roots drive file attribution.
    pub plugins: Vec<PluginPathEntry>,
    /// Directory holding merged plugin files (`[merged]` bucket).
    pub merged_dir: PathBuf,
    /// Path to the rvpm loader script (`[rvpm loader]` bucket).
    pub loader_path: PathBuf,
    /// User config roots bucketed under `[user config]`.
    pub user_config_roots: Vec<PathBuf>,
    /// Directory of marker stub scripts; enables phase/plugin timing
    /// when set.
    pub marker_dir: Option<PathBuf>,
    /// Echoed into the report; not otherwise used in this module.
    pub no_merge: bool,
    /// Echoed into the report; not otherwise used in this module.
    pub no_instrument: bool,
}
/// Run the full profiling session: `cfg.runs` nvim startups, per-run
/// aggregation, optional marker-based phase/plugin timing, then
/// averaging into a `ProfileReport`.
///
/// # Errors
/// Fails when `cfg.runs == 0` or any individual startup run fails.
pub async fn run_profile(cfg: ProfileRunConfig) -> anyhow::Result<ProfileReport> {
    if cfg.runs == 0 {
        anyhow::bail!("runs must be >= 1");
    }
    // Stringify configured paths once; aggregation works on strings.
    let merged_s = cfg.merged_dir.to_string_lossy().to_string();
    let loader_s = cfg.loader_path.to_string_lossy().to_string();
    let user_s: Vec<String> = cfg
        .user_config_roots
        .iter()
        .map(|p| p.to_string_lossy().to_string())
        .collect();
    let marker_s = cfg
        .marker_dir
        .as_ref()
        .map(|p| normalize_path(&p.to_string_lossy()));
    let mut totals = Vec::with_capacity(cfg.runs);
    let mut runs_stats = Vec::with_capacity(cfg.runs);
    let mut phase_timelines: Vec<Vec<PhaseTime>> = Vec::new();
    for i in 0..cfg.runs {
        // NOTE(review): extra_args is always empty here; presumably the
        // nvim environment (merge/instrument state) is prepared by the
        // caller before invoking this — confirm against call sites.
        let (content, total) = run_single_startuptime(&[])
            .await
            .map_err(|e| anyhow::anyhow!("profile run {}/{} failed: {}", i + 1, cfg.runs, e))?;
        totals.push(total);
        let entries = parse_startuptime(&content);
        let mut stats = aggregate_single_run(
            &entries,
            &cfg.plugins,
            &merged_s,
            &loader_s,
            user_s.as_slice(),
        );
        // Eager managed plugins: load time equals their self time.
        for s in stats.values_mut() {
            if s.is_managed && !s.lazy {
                s.load_ms = s.total_self_ms;
            }
        }
        if let Some(mdir) = &marker_s {
            let markers = parse_marker_events(&content, mdir);
            let phases = compute_phase_times(&markers);
            let per_plugin = compute_per_plugin_phase_times(&markers);
            // Plugins seen only via markers (no sourced files) still get
            // an all-zero stats entry so their init/trig times show up.
            for plugin in &cfg.plugins {
                let safe = crate::loader::sanitize_name(&plugin.name);
                if per_plugin.contains_key(&safe) && !stats.contains_key(&plugin.name) {
                    stats.insert(
                        plugin.name.clone(),
                        PluginStats {
                            name: plugin.name.clone(),
                            total_self_ms: 0.0,
                            total_sourced_ms: 0.0,
                            file_count: 0,
                            top_files: Vec::new(),
                            is_managed: true,
                            lazy: plugin.lazy,
                            init_ms: 0.0,
                            load_ms: 0.0,
                            trig_ms: 0.0,
                        },
                    );
                }
            }
            // Attach marker-derived init/trig times to managed plugins.
            for s in stats.values_mut() {
                if !s.is_managed {
                    continue;
                }
                let safe = crate::loader::sanitize_name(&s.name);
                if let Some((init, trig)) = per_plugin.get(&safe) {
                    s.init_ms = *init;
                    s.trig_ms = *trig;
                }
            }
            phase_timelines.push(phases);
        }
        runs_stats.push(stats);
    }
    let total_startup_ms = totals.iter().sum::<f64>() / cfg.runs as f64;
    let plugins_stats = average_stats(runs_stats, cfg.runs);
    let phase_timeline = if phase_timelines.is_empty() {
        None
    } else {
        Some(average_phase_timelines(phase_timelines))
    };
    let nvim_version = probe_nvim_version().await;
    Ok(ProfileReport {
        runs: cfg.runs,
        total_startup_ms,
        plugins: plugins_stats,
        nvim_version,
        phase_timeline,
        no_merge: cfg.no_merge,
        no_instrument: cfg.no_instrument,
    })
}
pub fn is_group_name(name: &str) -> bool {
matches!(
name,
GROUP_MERGED | GROUP_RUNTIME | GROUP_LOADER | GROUP_USER
)
}
/// Serialize a report to a JSON value for machine consumption.
///
/// `phase_timeline` serializes to `null` when markers were not
/// collected; plugin and file ordering is preserved from the report.
pub fn report_to_json(report: &ProfileReport) -> serde_json::Value {
    serde_json::json!({
        "runs": report.runs,
        "total_startup_ms": report.total_startup_ms,
        "nvim_version": report.nvim_version,
        "no_merge": report.no_merge,
        "no_instrument": report.no_instrument,
        "phase_timeline": report.phase_timeline.as_ref().map(|pts| pts.iter().map(|p| serde_json::json!({
            "name": p.name,
            "duration_ms": p.duration_ms,
        })).collect::<Vec<_>>()),
        "plugins": report.plugins.iter().map(|p| serde_json::json!({
            "name": p.name,
            "total_self_ms": p.total_self_ms,
            "total_sourced_ms": p.total_sourced_ms,
            "init_ms": p.init_ms,
            "load_ms": p.load_ms,
            "trig_ms": p.trig_ms,
            "file_count": p.file_count,
            "is_managed": p.is_managed,
            "lazy": p.lazy,
            "top_files": p.top_files.iter().map(|f| serde_json::json!({
                "path": f.relative_path,
                "self_ms": f.self_ms,
                "sourced_ms": f.sourced_ms,
            })).collect::<Vec<_>>(),
        })).collect::<Vec<_>>(),
    })
}
#[cfg(test)]
mod tests {
    use super::*;

    // Header/event lines (not exactly three numbers, or no "sourcing")
    // must be dropped by the parser.
    #[test]
    fn parse_skips_header_and_events() {
        let sample = "\
times in msec
clock self+sourced self: sourced script
clock elapsed: other lines
000.008 000.008: --- NVIM STARTING ---
000.110 000.102: event init
";
        let entries = parse_startuptime(sample);
        assert!(entries.is_empty(), "event lines should be ignored");
    }

    // Sourcing lines are parsed, backslashes normalized, require() skipped.
    #[test]
    fn parse_extracts_sourcing_entries() {
        let sample = "\
002.345 000.012 000.005: sourcing /home/me/.local/share/nvim/plugin/foo.lua
002.456 000.015 000.008: sourcing C:\\Users\\me\\plugins\\bar.vim
002.500 000.001 000.001: require('vim.shared')
";
        let entries = parse_startuptime(sample);
        assert_eq!(entries.len(), 2, "require lines are skipped");
        assert_eq!(entries[0].self_ms, 0.005);
        assert_eq!(entries[0].sourced_ms, 0.012);
        assert_eq!(entries[0].path, "/home/me/.local/share/nvim/plugin/foo.lua");
        assert_eq!(entries[1].self_ms, 0.008);
        assert_eq!(entries[1].path, "C:/Users/me/plugins/bar.vim");
    }

    // The first ':' ends the number head, so a drive colon in the path
    // does not confuse the parser.
    #[test]
    fn parse_handles_windows_drive_colon_in_path() {
        let line = "010.234 000.050 000.042: sourcing C:/foo/bar/baz.lua";
        let entry = parse_startuptime_line(line).expect("should parse");
        assert_eq!(entry.path, "C:/foo/bar/baz.lua");
        assert_eq!(entry.self_ms, 0.042);
    }

    // Total is the maximum clock value, not literally the last line.
    #[test]
    fn extract_total_returns_last_clock() {
        let sample = "\
000.100 000.100: event a
002.345 000.012 000.005: sourcing /foo.lua
005.678 000.015 000.008: sourcing /bar.lua
";
        assert!((extract_total_ms(sample) - 5.678).abs() < 1e-6);
    }

    // Small helper to build a PluginPathEntry for aggregation tests.
    fn plugin(name: &str, root: &str, lazy: bool) -> PluginPathEntry {
        PluginPathEntry {
            name: name.to_string(),
            root: root.to_string(),
            lazy,
        }
    }

    #[test]
    fn aggregate_attributes_file_to_matching_plugin() {
        let entries = vec![
            SourceEntry {
                path: "/cache/repos/github.com/owner/foo/plugin/foo.lua".into(),
                self_ms: 10.0,
                sourced_ms: 12.0,
            },
            SourceEntry {
                path: "/cache/repos/github.com/owner/foo/lua/foo/init.lua".into(),
                self_ms: 5.0,
                sourced_ms: 5.0,
            },
        ];
        let plugins = vec![plugin("foo", "/cache/repos/github.com/owner/foo", false)];
        let stats = aggregate_single_run(
            &entries,
            &plugins,
            "/cache/merged",
            "/cache/loader.lua",
            &["/config".to_string()],
        );
        let foo = stats.get("foo").expect("foo should exist");
        assert_eq!(foo.file_count, 2);
        assert!((foo.total_self_ms - 15.0).abs() < 1e-6);
        assert!(foo.is_managed);
        // top_files sorted by descending self_ms.
        assert_eq!(foo.top_files[0].relative_path, "plugin/foo.lua");
        assert_eq!(foo.top_files[1].relative_path, "lua/foo/init.lua");
    }

    // Files outside plugin roots fall into the pseudo-group buckets.
    #[test]
    fn aggregate_buckets_merged_and_runtime() {
        let entries = vec![
            SourceEntry {
                path: "/cache/merged/plugin/common.lua".into(),
                self_ms: 3.0,
                sourced_ms: 3.0,
            },
            SourceEntry {
                path: "/usr/share/nvim/runtime/plugin/foo.vim".into(),
                self_ms: 1.0,
                sourced_ms: 1.0,
            },
            SourceEntry {
                path: "/cache/loader.lua".into(),
                self_ms: 2.0,
                sourced_ms: 2.0,
            },
        ];
        let plugins = vec![];
        let stats = aggregate_single_run(
            &entries,
            &plugins,
            "/cache/merged",
            "/cache/loader.lua",
            &["/config".to_string()],
        );
        assert!(stats.contains_key(GROUP_MERGED));
        assert!(stats.contains_key(GROUP_RUNTIME));
        assert!(stats.contains_key(GROUP_LOADER));
        assert!((stats.get(GROUP_MERGED).unwrap().total_self_ms - 3.0).abs() < 1e-6);
    }

    // Longest-root-first sorting makes the nested plugin win.
    #[test]
    fn aggregate_prefers_deeper_plugin_path() {
        let entries = vec![SourceEntry {
            path: "/plugins/outer/inner/plugin/x.lua".into(),
            self_ms: 4.0,
            sourced_ms: 4.0,
        }];
        let plugins = vec![
            plugin("outer", "/plugins/outer", false),
            plugin("inner", "/plugins/outer/inner", false),
        ];
        let stats = aggregate_single_run(
            &entries,
            &plugins,
            "/cache/merged",
            "/cache/loader.lua",
            &["/config".to_string()],
        );
        assert!(stats.contains_key("inner"));
        assert!(!stats.contains_key("outer"));
    }

    // Prefix matching must respect path segment boundaries.
    #[test]
    fn path_starts_with_rejects_partial_segment() {
        assert!(!path_starts_with("/foo/barbaz/x", "/foo/bar"));
        assert!(path_starts_with("/foo/bar/x", "/foo/bar"));
        assert!(path_starts_with("/foo/bar", "/foo/bar"));
    }

    #[test]
    fn average_divides_by_runs_and_sorts_desc() {
        let mut run1 = HashMap::new();
        run1.insert(
            "a".to_string(),
            PluginStats {
                name: "a".into(),
                total_self_ms: 20.0,
                total_sourced_ms: 25.0,
                file_count: 2,
                top_files: vec![FileStat {
                    relative_path: "plugin/a.lua".into(),
                    self_ms: 20.0,
                    sourced_ms: 25.0,
                }],
                is_managed: true,
                lazy: false,
                init_ms: 0.0,
                load_ms: 0.0,
                trig_ms: 0.0,
            },
        );
        run1.insert(
            "b".to_string(),
            PluginStats {
                name: "b".into(),
                total_self_ms: 40.0,
                total_sourced_ms: 40.0,
                file_count: 1,
                top_files: vec![],
                is_managed: true,
                lazy: false,
                init_ms: 0.0,
                load_ms: 0.0,
                trig_ms: 0.0,
            },
        );
        let mut run2 = HashMap::new();
        run2.insert(
            "a".to_string(),
            PluginStats {
                name: "a".into(),
                total_self_ms: 10.0,
                total_sourced_ms: 15.0,
                file_count: 2,
                top_files: vec![],
                is_managed: true,
                lazy: false,
                init_ms: 0.0,
                load_ms: 0.0,
                trig_ms: 0.0,
            },
        );
        let avg = average_stats(vec![run1, run2], 2);
        // "b" appears in only one run but still divides by 2 -> 20.0.
        assert_eq!(avg[0].name, "b");
        assert!((avg[0].total_self_ms - 20.0).abs() < 1e-6);
        assert_eq!(avg[1].name, "a");
        assert!((avg[1].total_self_ms - 15.0).abs() < 1e-6);
    }

    #[test]
    fn extract_source_path_rejects_require_lines() {
        assert!(extract_source_path("require('foo')").is_none());
        assert_eq!(
            extract_source_path("sourcing /foo/bar.lua"),
            Some("/foo/bar.lua".to_string())
        );
    }

    // Only files under the marker dir become events; .vim is stripped.
    #[test]
    fn parse_marker_events_extracts_phase_markers() {
        let content = "\
010.100 000.005 000.005: sourcing /tmp/markers/phase-3-begin.vim
010.500 000.008 000.008: sourcing /tmp/markers/phase-3-end.vim
011.200 000.003 000.003: sourcing /tmp/markers/init-telescope-begin.vim
011.800 000.012 000.012: sourcing /tmp/markers/init-telescope-end.vim
020.000 000.010 000.010: sourcing /some/other/plugin.lua
";
        let events = parse_marker_events(content, "/tmp/markers");
        assert_eq!(events.len(), 4);
        assert_eq!(events[0].name, "phase-3-begin");
        assert_eq!(events[0].clock_ms, 10.100);
        assert_eq!(events[3].name, "init-telescope-end");
        assert!((events[3].clock_ms - 11.800).abs() < 1e-6);
    }

    #[test]
    fn compute_phase_times_pairs_begin_end() {
        let events = vec![
            MarkerEvent {
                name: "phase-3-begin".into(),
                clock_ms: 10.0,
            },
            MarkerEvent {
                name: "phase-3-end".into(),
                clock_ms: 15.0,
            },
            MarkerEvent {
                name: "phase-6-begin".into(),
                clock_ms: 20.0,
            },
            MarkerEvent {
                name: "phase-6-end".into(),
                clock_ms: 100.0,
            },
        ];
        let phases = compute_phase_times(&events);
        assert_eq!(phases.len(), 2);
        assert_eq!(phases[0].name, "phase-3");
        assert_eq!(phases[0].duration_ms, 5.0);
        assert_eq!(phases[1].name, "phase-6");
        assert_eq!(phases[1].duration_ms, 80.0);
    }

    // A begin without its matching end yields no phase entry.
    #[test]
    fn compute_phase_times_skips_unpaired() {
        let events = vec![
            MarkerEvent {
                name: "phase-3-begin".into(),
                clock_ms: 10.0,
            },
            MarkerEvent {
                name: "phase-3-end".into(),
                clock_ms: 12.0,
            },
            MarkerEvent {
                name: "phase-4-begin".into(),
                clock_ms: 13.0,
            },
        ];
        let phases = compute_phase_times(&events);
        assert_eq!(phases.len(), 1);
        assert_eq!(phases[0].name, "phase-3");
    }

    #[test]
    fn compute_per_plugin_phase_times_extracts_init_and_trig() {
        let events = vec![
            MarkerEvent {
                name: "init-alpha-begin".into(),
                clock_ms: 10.0,
            },
            MarkerEvent {
                name: "init-alpha-end".into(),
                clock_ms: 10.5,
            },
            MarkerEvent {
                name: "trig-beta-begin".into(),
                clock_ms: 20.0,
            },
            MarkerEvent {
                name: "trig-beta-end".into(),
                clock_ms: 20.3,
            },
        ];
        let pp = compute_per_plugin_phase_times(&events);
        assert!((pp["alpha"].0 - 0.5).abs() < 1e-6, "alpha init_ms");
        assert_eq!(pp["alpha"].1, 0.0, "alpha has no trig");
        assert!((pp["beta"].1 - 0.3).abs() < 1e-6, "beta trig_ms");
        assert_eq!(pp["beta"].0, 0.0, "beta has no init");
    }

    #[test]
    fn aggregate_accepts_multiple_user_config_roots() {
        let entries = vec![
            SourceEntry {
                path: "/home/me/.config/nvim/init.lua".into(),
                self_ms: 5.0,
                sourced_ms: 5.0,
            },
            SourceEntry {
                path: "/home/me/.config/rvpm/nvim/before.lua".into(),
                self_ms: 2.0,
                sourced_ms: 2.0,
            },
        ];
        let stats = aggregate_single_run(
            &entries,
            &[],
            "/cache/merged",
            "/cache/loader.lua",
            &[
                "/home/me/.config/nvim".to_string(),
                "/home/me/.config/rvpm/nvim".to_string(),
            ],
        );
        let u = stats
            .get(GROUP_USER)
            .expect("should bucket under [user config]");
        assert_eq!(u.file_count, 2);
        assert!(!stats.contains_key(GROUP_RUNTIME));
    }

    // Root matching is case-insensitive; the relative path keeps the
    // original casing of the entry path.
    #[test]
    fn aggregate_strips_prefix_case_insensitive_on_windows_paths() {
        let entries = vec![SourceEntry {
            path: "C:/Users/me/plugin/foo.lua".into(),
            self_ms: 1.0,
            sourced_ms: 1.0,
        }];
        let plugins = vec![PluginPathEntry {
            name: "foo".into(),
            root: "c:/users/me".into(),
            lazy: false,
        }];
        let stats = aggregate_single_run(
            &entries,
            &plugins,
            "/cache/merged",
            "/cache/loader.lua",
            &[],
        );
        let foo = stats.get("foo").expect("should match case-insensitive");
        assert_eq!(foo.top_files[0].relative_path, "plugin/foo.lua");
    }

    // Per-file times are summed by relative path, then averaged.
    #[test]
    fn average_stats_aggregates_top_files_across_runs() {
        let make_stats = |self_ms: f64| {
            let mut m = HashMap::new();
            m.insert(
                "plug".to_string(),
                PluginStats {
                    name: "plug".into(),
                    total_self_ms: self_ms,
                    total_sourced_ms: self_ms,
                    file_count: 1,
                    top_files: vec![FileStat {
                        relative_path: "plugin/x.lua".into(),
                        self_ms,
                        sourced_ms: self_ms,
                    }],
                    is_managed: true,
                    lazy: false,
                    init_ms: 0.0,
                    load_ms: 0.0,
                    trig_ms: 0.0,
                },
            );
            m
        };
        let avg = average_stats(vec![make_stats(10.0), make_stats(20.0)], 2);
        assert_eq!(avg.len(), 1);
        let plug = &avg[0];
        assert!((plug.total_self_ms - 15.0).abs() < 1e-6);
        assert_eq!(plug.top_files.len(), 1);
        assert!(
            (plug.top_files[0].self_ms - 15.0).abs() < 1e-6,
            "got {}",
            plug.top_files[0].self_ms
        );
    }

    // Averaging preserves the first-seen phase order across runs.
    #[test]
    fn average_phase_timelines_handles_multiple_runs() {
        let r1 = vec![
            PhaseTime {
                name: "phase-3".into(),
                duration_ms: 4.0,
            },
            PhaseTime {
                name: "phase-6".into(),
                duration_ms: 100.0,
            },
        ];
        let r2 = vec![
            PhaseTime {
                name: "phase-3".into(),
                duration_ms: 6.0,
            },
            PhaseTime {
                name: "phase-6".into(),
                duration_ms: 80.0,
            },
        ];
        let avg = average_phase_timelines(vec![r1, r2]);
        assert_eq!(avg.len(), 2);
        assert_eq!(avg[0].name, "phase-3");
        assert!((avg[0].duration_ms - 5.0).abs() < 1e-6);
        assert_eq!(avg[1].name, "phase-6");
        assert!((avg[1].duration_ms - 90.0).abs() < 1e-6);
    }
}