#![allow(unused_imports)]
#![allow(clippy::field_reassign_with_default)]
use std::collections::BTreeMap;
use std::path::Path;
use super::aggregate::{format_cpu_range, merge_aggregated_into};
use super::cgroup_merge::{
merge_cgroup_cpu, merge_cgroup_memory, merge_cgroup_pids, merge_kv_counters, merge_max_option,
merge_memory_stat, merge_min_option, merge_psi,
};
use super::columns::{compare_columns_for, format_cgroup_only_section_warning};
use super::compare::sort_diff_rows_by_keys;
use super::groups::build_row;
use super::pattern::{
Segment, apply_systemd_template, cgroup_normalize_skeleton, cgroup_skeleton_tokens,
classify_token, is_token_separator, pattern_counts_union, pattern_key, split_into_segments,
tighten_group,
};
use super::render::psi_pair_has_data;
use super::scale::{auto_scale, format_delta_cell};
use super::tests_fixtures::*;
use super::*;
use crate::ctprof::{CgroupStats, CtprofSnapshot, Psi, ThreadState};
use crate::metric_types::{
Bytes, CategoricalString, CpuSet, MonotonicCount, MonotonicNs, OrdinalI32, PeakNs,
};
use regex::Regex;
#[test]
fn pattern_key_strips_trailing_digits() {
    // Numeric runs after a separator collapse to `{N}`, including when
    // several such runs occur in one comm.
    let cases = [
        ("tokio-worker-12", "tokio-worker-{N}"),
        ("worker_5", "worker_{N}"),
        ("rayon.pool.7", "rayon.pool.{N}"),
        ("Chrome thread 4", "Chrome thread {N}"),
        ("pool-2-thread-7", "pool-{N}-thread-{N}"),
    ];
    for (input, expected) in cases {
        assert_eq!(pattern_key(input), expected);
    }
}
#[test]
fn pattern_key_bare_numeric_and_dangling_separator() {
    // A purely numeric comm collapses entirely; a trailing separator with no
    // token after it is kept literal.
    for (input, expected) in [("0", "{N}"), ("worker-", "worker-")] {
        assert_eq!(pattern_key(input), expected);
    }
}
#[test]
fn pattern_key_alpha_prefix_groups_without_separator() {
    // Digits glued directly onto an alphabetic prefix still normalize; a
    // digit-free word passes through unchanged.
    let cases = [
        ("CamelCaseWord0", "CamelCaseWord{N}"),
        ("CamelCaseWord175", "CamelCaseWord{N}"),
        ("worker7", "worker{N}"),
        ("cpu0", "cpu{N}"),
        ("init", "init"),
    ];
    for (input, expected) in cases {
        assert_eq!(pattern_key(input), expected);
    }
}
#[test]
fn pattern_key_single_letter_alpha_prefix_normalizes() {
    // Single-letter prefixes normalize like longer ones, except when the
    // whole token is hex-like ("a0" collapses to `{H}`).
    let cases = [
        ("gadget-v2", "gadget-v{N}"),
        ("thingo-r2", "thingo-r{N}"),
        ("t1", "t{N}"),
        ("a0", "{H}"),
        ("t-1", "t-{N}"),
        ("ab_5", "ab_{N}"),
    ];
    for (input, expected) in cases {
        assert_eq!(pattern_key(input), expected);
    }
}
#[test]
fn pattern_key_kworker_shapes_under_token_normalizer() {
    // kworker comm shapes: cpu:slot pairs, unbound (`u`) pools, `H` suffixes,
    // and workqueue-name decorations all normalize to stable patterns.
    let cases = [
        ("kworker/0:0", "kworker/{N}:{N}"),
        ("kworker/3:2", "kworker/{N}:{N}"),
        ("kworker/u8:3", "kworker/u{N}:{N}"),
        ("kworker/u8:7", "kworker/u{N}:{N}"),
        ("kworker/u16:0", "kworker/u{N}:{N}"),
        ("kworker/0:0-wq_reclaim", "kworker/{N}:{N}-wq_reclaim"),
        ("kworker/47:2-wq_reclaim", "kworker/{N}:{N}-wq_reclaim"),
        ("kworker/0:1H", "kworker/{N}:{N}H"),
        ("kworker/0:1H-wq_prio", "kworker/{N}:{N}H-wq_prio"),
    ];
    for (input, expected) in cases {
        assert_eq!(pattern_key(input), expected);
    }
}
#[test]
fn classify_token_digits_alpha_suffix_rule_4() {
    // Digits followed by a non-hex alpha suffix keep the suffix literal
    // ("{N}H", "{N}Hz"); when every character is a hex digit the whole token
    // collapses to `{H}` instead.
    let cases = [
        ("1H", "{N}H"),
        ("0H", "{N}H"),
        ("100Hz", "{N}Hz"),
        ("3z", "{N}z"),
        ("1a", "{H}"),
        ("0f", "{H}"),
        ("42abc", "{H}"),
        ("1aZ", "{N}aZ"),
        ("42xyz", "{N}xyz"),
        ("42", "{N}"),
    ];
    for (input, expected) in cases {
        assert_eq!(classify_token(input), expected);
    }
}
#[test]
fn pattern_key_empty_input_returns_empty() {
    // The empty comm maps to the empty pattern.
    assert!(pattern_key("").is_empty());
}
#[test]
fn build_groups_comm_produces_pattern_buckets_and_singleton() {
    // Two multi-member pattern buckets (8 workers, 4 rayon threads) plus one
    // singleton whose key reverts to the literal comm.
    let mut all_threads: Vec<_> = (0..8)
        .map(|i| make_thread("app", &format!("worker-{i}")))
        .chain((0..4).map(|i| make_thread("app", &format!("rayon-pool-{i}"))))
        .collect();
    all_threads.push(make_thread("app", "main"));
    let snapshot = snap_with(all_threads);
    let groups = build_groups(&snapshot, GroupBy::Comm, &[], None, None, false);
    assert!(
        groups.contains_key("worker-{N}"),
        "worker-{{N}} pattern bucket",
    );
    assert_eq!(groups["worker-{N}"].thread_count, 8);
    assert!(
        groups.contains_key("rayon-pool-{N}"),
        "rayon-pool-{{N}} pattern bucket",
    );
    assert_eq!(groups["rayon-pool-{N}"].thread_count, 4);
    assert!(
        groups.contains_key("main"),
        "singleton main reverts to literal comm",
    );
    assert_eq!(groups["main"].thread_count, 1);
    assert_eq!(groups.len(), 3);
}
#[test]
fn pattern_display_label_grex_for_multi_member_else_join_key() {
    // Zero or one member: the join key is echoed back verbatim. Two or more
    // members: a grex-derived label is produced instead.
    let single = vec!["worker-0".to_string()];
    assert_eq!(pattern_display_label("worker-0", &single), "worker-0");
    let none: Vec<String> = Vec::new();
    assert_eq!(pattern_display_label("worker", &none), "worker");
    let pair = vec!["worker-0".to_string(), "worker-1".to_string()];
    let label = pattern_display_label("worker", &pair);
    assert!(
        label.contains("worker"),
        "grex label must mention the shared prefix; got {label:?}",
    );
}
#[test]
fn compare_comm_pattern_emits_prefix_join_key_and_grex_display() {
    // Comm grouping: both snapshots fold into the `worker-{N}` join key, and
    // the display label is computed over the union of member comms.
    let opts = CompareOptions {
        group_by: GroupBy::Comm.into(),
        cgroup_flatten: vec![],
        no_thread_normalize: false,
        no_cg_normalize: false,
        sort_by: Vec::new(),
    };
    let baseline = snap_with(vec![
        make_thread("app", "worker-0"),
        make_thread("app", "worker-1"),
    ]);
    let candidate = snap_with(vec![
        make_thread("app", "worker-2"),
        make_thread("app", "worker-3"),
    ]);
    let diff = compare(&baseline, &candidate, &opts);
    let row = diff
        .rows
        .iter()
        .find(|r| r.metric_name == "run_time_ns" && r.group_key == "worker-{N}")
        .expect("worker-{N} row");
    assert_eq!(
        row.group_key, "worker-{N}",
        "join key is the placeholder pattern"
    );
    assert!(
        row.display_key.contains("worker"),
        "display key reflects grex (or fallback to join key) over union; got {:?}",
        row.display_key,
    );
    // Sorted, deduplicated union of the four member comms.
    let union_members: Vec<String> = (0..4).map(|i| format!("worker-{i}")).collect();
    let expected_label = pattern_display_label("worker-{N}", &union_members);
    assert_eq!(
        row.display_key, expected_label,
        "display label must match pattern_display_label over union"
    );
}
#[test]
fn compare_comm_pattern_joins_across_asymmetric_resize() {
    // One worker in baseline vs three in candidate must still join into a
    // single `worker-{N}` bucket, with no orphan keys on either side.
    let opts = CompareOptions {
        group_by: GroupBy::Comm.into(),
        cgroup_flatten: vec![],
        no_thread_normalize: false,
        no_cg_normalize: false,
        sort_by: Vec::new(),
    };
    let baseline = snap_with(vec![make_thread("app", "worker-7")]);
    let candidate = snap_with(
        (0..3)
            .map(|i| make_thread("app", &format!("worker-{i}")))
            .collect(),
    );
    let diff = compare(&baseline, &candidate, &opts);
    let row = diff
        .rows
        .iter()
        .find(|r| r.metric_name == "run_time_ns" && r.group_key == "worker-{N}")
        .expect("worker-{N} row joined across asymmetric snapshots");
    assert_eq!(row.thread_count_a, 1, "baseline carries 1 worker");
    assert_eq!(row.thread_count_b, 3, "candidate carries 3 workers");
    let baseline_orphans: Vec<&String> = diff
        .only_baseline
        .iter()
        .filter(|k| k.starts_with("worker"))
        .collect();
    assert!(
        baseline_orphans.is_empty(),
        "no worker-prefixed orphans in only_baseline; got {baseline_orphans:?}",
    );
    let candidate_orphans: Vec<&String> = diff
        .only_candidate
        .iter()
        .filter(|k| k.starts_with("worker"))
        .collect();
    assert!(
        candidate_orphans.is_empty(),
        "no worker-prefixed orphans in only_candidate; got {candidate_orphans:?}",
    );
}
#[test]
fn classify_token_pure_digits() {
    // All-digit tokens collapse to `{N}` regardless of length.
    for tok in ["0", "42", "999"] {
        assert_eq!(classify_token(tok), "{N}");
    }
}
#[test]
fn classify_token_hex_like() {
    // Tokens mixing digits and hex letters collapse to `{H}`; digit-free
    // words stay literal, and a short alpha prefix before digits still yields
    // the `{N}` form.
    let cases = [
        ("a1234", "{H}"),
        ("abc123def456", "{H}"),
        ("7890ab", "{H}"),
        ("1a2", "{H}"),
        ("650ab12cd34ef", "{H}"),
        ("abc", "abc"),
        ("a", "a"),
        ("a1", "{H}"),
        ("u8", "u{N}"),
    ];
    for (input, expected) in cases {
        assert_eq!(classify_token(input), expected);
    }
}
#[test]
fn classify_token_alpha_prefix_digits() {
    // Alpha prefix + trailing digits normalizes the digits; digits embedded
    // mid-word (proto303handler) leave the token literal.
    let cases = [
        ("worker7", "worker{N}"),
        ("CamelCaseWord175", "CamelCaseWord{N}"),
        ("u8", "u{N}"),
        ("u16", "u{N}"),
        ("v2", "v{N}"),
        ("r2", "r{N}"),
        ("1H", "{N}H"),
        ("3z", "{N}z"),
        ("proto303handler", "proto303handler"),
    ];
    for (input, expected) in cases {
        assert_eq!(classify_token(input), expected);
    }
}
#[test]
fn classify_token_literal_fallback() {
    // Tokens without digit content are returned unchanged.
    for tok in ["BPF", "CUBIC", "AUTO", "FLOWLABEL", "hamster", "zilch"] {
        assert_eq!(classify_token(tok), tok);
    }
}
#[test]
fn classify_token_empty_returns_empty() {
    // Empty input passes through untouched.
    assert!(classify_token("").is_empty());
}
#[test]
fn split_into_segments_alternates_token_and_separator_runs() {
    use Segment::{Separator, Token};
    // Empty input yields no segments; otherwise token and separator runs
    // alternate, with consecutive separator characters merged into one run
    // and a leading separator kept as its own segment.
    assert!(split_into_segments("").is_empty());
    assert_eq!(split_into_segments("hamster"), [Token("hamster")]);
    assert_eq!(
        split_into_segments("worker-7"),
        [Token("worker"), Separator("-"), Token("7")],
    );
    assert_eq!(
        split_into_segments("a..b"),
        [Token("a"), Separator(".."), Token("b")],
    );
    assert_eq!(split_into_segments("/abc"), [Separator("/"), Token("abc")]);
    assert_eq!(
        split_into_segments("yy._650"),
        [Token("yy"), Separator("._"), Token("650")],
    );
    assert_eq!(
        split_into_segments("kworker/0:1+events"),
        [
            Token("kworker"),
            Separator("/"),
            Token("0"),
            Separator(":"),
            Token("1"),
            Separator("+"),
            Token("events"),
        ],
    );
}
#[test]
fn pattern_key_kworker_active_decoration_separator() {
    // `+` and `-` decorations on kworker names must remain distinguishable
    // after normalization.
    assert_eq!(pattern_key("kworker/0:1+events"), "kworker/{N}:{N}+events");
    assert_eq!(pattern_key("kworker/1:0+events"), "kworker/{N}:{N}+events");
    assert_eq!(pattern_key("kworker/0:1-events"), "kworker/{N}:{N}-events");
    assert_ne!(
        pattern_key("kworker/0:1+events"),
        pattern_key("kworker/0:1-events"),
    );
}
#[test]
fn apply_systemd_template_opaque_id_to_placeholder() {
    // The `user@<uid>.service` instance id becomes `{I}`; the surrounding
    // `user-<uid>.slice` segment, dotted template instances like
    // `launcher@foo.bar.baz`, plain units, and the root path stay literal.
    let cases = [
        (
            "/user.slice/user-0.slice/user@0.service/boot.scope",
            "/user.slice/user-0.slice/user@{I}.service/boot.scope",
        ),
        (
            "/user.slice/user-1001.slice/user@1001.service/boot.scope",
            "/user.slice/user-1001.slice/user@{I}.service/boot.scope",
        ),
        (
            "/critical.slice/launcher@foo.bar.baz.service",
            "/critical.slice/launcher@foo.bar.baz.service",
        ),
        (
            "/system.slice/crond.service",
            "/system.slice/crond.service",
        ),
        ("/", "/"),
    ];
    for (input, expected) in cases {
        assert_eq!(apply_systemd_template(input), expected);
    }
}
#[test]
fn cgroup_tighten_keeps_instance_placeholders_when_constant() {
    // Self-compare of two instance paths: numeric runs tighten to `{N}` even
    // in segments (fluxcap9000_01, zz3) that are identical across members.
    let mk = |comm: &str, path: &str| {
        let mut t = make_thread("p", comm);
        t.cgroup = path.into();
        t
    };
    let snap = snap_with(vec![
        mk("ta", "/apps.slice/run-17.fluxcap9000_01.zz3"),
        mk("tb", "/apps.slice/run-22.fluxcap9000_01.zz3"),
    ]);
    let diff = compare(
        &snap,
        &snap,
        &CompareOptions {
            group_by: GroupBy::Cgroup.into(),
            cgroup_flatten: vec![],
            no_thread_normalize: false,
            no_cg_normalize: false,
            sort_by: Vec::new(),
        },
    );
    let group_keys: std::collections::BTreeSet<String> =
        diff.rows.iter().map(|r| r.group_key.clone()).collect();
    let expected = "/apps.slice/run-{N}.fluxcap{N}_{N}.zz{N}";
    assert!(
        group_keys.contains(expected),
        "tightened key {expected:?} missing; got {group_keys:?}",
    );
}
#[test]
fn cgroup_normalize_collapses_bracketed_hex_session_ids() {
    // End-to-end check that two session cgroup paths differing only in a
    // bracketed hex id collapse into a single `session-[{H}]` bucket. Three
    // layers are asserted in turn: the normalize skeleton, the cross-snapshot
    // key map, and the final diff (which must have no one-sided keys).
    let mut ta = make_thread("p", "ta");
    ta.cgroup = "/user.slice/session-[a1b2c3d4]/scope".into();
    let mut tb = make_thread("p", "tb");
    tb.cgroup = "/user.slice/session-[dead1234]/scope".into();
    let snap_a = snap_with(vec![ta]);
    let snap_b = snap_with(vec![tb]);
    // Layer 2: the skeleton collapses the bracketed hex token to `{H}`, while
    // the second returned path keeps the original literal text.
    let (skel_a, post_a, _) = cgroup_normalize_skeleton("/user.slice/session-[a1b2c3d4]/scope");
    let (skel_b, post_b, _) = cgroup_normalize_skeleton("/user.slice/session-[dead1234]/scope");
    assert_eq!(
        skel_a, "/user.slice/session-[{H}]/scope",
        "Layer-2 skeleton for path1 mismatch; got {skel_a:?}",
    );
    assert_eq!(
        skel_b, "/user.slice/session-[{H}]/scope",
        "Layer-2 skeleton for path2 mismatch; got {skel_b:?}",
    );
    assert_eq!(post_a, "/user.slice/session-[a1b2c3d4]/scope");
    assert_eq!(post_b, "/user.slice/session-[dead1234]/scope");
    // The key map built over both snapshots must send each literal path to
    // the shared skeleton so the two sides join on one key.
    let key_map = build_cgroup_key_map(&snap_a, &snap_b, &[]);
    assert_eq!(
        key_map.get("/user.slice/session-[a1b2c3d4]/scope"),
        Some(&"/user.slice/session-[{H}]/scope".to_string()),
        "key_map must resolve path1 to the tightened skeleton",
    );
    assert_eq!(
        key_map.get("/user.slice/session-[dead1234]/scope"),
        Some(&"/user.slice/session-[{H}]/scope".to_string()),
        "key_map must resolve path2 to the tightened skeleton",
    );
    let diff = compare(
        &snap_a,
        &snap_b,
        &CompareOptions {
            group_by: GroupBy::Cgroup.into(),
            cgroup_flatten: vec![],
            no_thread_normalize: false,
            no_cg_normalize: false,
            sort_by: Vec::new(),
        },
    );
    // Final layer: the diff contains the collapsed bucket and neither side
    // reports an orphaned (unjoined) cgroup key.
    let group_keys: std::collections::BTreeSet<String> =
        diff.rows.iter().map(|r| r.group_key.clone()).collect();
    let expected = "/user.slice/session-[{H}]/scope";
    assert!(
        group_keys.contains(expected),
        "missing bracketed-hex cgroup bucket {expected:?}; got {group_keys:?}; \
         diff.only_baseline={:?}; diff.only_candidate={:?}",
        diff.only_baseline,
        diff.only_candidate,
    );
    assert!(
        diff.only_baseline.is_empty(),
        "no orphans under bracketed-hex collapse, got {:?}",
        diff.only_baseline,
    );
    assert!(
        diff.only_candidate.is_empty(),
        "no orphans under bracketed-hex collapse, got {:?}",
        diff.only_candidate,
    );
}
#[test]
fn pattern_key_normalizes_bracketed_digits() {
    // Bracketed numeric suffixes normalize to `[{N}]`; hex-like bracket
    // contents normalize to `[{H}]`.
    let cases = [
        ("worker[42]", "worker[{N}]"),
        ("systemd-network[105904]", "systemd-network[{N}]"),
        ("bash[4242]", "bash[{N}]"),
        ("dev[1ab]", "dev[{H}]"),
    ];
    for (input, expected) in cases {
        assert_eq!(pattern_key(input), expected);
    }
}
#[test]
fn split_into_segments_treats_brackets_as_separators() {
    use Segment::{Separator, Token};
    // Brackets act as separators: `[` merges into an adjacent separator run
    // and the closing `]` becomes a trailing separator segment.
    assert_eq!(
        split_into_segments("worker[42]"),
        [
            Token("worker"),
            Separator("["),
            Token("42"),
            Separator("]"),
        ],
    );
    assert_eq!(
        split_into_segments("a-[1]"),
        [Token("a"), Separator("-["), Token("1"), Separator("]")],
    );
}
#[test]
fn is_token_separator_includes_brackets() {
    // Both square brackets count as token separators.
    for bracket in ['[', ']'] {
        assert!(is_token_separator(bracket));
    }
}
#[test]
fn compare_pcomm_pattern_joins_across_asymmetric_resize() {
    // Pcomm grouping: one worker process in baseline vs two in candidate must
    // join into a single `worker-{N}` bucket with no orphan keys.
    let opts = CompareOptions {
        group_by: GroupBy::Pcomm.into(),
        cgroup_flatten: vec![],
        no_thread_normalize: false,
        no_cg_normalize: false,
        sort_by: Vec::new(),
    };
    let baseline = snap_with(vec![make_thread("worker-7", "t0")]);
    let candidate = snap_with(vec![
        make_thread("worker-0", "t0"),
        make_thread("worker-1", "t1"),
    ]);
    let diff = compare(&baseline, &candidate, &opts);
    let row = diff
        .rows
        .iter()
        .find(|r| r.metric_name == "run_time_ns" && r.group_key == "worker-{N}")
        .expect("worker-{N} pcomm row joined across asymmetric snapshots");
    assert_eq!(row.thread_count_a, 1, "baseline carries 1 worker process");
    assert_eq!(
        row.thread_count_b, 2,
        "candidate carries 2 worker processes"
    );
    let baseline_orphans: Vec<&String> = diff
        .only_baseline
        .iter()
        .filter(|k| k.starts_with("worker"))
        .collect();
    assert!(
        baseline_orphans.is_empty(),
        "no worker-prefixed pcomm orphans in only_baseline; got {baseline_orphans:?}",
    );
    let candidate_orphans: Vec<&String> = diff
        .only_candidate
        .iter()
        .filter(|k| k.starts_with("worker"))
        .collect();
    assert!(
        candidate_orphans.is_empty(),
        "no worker-prefixed pcomm orphans in only_candidate; got {candidate_orphans:?}",
    );
}
#[test]
fn compare_pcomm_pattern_emits_prefix_join_key_and_grex_display() {
    // Pcomm grouping: the join key is the normalized `worker-{N}` pattern and
    // the display label is computed over the union of member pcomms.
    let opts = CompareOptions {
        group_by: GroupBy::Pcomm.into(),
        cgroup_flatten: vec![],
        no_thread_normalize: false,
        no_cg_normalize: false,
        sort_by: Vec::new(),
    };
    let baseline = snap_with(vec![
        make_thread("worker-0", "t0"),
        make_thread("worker-1", "t1"),
    ]);
    let candidate = snap_with(vec![
        make_thread("worker-2", "t0"),
        make_thread("worker-3", "t1"),
    ]);
    let diff = compare(&baseline, &candidate, &opts);
    let row = diff
        .rows
        .iter()
        .find(|r| r.metric_name == "run_time_ns" && r.group_key == "worker-{N}")
        .expect("worker-{N} pcomm row");
    assert_eq!(
        row.group_key, "worker-{N}",
        "join key is the placeholder pattern under Pcomm normalization",
    );
    assert!(
        row.display_key.contains("worker"),
        "display key reflects grex (or fallback to join key) over union; got {:?}",
        row.display_key,
    );
    // Sorted, deduplicated union of the four member pcomms.
    let union_members: Vec<String> = (0..4).map(|i| format!("worker-{i}")).collect();
    let expected_label = pattern_display_label("worker-{N}", &union_members);
    assert_eq!(
        row.display_key, expected_label,
        "display label must match pattern_display_label over union"
    );
}
#[test]
fn pattern_key_bracket_alpha_token_stays_literal() {
    // Alphabetic bracket contents carry no run index, so the comm is
    // returned unchanged.
    for name in ["foo[bar]", "a[b]", "dev[abc]"] {
        assert_eq!(pattern_key(name), name);
    }
}
#[test]
fn pattern_display_label_handles_bracket_member_names() {
    // Bracket characters in member names must survive label generation and
    // the resulting grex output must still compile as a regex.
    let members: Vec<String> = (0..3).map(|i| format!("worker[{i}]")).collect();
    let label = pattern_display_label("worker[{N}]", &members);
    assert!(
        label.contains("worker"),
        "grex must produce a label that contains the shared `worker` prefix; got {label:?}",
    );
    let _: Regex = Regex::new(&label)
        .unwrap_or_else(|e| panic!("grex output {label:?} is not a valid regex: {e}"));
}
#[test]
fn cgroup_skeleton_tokens_handles_bracketed_segments() {
    // The skeleton keeps every separator (including brackets) while the token
    // list carries only the bare tokens.
    let (skeleton, tokens) = cgroup_skeleton_tokens("/runner-[xyz]/scope");
    assert_eq!(
        tokens,
        ["runner", "xyz", "scope"],
        "bracket separators must split tokens cleanly; got {tokens:?}",
    );
    assert_eq!(
        skeleton, "/runner-[xyz]/scope",
        "skeleton must preserve separators including brackets; got {skeleton:?}",
    );
}