use super::{
DecoderDynamicTable, EncoderDynamicTable, qif,
reference_out::{
OutGroup, WireHistogram, classify_encoder_stream, classify_header_block,
histogram_from_out_file, parse_encoder_stream_for_dump, parse_header_block_for_dump,
parse_out_groups, render_encoder_instruction, render_field_line,
},
};
use crate::h3::H3Settings;
use futures_lite::{future, io::Cursor};
use std::{
    collections::{BTreeMap, HashMap, HashSet},
    fs::File,
    io::{BufWriter, Write},
    path::{Path, PathBuf},
    sync::Arc,
};
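/// Dynamic-table parameters for one run: table capacity in bytes and the
/// advertised maximum number of blocked streams.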
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct Config {
capacity: u64,
max_blocked: u64,
}
impl Config {
const fn new(capacity: u64, max_blocked: u64) -> Self {
Self {
capacity,
max_blocked,
}
}
}
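/// The (capacity, max_blocked) pairs exercised for every qif file; reference
/// encodings are looked up under the same parameters.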
const CONFIGS: &[Config] = &[
Config::new(0, 0),
Config::new(256, 100),
Config::new(4096, 0),
Config::new(4096, 100),
];
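/// Bytes our encoder produced for one run, split into field-section bytes and
/// encoder-stream bytes.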
#[derive(Debug, Default, Clone, Copy)]
struct EncodeStats {
section_bytes: usize,
encoder_stream_bytes: usize,
}
impl EncodeStats {
fn total(&self) -> usize {
self.section_bytes + self.encoder_stream_bytes
}
}
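/// Sink for the per-group instruction dump, plus ls-qpack's parsed groups for
/// side-by-side output.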
struct DumpCtx<'a> {
writer: &'a mut BufWriter<File>,
their_groups: &'a [OutGroup],
}
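/// Stable in-place partition that moves pseudo-header fields (names starting
/// with ':') in front of regular fields, preserving relative order within each
/// class.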
fn normalize_pseudos_first(group: &mut qif::QifGroup) {
let mut write = 0;
for read in 0..group.len() {
if group[read].0.starts_with(':') {
group[write..=read].rotate_right(1);
write += 1;
}
}
}
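/// A group is malformed if a pseudo-header appears after a regular field or if
/// the same pseudo-header appears twice; decoders may reject such sections.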
fn qif_group_is_malformed(group: &qif::QifGroup) -> bool {
let mut saw_regular = false;
    let mut seen: HashSet<&str> = HashSet::new();
for (name, _) in group {
if name.starts_with(':') {
if saw_regular || !seen.insert(name) {
return true;
}
} else {
saw_regular = true;
}
}
false
}
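/// Round-trips `groups` through our encoder and decoder at `config`.
/// `chunk_size` simulates connection length in header blocks: encoder and
/// decoder state is torn down and rebuilt every `chunk_size` groups (`None` =
/// one connection for the whole file). Panics on any mismatch; malformed
/// groups may instead be rejected by the decoder. Returns our byte totals and
/// a histogram of the wire instructions we emitted.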
fn run_qif_at_config(
qif_path: &Path,
groups: &[qif::QifGroup],
config: Config,
chunk_size: Option<usize>,
    observer: Arc<super::HeaderObserver>,
mut dump: Option<DumpCtx<'_>>,
) -> (EncodeStats, WireHistogram) {
let mut stats = EncodeStats::default();
let mut wire = WireHistogram::default();
if groups.is_empty() {
return (stats, wire);
}
let effective_chunk_size = chunk_size.unwrap_or(groups.len()).max(1);
let mut chunk_start = 0;
while chunk_start < groups.len() {
let chunk_end = (chunk_start + effective_chunk_size).min(groups.len());
let chunk = &groups[chunk_start..chunk_end];
let mut context = crate::HttpContext::default();
context.observer = observer.clone();
context.config.dynamic_table_capacity = config.capacity as usize;
let encoder = EncoderDynamicTable::new(&context);
encoder.initialize_from_peer_settings(
H3Settings::default()
.with_qpack_max_table_capacity(config.capacity)
.with_qpack_blocked_streams(config.max_blocked),
);
let decoder =
DecoderDynamicTable::new(config.capacity as usize, config.max_blocked as usize);
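        // Initialization queues a Set Dynamic Table Capacity instruction;
        // feed it to the decoder before any section can reference the table.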
let initial_ops: Vec<u8> = encoder.drain_pending_ops().into_iter().flatten().collect();
stats.encoder_stream_bytes += initial_ops.len();
wire.encoder_stream_bytes += initial_ops.len() as u64;
classify_encoder_stream(&initial_ops, &mut wire);
if !initial_ops.is_empty() {
let mut cursor = Cursor::new(&initial_ops[..]);
future::block_on(decoder.run_reader(&mut cursor)).unwrap_or_else(|e| {
panic!(
"{}: decoder rejected initial SetDynamicTableCapacity: {e}",
qif_path.display()
)
});
}
for (i, group) in chunk.iter().enumerate() {
let stream_id = (i as u64) + 1;
let global_index = chunk_start + i;
let field_lines = qif::build_field_lines(group)
.unwrap_or_else(|e| panic!("{}: group {global_index}: {e}", qif_path.display()));
let mut buf = Vec::new();
encoder.encode_field_lines(&field_lines, &mut buf, stream_id);
stats.section_bytes += buf.len();
wire.section_bytes += buf.len() as u64;
wire.n_sections += 1;
classify_header_block(&buf, &mut wire);
let enc_ops: Vec<u8> = encoder.drain_pending_ops().into_iter().flatten().collect();
stats.encoder_stream_bytes += enc_ops.len();
wire.encoder_stream_bytes += enc_ops.len() as u64;
classify_encoder_stream(&enc_ops, &mut wire);
if let Some(ctx) = dump.as_mut() {
let their_group = ctx.their_groups.get(global_index);
let our_snapshot = OurStateSnapshot {
insert_count: encoder.insert_count(),
entry_count: encoder.entry_count(),
current_size: encoder.current_size(),
capacity: encoder.capacity(),
};
dump_group(
ctx.writer,
global_index,
stream_id,
group,
&enc_ops,
&buf,
their_group,
&our_snapshot,
);
}
if !enc_ops.is_empty() {
let mut cursor = Cursor::new(&enc_ops[..]);
future::block_on(decoder.run_reader(&mut cursor)).unwrap_or_else(|e| {
panic!(
"{}: group {global_index}: decoder run_reader failed: {e}",
qif_path.display()
)
});
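                // No real decoder stream is wired back, so simulate an
                // immediate Insert Count Increment covering every new insert.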
let increment = encoder.insert_count() - encoder.known_received_count();
if increment > 0 {
encoder
.on_insert_count_increment(increment)
.unwrap_or_else(|e| {
panic!(
"{}: group {global_index}: encoder rejected insert count \
increment {increment}: {e}",
qif_path.display()
)
});
}
}
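            // Decode the section. Groups that violate field-ordering rules may
            // legitimately be rejected as malformed; everything else must
            // round-trip.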
let decode_result = future::block_on(decoder.decode(&buf, stream_id));
let field_section = match decode_result {
Ok(fs) => fs,
Err(e)
if e.to_string().contains("HTTP message was malformed.")
&& qif_group_is_malformed(group) =>
{
continue;
}
Err(e) => panic!(
"{}: group {global_index}: decode failed: {e}",
qif_path.display()
),
};
let mut got = qif::field_section_to_pairs(field_section);
let mut want = group.clone();
got.sort();
want.sort();
assert!(
got == want,
"{}: group {global_index} mismatch (section {} bytes, enc-stream {} bytes)\n \
want: {:?}\n got: {:?}",
qif_path.display(),
buf.len(),
enc_ops.len(),
want,
got,
);
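            // RFC 9204 §4.4.1: a decoder acknowledges only sections whose
            // Required Insert Count is non-zero; a 0x00 0x00 prefix encodes
            // RIC = 0, so no Section Acknowledgment is expected for it.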
if !buf.starts_with(&[0x00, 0x00]) {
encoder.on_section_ack(stream_id).unwrap_or_else(|e| {
panic!(
"{}: group {global_index}: encoder rejected section ack: {e}",
qif_path.display()
)
});
}
}
chunk_start = chunk_end;
}
(stats, wire)
}
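/// Encoder dynamic-table state captured right after encoding a group, shown in
/// each dump entry's header.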
struct OurStateSnapshot {
insert_count: u64,
entry_count: usize,
current_size: usize,
capacity: usize,
}
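/// Writes one dump entry: encoder state after the group, the input fields, and
/// our wire instructions next to ls-qpack's, one line per instruction.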
fn dump_group(
writer: &mut BufWriter<File>,
group_index: usize,
stream_id: u64,
input: &qif::QifGroup,
our_enc: &[u8],
our_hdr: &[u8],
their_group: Option<&OutGroup>,
our_snapshot: &OurStateSnapshot,
) {
let _ = writeln!(
writer,
"==== group {group_index} (stream_id={stream_id}) ===="
);
let fill_pct = if our_snapshot.capacity == 0 {
0.0
} else {
(our_snapshot.current_size as f64) * 100.0 / (our_snapshot.capacity as f64)
};
let _ = writeln!(
writer,
" our_state_after: insert_count={} entry_count={} used={}/{} ({:.1}%)",
our_snapshot.insert_count,
our_snapshot.entry_count,
our_snapshot.current_size,
our_snapshot.capacity,
fill_pct,
);
let _ = writeln!(writer, " input:");
for (name, value) in input {
let _ = writeln!(
writer,
" {name}: {}",
render_bytes_for_dump(value.as_bytes())
);
}
let _ = writeln!(writer, " ours:");
dump_enc_and_hdr(writer, our_enc, our_hdr);
let _ = writeln!(writer, " ls-qpack:");
if let Some(g) = their_group {
for chunk in &g.enc_stream {
for instr in parse_encoder_stream_for_dump(chunk) {
let _ = writeln!(writer, " enc: {}", render_encoder_instruction(&instr));
}
}
if let Some((prefix, lines)) = parse_header_block_for_dump(&g.header_block) {
let _ = writeln!(
writer,
" hdr.prefix: enc_ric={} sign={} delta_base={}",
prefix.encoded_required_insert_count,
u8::from(prefix.base_is_negative),
prefix.delta_base,
);
for instr in lines {
let _ = writeln!(writer, " hdr: {}", render_field_line(&instr));
}
}
} else {
let _ = writeln!(writer, " (no reference data for this group)");
}
let _ = writeln!(writer);
}
fn dump_enc_and_hdr(writer: &mut BufWriter<File>, enc: &[u8], hdr: &[u8]) {
for instr in parse_encoder_stream_for_dump(enc) {
let _ = writeln!(writer, " enc: {}", render_encoder_instruction(&instr));
}
if let Some((prefix, lines)) = parse_header_block_for_dump(hdr) {
let _ = writeln!(
writer,
" hdr.prefix: enc_ric={} sign={} delta_base={}",
prefix.encoded_required_insert_count,
u8::from(prefix.base_is_negative),
prefix.delta_base,
);
for instr in lines {
let _ = writeln!(writer, " hdr: {}", render_field_line(&instr));
}
}
}
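/// Renders bytes as a double-quoted literal with escapes, truncating to 80
/// bytes with a trailing `...`.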
fn render_bytes_for_dump(bytes: &[u8]) -> String {
const LIMIT: usize = 80;
let mut out = String::with_capacity(bytes.len().min(LIMIT) + 2);
out.push('"');
for (i, &b) in bytes.iter().enumerate() {
if i >= LIMIT {
out.push_str("...");
break;
}
match b {
0x20..=0x7e if b != b'"' && b != b'\\' => out.push(b as char),
b'\\' => out.push_str("\\\\"),
b'"' => out.push_str("\\\""),
b'\n' => out.push_str("\\n"),
b'\r' => out.push_str("\\r"),
b'\t' => out.push_str("\\t"),
_ => out.push_str(&format!("\\x{b:02x}")),
}
}
out.push('"');
out
}
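/// Sums the payload bytes of a reference `.out` file. Each block is framed as
/// an 8-byte stream ID followed by a 4-byte big-endian payload length; a
/// truncated trailing block is ignored.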
fn sum_reference_out_bytes(out_path: &Path) -> std::io::Result<usize> {
let data = std::fs::read(out_path)?;
let mut total = 0usize;
let mut pos = 0usize;
while pos + 12 <= data.len() {
let length = u32::from_be_bytes(data[pos + 8..pos + 12].try_into().unwrap()) as usize;
pos += 12;
if pos + length > data.len() {
break;
}
total += length;
pos += length;
}
Ok(total)
}
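/// Gathers each reference encoder's total wire bytes (and histogram, when the
/// file parses) for `stem` at `config` from
/// `<encoded_dir>/<encoder>/<stem>.out.<capacity>.<max_blocked>.1`; the
/// trailing `.1` selects the immediate-ack variant this test emulates.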
fn reference_stats_for(
encoded_dir: &Path,
stem: &str,
config: Config,
) -> BTreeMap<String, (usize, Option<WireHistogram>)> {
let want_suffix = format!(".out.{}.{}.1", config.capacity, config.max_blocked);
let mut out = BTreeMap::new();
let Ok(read_dir) = std::fs::read_dir(encoded_dir) else {
return out;
};
for encoder_entry in read_dir.flatten() {
let encoder_dir = encoder_entry.path();
if !encoder_dir.is_dir() {
continue;
}
let encoder_name = encoder_dir
.file_name()
.unwrap()
.to_string_lossy()
.to_string();
if encoder_name == "errors" || encoder_name == "examples" {
continue;
}
let candidate = encoder_dir.join(format!("{stem}{want_suffix}"));
if let Ok(total) = sum_reference_out_bytes(&candidate) {
let hist = histogram_from_out_file(&candidate).ok().flatten();
out.insert(encoder_name, (total, hist));
}
}
out
}
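/// Parses `QPACK_CHUNK_SIZES`, a comma-separated list of chunk sizes in which
/// `inf` (or `∞`) means one connection per qif file; unset means `[inf]`.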
fn parse_chunk_sizes() -> Vec<Option<usize>> {
match std::env::var("QPACK_CHUNK_SIZES").ok() {
None => vec![None],
Some(s) => s
.split(',')
.map(str::trim)
.filter(|tok| !tok.is_empty())
.map(|tok| match tok {
"inf" | "∞" => None,
n => Some(
n.parse::<usize>()
.unwrap_or_else(|_| panic!("QPACK_CHUNK_SIZES: invalid token {n:?}")),
),
})
.collect(),
}
}
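/// Round-trips the whole qif corpus at every config in `CONFIGS`.
///
/// Environment knobs: `QPACK_ENCODER_CORPUS_FILTER` restricts which files run,
/// `QPACK_ENCODER_STATS=1` prints the compression reports, `QPACK_CHUNK_SIZES`
/// re-runs the corpus at simulated connection lengths, and
/// `QPACK_ENCODER_DUMP` writes per-group instruction dumps (unchunked runs
/// only) to `target/qpack-dump`.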
#[test]
fn qpack_encoder_corpus() {
let _ = env_logger::try_init();
let base = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/qifs");
if !base.exists() {
eprintln!("qifs submodule not checked out, skipping QPACK encoder corpus test");
return;
}
let qif_dir = base.join("qifs");
let encoded_dir = base.join("encoded/qpack-06");
let filter = std::env::var("QPACK_ENCODER_CORPUS_FILTER").ok();
let stats_enabled = std::env::var("QPACK_ENCODER_STATS").is_ok_and(|v| v == "1");
let chunk_sizes = parse_chunk_sizes();
let chunked_mode = chunk_sizes.iter().any(Option::is_some);
let dump_filter = std::env::var("QPACK_ENCODER_DUMP").ok();
let dump_dir: Option<PathBuf> = dump_filter.as_ref().map(|_| {
let d = Path::new(env!("CARGO_MANIFEST_DIR")).join("target/qpack-dump");
let _ = std::fs::create_dir_all(&d);
d
});
type MetricRow = (
Option<usize>,
String,
Config,
EncodeStats,
BTreeMap<String, (usize, Option<WireHistogram>)>,
WireHistogram,
);
let mut metric: Vec<MetricRow> = Vec::new();
    let mut observers: HashMap<(Option<usize>, Config), Arc<super::HeaderObserver>> =
        HashMap::new();
let mut tested = 0usize;
let mut entries: Vec<_> = std::fs::read_dir(&qif_dir)
.unwrap_or_else(|e| panic!("reading {}: {e}", qif_dir.display()))
.flatten()
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|e| e == "qif"))
.filter(|p| p.file_stem().is_none_or(|s| s != "draft-examples"))
.collect();
entries.sort();
for qif_path in entries {
if let Some(needle) = &filter
&& !qif_path.to_string_lossy().contains(needle.as_str())
{
continue;
}
let content = std::fs::read_to_string(&qif_path)
.unwrap_or_else(|e| panic!("reading {}: {e}", qif_path.display()));
let mut groups = qif::parse(&content);
if groups.is_empty() {
continue;
}
for group in &mut groups {
normalize_pseudos_first(group);
}
let stem = qif_path
.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("")
.to_owned();
for &chunk_size in &chunk_sizes {
for &config in CONFIGS {
eprintln!(
"testing {} @ ({}, {}) chunk={}",
qif_path.display(),
config.capacity,
config.max_blocked,
chunk_size.map_or_else(|| "inf".to_string(), |n| n.to_string()),
);
let dump_file = if chunk_size.is_none() {
dump_filter.as_deref().and_then(|needle| {
if !stem.contains(needle) {
return None;
}
let path = dump_dir.as_ref()?.join(format!(
"{stem}_{}_{}.txt",
config.capacity, config.max_blocked
));
let file = File::create(&path).ok()?;
Some((path, BufWriter::new(file)))
})
} else {
None
};
let (their_groups, mut dump_writer) = match dump_file {
Some((path, mut w)) => {
let reference_path = encoded_dir.join(format!(
"ls-qpack/{stem}.out.{}.{}.1",
config.capacity, config.max_blocked
));
let groups = parse_out_groups(&reference_path).unwrap_or_default();
let _ = writeln!(
&mut w,
"# {} @ ({},{}) — ours vs ls-qpack, one line per instruction",
qif_path.display(),
config.capacity,
config.max_blocked
);
let _ = writeln!(&mut w, "# reference: {}", reference_path.display());
let _ = writeln!(&mut w);
eprintln!(" (dumping to {})", path.display());
(Some(groups), Some(w))
}
None => (None, None),
};
let dump_ctx = match (their_groups.as_deref(), dump_writer.as_mut()) {
(Some(g), Some(w)) => Some(DumpCtx {
writer: w,
their_groups: g,
}),
_ => None,
};
let observer = observers
.entry((chunk_size, config))
                    .or_insert_with(|| Arc::new(super::HeaderObserver::default()))
.clone();
let (stats, our_wire) =
run_qif_at_config(&qif_path, &groups, config, chunk_size, observer, dump_ctx);
tested += 1;
if stats_enabled {
let refs = if chunk_size.is_none() {
reference_stats_for(&encoded_dir, &stem, config)
} else {
BTreeMap::new()
};
metric.push((chunk_size, stem.clone(), config, stats, refs, our_wire));
}
}
}
}
assert!(
tested > 0,
"no qif files were tested — check that tests/qifs is populated{}",
filter
.as_deref()
.map(|f| format!(" or that QPACK_ENCODER_CORPUS_FILTER={f:?} matches something"))
.unwrap_or_default()
);
if stats_enabled {
if chunked_mode {
print_curve_report(&metric, &chunk_sizes);
} else {
let unchunked: Vec<_> = metric
.iter()
.filter(|row| row.0.is_none())
.map(|(_, stem, c, s, r, w)| (stem.clone(), *c, *s, r.clone(), *w))
.collect();
print_metric_report(&unchunked);
}
}
}
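/// Prints total bytes per (qif, config) with one column per chunk size, then
/// per-config aggregates and each column's ratio to the last listed chunk size
/// that has data.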
fn print_curve_report(
metric: &[(
Option<usize>,
String,
Config,
EncodeStats,
BTreeMap<String, (usize, Option<WireHistogram>)>,
WireHistogram,
)],
chunk_sizes: &[Option<usize>],
) {
eprintln!("\n=== QPACK Encoder Curve — total bytes by chunk size ===");
eprintln!(
"(bytes = section_bytes + encoder_stream_bytes; chunk_size = simulated connection length \
in header blocks; inf = one connection per qif)\n"
);
let fmt_cs =
|cs: Option<usize>| -> String { cs.map_or_else(|| "N=inf".into(), |n| format!("N={n}")) };
let mut per_qif: BTreeMap<(String, u64, u64), BTreeMap<Option<usize>, usize>> = BTreeMap::new();
let mut agg: BTreeMap<(u64, u64), BTreeMap<Option<usize>, usize>> = BTreeMap::new();
for (cs, stem, config, stats, _, _) in metric {
*per_qif
.entry((stem.clone(), config.capacity, config.max_blocked))
.or_default()
.entry(*cs)
.or_default() += stats.total();
*agg.entry((config.capacity, config.max_blocked))
.or_default()
.entry(*cs)
.or_default() += stats.total();
}
let mut header = format!("{:<20} {:<12}", "qif", "config");
for &cs in chunk_sizes {
header.push_str(&format!(" {:>12}", fmt_cs(cs)));
}
eprintln!("{header}");
for ((stem, cap, blk), by_chunk) in &per_qif {
let config_str = format!("({cap},{blk})");
let mut row = format!("{stem:<20} {config_str:<12}");
for &cs in chunk_sizes {
let val = by_chunk.get(&cs).copied().unwrap_or(0);
row.push_str(&format!(" {val:>12}"));
}
eprintln!("{row}");
}
eprintln!("\n--- aggregate across qifs ---");
let mut header = format!("{:<12}", "config");
for &cs in chunk_sizes {
header.push_str(&format!(" {:>12}", fmt_cs(cs)));
}
eprintln!("{header}");
for ((cap, blk), by_chunk) in &agg {
let config_str = format!("({cap},{blk})");
let mut row = format!("{config_str:<12}");
for &cs in chunk_sizes {
let val = by_chunk.get(&cs).copied().unwrap_or(0);
row.push_str(&format!(" {val:>12}"));
}
eprintln!("{row}");
}
let baseline = chunk_sizes
.iter()
.rev()
.copied()
.find(|cs| agg.values().any(|m| m.contains_key(cs)));
if let Some(base_cs) = baseline {
eprintln!(
"\n--- aggregate ratio to {} (lower is better; >100% = chunking costs bytes) ---",
fmt_cs(base_cs)
);
let mut header = format!("{:<12}", "config");
for &cs in chunk_sizes {
header.push_str(&format!(" {:>10}", fmt_cs(cs)));
}
eprintln!("{header}");
for ((cap, blk), by_chunk) in &agg {
let config_str = format!("({cap},{blk})");
let mut row = format!("{config_str:<12}");
let base = by_chunk.get(&base_cs).copied().unwrap_or(0);
for &cs in chunk_sizes {
let val = by_chunk.get(&cs).copied().unwrap_or(0);
let cell = if base == 0 {
" —".to_string()
} else {
format!("{:.1}%", (val as f64) * 100.0 / (base as f64))
};
row.push_str(&format!(" {cell:>10}"));
}
eprintln!("{row}");
}
}
eprintln!();
}
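/// Prints per-qif and aggregate byte totals against every reference encoder,
/// then hands off to the wire-instruction histogram comparison.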
fn print_metric_report(
metric: &[(
String,
Config,
EncodeStats,
BTreeMap<String, (usize, Option<WireHistogram>)>,
WireHistogram,
)],
) {
eprintln!("\n=== QPACK Encoder Corpus — Compression Report ===");
eprintln!(
"(totals are section_bytes + encoder_stream_bytes; reference = variant 1 / \
immediate-acks)\n"
);
eprintln!(
"{:<20} {:<12} {:>10} {:>10} {:>10} vs references (pct of ours)",
"qif", "config", "section", "enc_stream", "total"
);
for (stem, config, stats, refs, _) in metric {
let refs_str = if refs.is_empty() {
String::from("(no reference at this config)")
} else {
refs.iter()
.map(|(name, (total, _))| {
let pct = if stats.total() > 0 {
(*total as f64) * 100.0 / (stats.total() as f64)
} else {
0.0
};
format!("{name}={total} ({pct:.1}%)")
})
.collect::<Vec<_>>()
.join(" ")
};
eprintln!(
"{:<20} {:<12} {:>10} {:>10} {:>10} {}",
stem,
format!("({},{})", config.capacity, config.max_blocked),
stats.section_bytes,
stats.encoder_stream_bytes,
stats.total(),
refs_str,
);
}
let mut ours_by_config: BTreeMap<(u64, u64), usize> = BTreeMap::new();
let mut paired: BTreeMap<(u64, u64, String), (usize, usize)> = BTreeMap::new();
let mut our_wire_by_config: BTreeMap<(u64, u64), WireHistogram> = BTreeMap::new();
let mut their_wire_by_config: BTreeMap<(u64, u64, String), WireHistogram> = BTreeMap::new();
for (_, config, stats, refs, our_wire) in metric {
*ours_by_config
.entry((config.capacity, config.max_blocked))
.or_default() += stats.total();
for (name, (total, hist)) in refs {
let entry = paired
.entry((config.capacity, config.max_blocked, name.clone()))
.or_default();
entry.0 += stats.total();
entry.1 += *total;
if let Some(h) = hist {
their_wire_by_config
.entry((config.capacity, config.max_blocked, name.clone()))
.or_default()
.add(h);
}
}
our_wire_by_config
.entry((config.capacity, config.max_blocked))
.or_default()
.add(our_wire);
}
eprintln!(
"\n--- aggregates across all qifs (per-encoder pct is apples-to-apples: only files where \
that encoder has a reference) ---"
);
for ((cap, blk), ours) in &ours_by_config {
let refs_str = paired
.iter()
.filter(|((c, b, _), _)| *c == *cap && *b == *blk)
.map(|((_, _, name), (ours_matched, their_total))| {
let pct = if *ours_matched > 0 {
(*their_total as f64) * 100.0 / (*ours_matched as f64)
} else {
0.0
};
format!("{name}={their_total} ({pct:.1}% of ours on matching files)")
})
.collect::<Vec<_>>()
.join("\n ");
eprintln!("({cap},{blk}): ours={ours}\n {refs_str}");
}
eprintln!();
print_wire_comparison(&our_wire_by_config, &their_wire_by_config);
}
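/// Prints, per config, how often each wire-instruction kind appears in our
/// output versus each reference encoder's.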
fn print_wire_comparison(
ours: &BTreeMap<(u64, u64), WireHistogram>,
theirs: &BTreeMap<(u64, u64, String), WireHistogram>,
) {
eprintln!("--- wire-instruction histograms (ours vs references) ---");
for ((cap, blk), our) in ours {
let mut refs: Vec<(&String, &WireHistogram)> = theirs
.iter()
.filter(|((c, b, _), _)| *c == *cap && *b == *blk)
.map(|((_, _, name), h)| (name, h))
.collect();
refs.sort_by_key(|(name, _)| name.as_str());
eprintln!(
"\n({cap},{blk}) — bucket: ours vs [{}]",
refs.iter()
.map(|(n, _)| n.as_str())
.collect::<Vec<_>>()
.join(", "),
);
print_row("set_capacity", our.set_capacity, &refs, |h| h.set_capacity);
print_row("insert_literal_name", our.insert_literal_name, &refs, |h| {
h.insert_literal_name
});
print_row(
"insert_static_name_ref",
our.insert_static_name_ref,
&refs,
|h| h.insert_static_name_ref,
);
print_row(
"insert_dynamic_name_ref",
our.insert_dynamic_name_ref,
&refs,
|h| h.insert_dynamic_name_ref,
);
print_row("duplicate", our.duplicate, &refs, |h| h.duplicate);
print_row(
" ∑ inserts (non-dup)",
our.inserts_total(),
&refs,
WireHistogram::inserts_total,
);
eprintln!(" --");
print_row("indexed_static", our.indexed_static, &refs, |h| {
h.indexed_static
});
print_row(
"indexed_dyn_pre_base",
our.indexed_dynamic_pre_base,
&refs,
|h| h.indexed_dynamic_pre_base,
);
print_row("indexed_post_base", our.indexed_post_base, &refs, |h| {
h.indexed_post_base
});
print_row(
" ∑ indexed dynamic",
our.indexed_dynamic_total(),
&refs,
WireHistogram::indexed_dynamic_total,
);
print_row(
"literal_static_name_ref",
our.literal_static_name_ref,
&refs,
|h| h.literal_static_name_ref,
);
print_row(
"literal_dyn_name_ref",
our.literal_dynamic_name_ref,
&refs,
|h| h.literal_dynamic_name_ref,
);
print_row(
"literal_post_base_name_ref",
our.literal_post_base_name_ref,
&refs,
|h| h.literal_post_base_name_ref,
);
print_row(
" ∑ literal dyn name",
our.literal_dyn_name_total(),
&refs,
WireHistogram::literal_dyn_name_total,
);
print_row(
"literal_literal_name",
our.literal_literal_name,
&refs,
|h| h.literal_literal_name,
);
}
eprintln!();
}
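/// Prints one histogram bucket: our count and each reference's, flagged where
/// they diverge.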
fn print_row(
label: &str,
ours: u64,
refs: &[(&String, &WireHistogram)],
accessor: impl Fn(&WireHistogram) -> u64,
) {
let refs_str = refs
.iter()
.map(|(name, h)| {
let v = accessor(h);
let flag = diverges(ours, v);
format!("{name}={v}{flag}")
})
.collect::<Vec<_>>()
.join(" ");
eprintln!(" {label:<28} ours={ours:<8} {refs_str}");
}
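/// Returns a flag when the counts diverge: one side is zero and the other is
/// not, or the larger is at least twice the smaller.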
fn diverges(ours: u64, theirs: u64) -> &'static str {
if ours == 0 && theirs == 0 {
return "";
}
if ours == 0 || theirs == 0 {
return " ←Δ";
}
let (a, b) = if ours > theirs {
(ours, theirs)
} else {
(theirs, ours)
};
if a >= b * 2 { " ←Δ" } else { "" }
}