use super::{FieldSection, qif};
use crate::headers::qpack::decoder_dynamic_table::DecoderDynamicTable;
use futures_lite::{future, io::Cursor};
use std::{path::Path, sync::Arc, time::Duration};
use trillium_testing::RuntimeTrait as _;
use unicycle::FuturesUnordered;
/// Upper bound for driving a single `.out` file to completion; exceeding it
/// fails the test with a hang diagnosis (see the timeout assert in
/// `run_interop_file`).
const FILE_TIMEOUT: Duration = Duration::from_secs(5);
struct OutRecord {
stream_id: u64,
data: Vec<u8>,
}
fn parse_out_records(data: &[u8]) -> Result<Vec<OutRecord>, String> {
let mut records = Vec::new();
let mut pos = 0;
while pos + 12 <= data.len() {
let stream_id = u64::from_be_bytes(data[pos..pos + 8].try_into().unwrap());
let length = u32::from_be_bytes(data[pos + 8..pos + 12].try_into().unwrap()) as usize;
pos += 12;
if pos + length > data.len() {
return Err(format!(
"record at offset {} claims length {length} but only {} bytes remain",
pos - 12,
data.len() - pos
));
}
records.push(OutRecord {
stream_id,
data: data[pos..pos + length].to_vec(),
});
pos += length;
}
if pos != data.len() {
return Err(format!(
"trailing {} bytes after last record",
data.len() - pos
));
}
Ok(records)
}
/// Decodes one `.out` interop file and compares every decoded header block
/// against the matching group in the `.qif` expectation file.
///
/// `capacity` is the dynamic-table capacity parsed from the `.out` file
/// name. Any I/O, parse, decode-mismatch, or timeout failure panics, which
/// fails the enclosing test.
fn run_interop_file(out_path: &Path, qif_path: &Path, capacity: usize) {
    let qif_content = std::fs::read_to_string(qif_path)
        .unwrap_or_else(|e| panic!("reading {}: {e}", qif_path.display()));
    let expected_groups = qif::parse(&qif_content);
    let out_data =
        std::fs::read(out_path).unwrap_or_else(|e| panic!("reading {}: {e}", out_path.display()));
    let records = parse_out_records(&out_data)
        .unwrap_or_else(|e| panic!("parsing {}: {e}", out_path.display()));
    // Stream id 0 carries encoder instructions; every other stream is one
    // header block, and blocks must line up 1:1 with the QIF groups.
    let header_count = records.iter().filter(|r| r.stream_id != 0).count();
    assert_eq!(
        header_count,
        expected_groups.len(),
        "{}: {} header blocks but {} QIF groups",
        out_path.display(),
        header_count,
        expected_groups.len()
    );
    if header_count == 0 {
        return;
    }
    let table = Arc::new(DecoderDynamicTable::new(capacity, usize::MAX));
    if capacity > 0 {
        // NOTE(review): presumably the table starts at capacity 0 until an
        // explicit capacity instruction, and `new(capacity, ..)` makes
        // `capacity` the maximum — confirm against DecoderDynamicTable.
        table
            .set_capacity(capacity)
            .expect("pre-setting capacity should always succeed at max_capacity");
    }
    let runtime = trillium_testing::runtime();
    type Outcome = Result<FieldSection<'static>, String>;
    // Outcomes arrive in completion order, so each is paired with the index
    // of its QIF group for the comparison loop below.
    let mut results: Vec<(usize, Outcome)> = Vec::with_capacity(header_count);
    let drive = async {
        let mut pending: FuturesUnordered<_> = FuturesUnordered::new();
        for record in &records {
            if record.stream_id == 0 {
                // Apply encoder-stream instructions inline, in file order,
                // before any later header blocks are pushed.
                let mut cursor = Cursor::new(&record.data[..]);
                table.run_reader(&mut cursor).await.unwrap_or_else(|e| {
                    panic!("encoder stream error in {}: {e}", out_path.display())
                });
            } else {
                let table = Arc::clone(&table);
                let stream_id = record.stream_id;
                // Assumes header-block stream ids are 1-based and contiguous
                // (the corpus convention); an out-of-range id would panic on
                // the `expected_groups[header_idx]` index below.
                let header_idx = (stream_id - 1) as usize;
                let data = record.data.clone();
                pending.push(async move {
                    let outcome: Outcome = match table.decode(&data, stream_id).await {
                        Ok(fs) => Ok(fs),
                        Err(e) => Err(format!("decode error: {e}")),
                    };
                    (header_idx, outcome)
                });
            }
            // Opportunistically drain decodes that have already completed;
            // stop at the first still-pending one (or when the set is empty).
            loop {
                match future::poll_once(pending.next()).await {
                    Some(Some(item)) => results.push(item),
                    _ => break,
                }
            }
        }
        // All records have been fed, so remaining decodes can complete;
        // drain the set to the end.
        while let Some(item) = pending.next().await {
            results.push(item);
        }
    };
    runtime.block_on(async {
        assert!(
            runtime.timeout(FILE_TIMEOUT, drive).await.is_some(),
            "{}: timed out after {FILE_TIMEOUT:?} — some stream is hung (likely a wake-plumbing \
            bug in DecoderDynamicTable::get). Rerun with RUST_LOG=trace to narrow down which \
            stream id never resolved.",
            out_path.display()
        );
    });
    // Sanity: the drive produced exactly one outcome per header block.
    assert_eq!(
        results.len(),
        header_count,
        "{}: collected {} results but expected {header_count}",
        out_path.display(),
        results.len()
    );
    // Collect all mismatches before failing, so one run reports everything.
    let mut failures: Vec<String> = Vec::new();
    for (header_idx, outcome) in results {
        let expected = &expected_groups[header_idx];
        match outcome {
            Err(e) => {
                // Some corpus groups are intentionally invalid HTTP; for
                // those, a malformed-message error is the correct outcome.
                if e.contains("HTTP message was malformed.") && qif_is_malformed(expected) {
                    continue;
                }
                failures.push(format!(" header {header_idx}: {e}"));
            }
            Ok(fs) => {
                // Compare as sorted pair lists so field ordering differences
                // between decoder output and the QIF source don't matter.
                let mut got: Vec<_> = qif::field_section_to_pairs(fs);
                let mut want: Vec<_> = expected.clone();
                got.sort();
                want.sort();
                if got != want {
                    failures.push(format!(
                        " header {header_idx}: mismatch\n want: {want:?}\n got: {got:?}"
                    ));
                }
            }
        }
    }
    assert!(
        failures.is_empty(),
        "{}:\n{}",
        out_path.display(),
        failures.join("\n")
    );
}
/// Reports whether a QIF group describes an invalid HTTP message: a
/// pseudo-header (name starting with ':') appearing after a regular field,
/// or the same pseudo-header listed more than once.
fn qif_is_malformed(group: &qif::QifGroup) -> bool {
    let mut pseudo_names: std::collections::HashSet<&str> = std::collections::HashSet::new();
    let mut past_pseudo_section = false;
    for (name, _) in group {
        if !name.starts_with(':') {
            // A regular field ends the pseudo-header section.
            past_pseudo_section = true;
            continue;
        }
        // Pseudo-header after a regular field, or a duplicate: malformed.
        if past_pseudo_section || !pseudo_names.insert(name) {
            return true;
        }
    }
    false
}
/// Walks the checked-out qifs corpus and runs every compatible encoder
/// output file against its matching `.qif` expectation, panicking (and
/// failing the test) on the first file that misbehaves.
#[test]
fn qpack_interop_corpus() {
    let _ = env_logger::try_init();
    let base = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/qifs");
    if !base.exists() {
        eprintln!("qifs submodule not checked out, skipping QPACK corpus test");
        return;
    }
    let encoded_dir = base.join("encoded/qpack-06");
    let qif_dir = base.join("qifs");
    // Optional substring filter over the full .out path, for local runs.
    let filter = std::env::var("QPACK_CORPUS_FILTER").ok();
    let (mut tested, mut skipped) = (0usize, 0usize);
    let encoder_dirs = std::fs::read_dir(&encoded_dir)
        .unwrap_or_else(|e| panic!("reading {}: {e}", encoded_dir.display()));
    for encoder_entry in encoder_dirs {
        let encoder_dir = encoder_entry.unwrap().path();
        if !encoder_dir.is_dir() {
            continue;
        }
        let dir_name = encoder_dir.file_name().unwrap().to_string_lossy();
        // Skip the non-corpus directories shipped alongside the encoders.
        if matches!(&*dir_name, "errors" | "examples") {
            continue;
        }
        let files = std::fs::read_dir(&encoder_dir)
            .unwrap_or_else(|e| panic!("reading {}: {e}", encoder_dir.display()));
        for file_entry in files {
            let out_path = file_entry.unwrap().path();
            let filename = out_path.file_name().unwrap().to_string_lossy();
            // Corpus names look like `<stem>.out.<capacity>.<x>.<y>`; the
            // first config field is the dynamic-table capacity.
            let Some((qif_stem, config)) = filename.split_once(".out.") else {
                continue;
            };
            let config_parts: Vec<&str> = config.split('.').collect();
            if config_parts.len() < 3 {
                continue;
            }
            let capacity = match config_parts[0].parse::<usize>() {
                Ok(capacity) => capacity,
                Err(_) => continue,
            };
            if let Some(needle) = &filter {
                if !out_path.to_string_lossy().contains(needle.as_str()) {
                    continue;
                }
            }
            let qif_path = qif_dir.join(format!("{qif_stem}.qif"));
            if qif_path.exists() {
                eprintln!("testing {}", out_path.display());
                run_interop_file(&out_path, &qif_path, capacity);
                tested += 1;
            } else {
                skipped += 1;
            }
        }
    }
    assert!(
        tested > 0,
        "no corpus files were tested — check that tests/qifs is populated{}",
        filter
            .as_deref()
            .map(|f| format!(" or that QPACK_CORPUS_FILTER={f:?} matches something"))
            .unwrap_or_default()
    );
    if skipped > 0 {
        eprintln!("QPACK corpus: {tested} files tested, {skipped} skipped (no matching .qif)");
    }
}