#![allow(clippy::unwrap_used)]
use std::path::PathBuf;
use std::time::{Duration, Instant};
use ff_probe::open;
/// Returns the shared `assets` directory, resolved two levels up from this
/// crate's manifest directory (workspace layout: `<root>/assets`).
fn assets_dir() -> PathBuf {
    // CARGO_MANIFEST_DIR is injected by Cargo at compile time.
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    // Build the path with `join` rather than string formatting — avoids
    // hand-rolled separator handling and keeps the code platform-aware.
    PathBuf::from(manifest_dir).join("../../assets")
}
/// Path to the sample MP4 exercised by the video probe benchmarks.
fn test_video_path() -> PathBuf {
    let mut path = assets_dir();
    path.push("video/gameplay.mp4");
    path
}
/// Path to the sample MP3 exercised by the audio probe benchmarks.
fn test_audio_path() -> PathBuf {
    let mut path = assets_dir();
    path.push("audio/konekonoosanpo.mp3");
    path
}
/// Runs `f` exactly once and returns its result together with the wall-clock
/// time the call took.
fn measure<F, R>(f: F) -> (R, Duration)
where
    F: FnOnce() -> R,
{
    let started_at = Instant::now();
    let output = f();
    (output, started_at.elapsed())
}
/// A single video probe must complete within the 100 ms budget.
#[test]
fn test_probe_video_performance_target() {
    const THRESHOLD_MS: u128 = 100;
    let path = test_video_path();
    let (info, elapsed) = measure(|| open(&path).expect("Failed to probe video"));
    assert!(info.has_video(), "Should detect video stream");
    let took_ms = elapsed.as_millis();
    println!("Video probe took: {}ms", took_ms);
    assert!(
        took_ms < THRESHOLD_MS,
        "Probe performance target not met: {}ms (threshold: {}ms)",
        took_ms,
        THRESHOLD_MS
    );
}
/// A single audio probe must complete within the 100 ms budget.
#[test]
fn test_probe_audio_performance_target() {
    const THRESHOLD_MS: u128 = 100;
    let path = test_audio_path();
    let (info, elapsed) = measure(|| open(&path).expect("Failed to probe audio"));
    assert!(info.has_audio(), "Should detect audio stream");
    let took_ms = elapsed.as_millis();
    println!("Audio probe took: {}ms", took_ms);
    assert!(
        took_ms < THRESHOLD_MS,
        "Probe performance target not met: {}ms (threshold: {}ms)",
        took_ms,
        THRESHOLD_MS
    );
}
/// Probing the same file repeatedly should average under 50 ms per call.
#[test]
fn test_repeated_probe_performance() {
    const AVG_THRESHOLD_MS: u128 = 50;
    const ITERATIONS: usize = 10;
    let path = test_video_path();
    // Time the whole batch, then derive the per-call average.
    let (_, total) = measure(|| {
        (0..ITERATIONS).for_each(|_| {
            open(&path).expect("Failed to probe");
        });
    });
    let avg_ms = total.as_millis() / ITERATIONS as u128;
    println!(
        "Average probe time over {} iterations: {}ms",
        ITERATIONS, avg_ms
    );
    assert!(
        avg_ms < AVG_THRESHOLD_MS,
        "Average probe performance target not met: {}ms (threshold: {}ms)",
        avg_ms,
        AVG_THRESHOLD_MS
    );
}
/// Once a probe completes, reading its metadata accessors should take well
/// under a millisecond in total — they are expected to be in-memory reads.
#[test]
fn test_metadata_access_performance() {
    let path = test_video_path();
    let info = open(&path).expect("Failed to probe video");
    const THRESHOLD_MICROS: u128 = 1000;
    let (_, duration) = measure(|| {
        // black_box keeps the optimizer from eliding these otherwise-unused
        // calls in release builds; with the original `let _ = ...` pattern the
        // whole closure could compile to a no-op, making the timing meaningless.
        std::hint::black_box(info.has_video());
        std::hint::black_box(info.video_stream_count());
        std::hint::black_box(info.primary_video());
        std::hint::black_box(info.resolution());
        std::hint::black_box(info.frame_rate());
        std::hint::black_box(info.has_audio());
        std::hint::black_box(info.audio_stream_count());
        std::hint::black_box(info.primary_audio());
        std::hint::black_box(info.duration());
        std::hint::black_box(info.file_size());
        std::hint::black_box(info.format());
    });
    let elapsed_micros = duration.as_micros();
    println!("Metadata access took: {}µs", elapsed_micros);
    assert!(
        elapsed_micros < THRESHOLD_MICROS,
        "Metadata access too slow: {}µs (threshold: {}µs)",
        elapsed_micros,
        THRESHOLD_MICROS
    );
}
/// Probe timings should be stable: the slowest of 20 runs must stay within
/// 5x of the average.
#[test]
fn test_probe_performance_consistency() {
    const ITERATIONS: usize = 20;
    let path = test_video_path();
    let durations: Vec<u128> = (0..ITERATIONS)
        .map(|_| {
            let (_, elapsed) = measure(|| open(&path).expect("Failed to probe"));
            elapsed.as_micros()
        })
        .collect();
    let avg = durations.iter().sum::<u128>() / durations.len() as u128;
    let max = *durations.iter().max().unwrap();
    let min = *durations.iter().min().unwrap();
    println!("Probe performance statistics (microseconds):");
    println!(" Average: {}", avg);
    println!(" Min: {}", min);
    println!(" Max: {}", max);
    println!(" Range: {}", max - min);
    assert!(
        max < avg * 5,
        "Performance variance too high: max={}µs, avg={}µs",
        max,
        avg
    );
}
/// The second (warm) probe of the same file must not be dramatically slower
/// than the first (cold) one.
#[test]
fn test_probe_cold_vs_warm() {
    let path = test_video_path();
    let (_, cold_duration) = measure(|| open(&path).expect("Failed to probe"));
    let (_, warm_duration) = measure(|| open(&path).expect("Failed to probe"));
    let cold_ms = cold_duration.as_millis();
    let warm_ms = warm_duration.as_millis();
    println!("Cold probe: {}ms", cold_ms);
    println!("Warm probe: {}ms", warm_ms);
    // Compare at full Duration resolution. The previous check compared
    // truncated milliseconds, which is flaky for fast probes: cold=0.9ms
    // truncates to 0 and warm=1.0ms truncates to 1, so `1 <= 0 * 2` failed
    // even though the two runs took essentially the same time.
    assert!(
        warm_duration <= cold_duration * 2,
        "Warm probe unexpectedly slower: warm={}ms, cold={}ms",
        warm_ms,
        cold_ms
    );
}