use alloc::format;
use alloc::string::String;
use alloc::vec;
use alloc::vec::Vec;
use core::fmt::Display;
use core::time::Duration;
use serde::{Deserialize, Serialize};
// The `new` derive used below is provided by the `derive_new` crate (it may
// already be in scope if the crate root re-exports it via `#[macro_use]`).
use derive_new::new;
#[cfg(all(not(target_family = "wasm"), feature = "std"))]
use std::time::Instant;
#[cfg(all(target_family = "wasm", feature = "std"))]
use web_time::Instant;
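/// Raw durations collected while running a benchmark.
///
/// A minimal sketch of computing statistics from hand-built samples; the
/// `Duration` values below are illustrative only:
///
/// ```ignore
/// let durations = BenchmarkDurations {
///     durations: vec![Duration::from_secs(1), Duration::from_secs(3)],
/// };
/// let computed = BenchmarkComputations::new(&durations);
/// assert_eq!(computed.mean, Duration::from_secs(2));
/// ```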
#[derive(new, Debug, Default, Clone, Serialize, Deserialize)]
pub struct BenchmarkDurations {
    /// All durations recorded during the run, in the order they were sampled.
    pub durations: Vec<Duration>,
}
impl BenchmarkDurations {
    /// Returns the minimum, maximum, and median of the recorded durations.
    ///
    /// With an even number of samples, the median is the upper of the two
    /// middle values rather than their average. Panics if no durations were
    /// recorded.
    fn min_max_median_durations(&self) -> (Duration, Duration, Duration) {
let mut sorted = self.durations.clone();
sorted.sort();
let min = *sorted.first().unwrap();
let max = *sorted.last().unwrap();
let median = *sorted.get(sorted.len() / 2).unwrap();
(min, max, median)
}
    /// Returns the mean of the recorded durations.
    pub(crate) fn mean_duration(&self) -> Duration {
self.durations.iter().sum::<Duration>() / self.durations.len() as u32
}
    /// Returns the population variance of the recorded durations around the
    /// given mean. The squared deviations are computed in seconds, so the
    /// numeric value of the returned `Duration` is in seconds squared.
    pub(crate) fn variance_duration(&self, mean: Duration) -> Duration {
        self.durations
            .iter()
            .map(|duration| {
                let diff = duration.as_secs_f64() - mean.as_secs_f64();
                Duration::from_secs_f64(diff * diff)
            })
            .sum::<Duration>()
            / self.durations.len() as u32
    }
}
impl Display for BenchmarkDurations {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let computed = BenchmarkComputations::new(self);
let BenchmarkComputations {
mean,
median,
variance,
min,
max,
} = computed;
let num_sample = self.durations.len();
f.write_str(
format!(
"
―――――――― Result ―――――――――
Samples {num_sample}
Mean {mean:.3?}
Variance {variance:.3?}
Median {median:.3?}
Min {min:.3?}
Max {max:.3?}
―――――――――――――――――――――――――"
)
.as_str(),
)
}
}
/// Statistics computed from a [`BenchmarkDurations`].
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct BenchmarkComputations {
    /// Mean of all the sampled durations.
    pub mean: Duration,
    /// Median of all the sampled durations.
    pub median: Duration,
    /// Variance of all the sampled durations.
    pub variance: Duration,
    /// Minimum sampled duration.
    pub min: Duration,
    /// Maximum sampled duration.
    pub max: Duration,
}
impl BenchmarkComputations {
    /// Computes all statistics from the given raw durations.
    pub fn new(durations: &BenchmarkDurations) -> Self {
let mean = durations.mean_duration();
let (min, max, median) = durations.min_max_median_durations();
Self {
mean,
median,
min,
max,
variance: durations.variance_duration(mean),
}
}
}
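/// A workload that can be benchmarked by timing repeated executions.
///
/// A minimal sketch of an implementation, assuming a hypothetical
/// `SumBenchmark` that sums a vector on the CPU (illustrative only, not part
/// of this crate):
///
/// ```ignore
/// struct SumBenchmark {
///     size: usize,
/// }
///
/// impl Benchmark for SumBenchmark {
///     type Args = Vec<f32>;
///
///     fn prepare(&self) -> Self::Args {
///         vec![1.0; self.size]
///     }
///
///     fn execute(&self, args: Self::Args) {
///         let _sum: f32 = args.iter().sum();
///     }
///
///     fn name(&self) -> String {
///         String::from("sum")
///     }
///
///     fn sync(&self) {
///         // Nothing to wait for in a synchronous CPU workload.
///     }
/// }
/// ```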
pub trait Benchmark {
    /// Arguments passed to each execution; cloned for every sample.
    type Args: Clone;
    /// Prepares the benchmark, returning the arguments used by
    /// [`execute`](Self::execute).
    fn prepare(&self) -> Self::Args;
    /// Executes the benchmark once with the prepared arguments.
    fn execute(&self, args: Self::Args);
    /// Number of timed samples to collect (defaults to 10).
    fn num_samples(&self) -> usize {
        10
    }
    /// Name of the benchmark.
    fn name(&self) -> String;
    /// Optional description of the benchmark configuration.
    fn options(&self) -> Option<String> {
        None
    }
    /// Shapes associated with the benchmark, if any.
    fn shapes(&self) -> Vec<Vec<usize>> {
        vec![]
    }
    /// Waits for pending asynchronous work to finish so timings only cover
    /// completed executions.
    fn sync(&self);
    /// Runs the benchmark: one warmup execution followed by
    /// [`num_samples`](Self::num_samples) timed executions.
    fn run(&self) -> BenchmarkDurations {
        #[cfg(not(feature = "std"))]
        panic!("Attempting to run benchmark in a no-std environment");
        #[cfg(feature = "std")]
        {
            let args = self.prepare();
            // Warmup execution (not timed).
            self.execute(args.clone());
            self.sync();
            let mut durations = Vec::with_capacity(self.num_samples());
            for _ in 0..self.num_samples() {
                // Synchronize, time a single execution, then synchronize again
                // so the measurement covers the full execution.
                self.sync();
                let start = Instant::now();
                self.execute(args.clone());
                self.sync();
                let end = Instant::now();
                durations.push(end - start);
            }
            BenchmarkDurations { durations }
        }
    }
}
/// Result of a benchmark run, with the metadata collected alongside it.
#[derive(Default, Clone)]
pub struct BenchmarkResult {
    /// Raw durations of every sample.
    pub raw: BenchmarkDurations,
    /// Statistics computed from the raw durations.
    pub computed: BenchmarkComputations,
    /// Git commit hash of the benchmarked code.
    pub git_hash: String,
    /// Name of the benchmark.
    pub name: String,
    /// Options of the benchmark, if any.
    pub options: Option<String>,
    /// Shapes associated with the benchmark, if any.
    pub shapes: Vec<Vec<usize>>,
    /// Unix timestamp of the run, in milliseconds.
    pub timestamp: u128,
}
impl Display for BenchmarkResult {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.write_str(
format!(
"
Timestamp: {}
Git Hash: {}
Benchmarking - {}{}
",
self.timestamp, self.git_hash, self.name, self.raw
)
.as_str(),
)
}
}
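/// Runs the given benchmark and bundles the timings with the current git
/// commit hash and a Unix timestamp in milliseconds.
///
/// A minimal usage sketch, assuming the hypothetical `SumBenchmark` shown in
/// the [`Benchmark`] trait documentation:
///
/// ```ignore
/// let result = run_benchmark(SumBenchmark { size: 1_000_000 });
/// println!("{result}");
/// ```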
#[cfg(feature = "std")]
pub fn run_benchmark<BM>(benchmark: BM) -> BenchmarkResult
where
BM: Benchmark,
{
    // Milliseconds elapsed since the Unix epoch.
    let timestamp = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_millis();
    // Current commit hash via `git rev-parse HEAD`; this requires `git` to be
    // available on the PATH.
    let output = std::process::Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()
        .unwrap();
    let git_hash = String::from_utf8(output.stdout).unwrap().trim().to_string();
let durations = benchmark.run();
BenchmarkResult {
raw: durations.clone(),
computed: BenchmarkComputations::new(&durations),
git_hash,
name: benchmark.name(),
options: benchmark.options(),
shapes: benchmark.shapes(),
timestamp,
}
}
#[cfg(test)]
mod tests {
use super::*;
use alloc::vec;
#[test]
    fn test_min_max_median_durations_odd_number_of_samples() {
let durations = BenchmarkDurations {
durations: vec![
Duration::new(10, 0),
Duration::new(20, 0),
Duration::new(30, 0),
Duration::new(40, 0),
Duration::new(50, 0),
],
};
let (min, max, median) = durations.min_max_median_durations();
assert_eq!(min, Duration::from_secs(10));
assert_eq!(max, Duration::from_secs(50));
assert_eq!(median, Duration::from_secs(30));
}
#[test]
    fn test_min_max_median_durations_even_number_of_samples() {
let durations = BenchmarkDurations {
durations: vec![
Duration::new(18, 5),
Duration::new(20, 0),
Duration::new(30, 0),
Duration::new(40, 0),
],
};
let (min, max, median) = durations.min_max_median_durations();
        assert_eq!(min, Duration::from_nanos(18_000_000_005_u64));
assert_eq!(max, Duration::from_secs(40));
        // With an even number of samples, the upper of the two middle values
        // is returned rather than their average.
        assert_eq!(median, Duration::from_secs(30));
}
#[test]
fn test_mean_duration() {
let durations = BenchmarkDurations {
durations: vec![
Duration::new(10, 0),
Duration::new(20, 0),
Duration::new(30, 0),
Duration::new(40, 0),
],
};
let mean = durations.mean_duration();
assert_eq!(mean, Duration::from_secs(25));
}
#[test]
fn test_variance_duration() {
let durations = BenchmarkDurations {
durations: vec![
Duration::new(10, 0),
Duration::new(20, 0),
Duration::new(30, 0),
Duration::new(40, 0),
Duration::new(50, 0),
],
};
let mean = durations.mean_duration();
let variance = durations.variance_duration(mean);
assert_eq!(variance, Duration::from_secs(200));
}
}