use alloc::format;
use alloc::string::String;
use alloc::vec;
use alloc::vec::Vec;
use core::fmt::Display;
use core::time::Duration;
pub use crate::profile::{Instant, TimingMethod};
#[cfg(feature = "std")]
pub use crate::profile::ProfileDuration;
/// Raw results of a benchmark run: the timing method used and every
/// collected sample duration.
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(new, Debug, Clone)]
pub struct BenchmarkDurations {
    /// How the samples were measured (e.g. system vs. device timing).
    pub timing_method: TimingMethod,
    /// All raw sample durations collected during the run.
    pub durations: Vec<Duration>,
}
impl BenchmarkDurations {
    /// Creates a `BenchmarkDurations` from raw samples.
    ///
    /// Equivalent to the `new` constructor generated by `derive(new)`;
    /// kept as an explicit, discoverable entry point for callers.
    pub fn from_durations(timing_method: TimingMethod, durations: Vec<Duration>) -> Self {
        Self {
            timing_method,
            durations,
        }
    }

    /// Returns `(min, max, median)` of the collected samples.
    ///
    /// For an even number of samples the upper median is returned.
    /// Returns all zeros when no samples were collected, instead of
    /// panicking on `unwrap()` as before.
    fn min_max_median_durations(&self) -> (Duration, Duration, Duration) {
        if self.durations.is_empty() {
            return (Duration::ZERO, Duration::ZERO, Duration::ZERO);
        }
        let mut sorted = self.durations.clone();
        // Equal durations are indistinguishable, so an unstable sort is
        // safe and avoids the stable sort's extra allocation.
        sorted.sort_unstable();
        let min = sorted[0];
        let max = sorted[sorted.len() - 1];
        let median = sorted[sorted.len() / 2];
        (min, max, median)
    }

    /// Returns the arithmetic mean of the samples, or zero when there are
    /// no samples (previously this divided by zero and panicked).
    pub(crate) fn mean_duration(&self) -> Duration {
        let count = self.durations.len();
        if count == 0 {
            return Duration::ZERO;
        }
        // `Duration` only implements division by `u32`; sample counts are
        // small in practice so the cast cannot truncate.
        self.durations.iter().sum::<Duration>() / count as u32
    }

    /// Returns the population variance of the samples around `mean`
    /// (seconds squared, stored as a `Duration`), or zero when empty.
    pub(crate) fn variance_duration(&self, mean: Duration) -> Duration {
        let count = self.durations.len();
        if count == 0 {
            return Duration::ZERO;
        }
        self.durations
            .iter()
            .map(|duration| {
                // Squared deviation from the mean, computed in f64 seconds.
                let diff = duration.as_secs_f64() - mean.as_secs_f64();
                Duration::from_secs_f64(diff * diff)
            })
            .sum::<Duration>()
            / count as u32
    }
}
impl Display for BenchmarkDurations {
    /// Renders a human-readable summary table of the benchmark statistics.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let computed = BenchmarkComputations::new(self);
        let BenchmarkComputations {
            mean,
            median,
            variance,
            min,
            max,
        } = computed;
        let num_sample = self.durations.len();
        let timing_method = self.timing_method;
        // Write straight into the formatter; the previous
        // `f.write_str(format!(…).as_str())` allocated an intermediate
        // `String` on every call for no benefit.
        write!(
            f,
            "
―――――――― Result ―――――――――
Timing {timing_method}
Samples {num_sample}
Mean {mean:.3?}
Variance {variance:.3?}
Median {median:.3?}
Min {min:.3?}
Max {max:.3?}
―――――――――――――――――――――――――"
        )
    }
}
/// Summary statistics derived from a `BenchmarkDurations`.
// NOTE(review): `PartialEq`/`Eq` are only derived when the `serde` feature
// is enabled — confirm this asymmetry is intentional.
#[cfg_attr(
    feature = "serde",
    derive(serde::Serialize, serde::Deserialize, PartialEq, Eq)
)]
#[derive(Debug, Default, Clone)]
pub struct BenchmarkComputations {
    /// Arithmetic mean of all samples.
    pub mean: Duration,
    /// Median sample (upper median for an even sample count).
    pub median: Duration,
    /// Population variance of the samples (seconds squared as a `Duration`).
    pub variance: Duration,
    /// Smallest sample.
    pub min: Duration,
    /// Largest sample.
    pub max: Duration,
}
impl BenchmarkComputations {
    /// Derives the summary statistics (mean, median, variance, min, max)
    /// from a set of raw benchmark durations.
    pub fn new(durations: &BenchmarkDurations) -> Self {
        // The variance depends on the mean, so compute the mean first.
        let mean = durations.mean_duration();
        let variance = durations.variance_duration(mean);
        let (min, max, median) = durations.min_max_median_durations();
        Self {
            mean,
            median,
            variance,
            min,
            max,
        }
    }
}
/// A benchmarkable workload: prepares an input once, then executes and
/// times it repeatedly to collect sample durations.
pub trait Benchmark {
    /// Input passed to each execution; cloned for every sample.
    type Input: Clone;
    /// Output produced by one execution.
    type Output;

    /// Prepares the benchmark input once; the same (cloned) input is reused
    /// for warmup and for every sample.
    fn prepare(&self) -> Self::Input;

    /// Executes the benchmarked workload on the given input.
    fn execute(&self, input: Self::Input) -> Result<Self::Output, String>;

    /// Number of samples to collect per run.
    ///
    /// Overridable via the `BENCH_NUM_SAMPLES` environment variable;
    /// falls back to 10 when the variable is unset or unparsable.
    fn num_samples(&self) -> usize {
        const DEFAULT: usize = 10;
        #[cfg(feature = "std")]
        {
            std::env::var("BENCH_NUM_SAMPLES")
                .ok()
                .and_then(|val| val.parse::<usize>().ok())
                .unwrap_or(DEFAULT)
        }
        #[cfg(not(feature = "std"))]
        {
            DEFAULT
        }
    }

    /// Human-readable name of the benchmark.
    fn name(&self) -> String;

    /// Optional free-form options string describing the configuration.
    fn options(&self) -> Option<String> {
        None
    }

    /// Shapes of the tensors/buffers involved, if any.
    fn shapes(&self) -> Vec<Vec<usize>> {
        vec![]
    }

    /// Waits for all pending work to complete.
    fn sync(&self);

    /// Profiles a single execution; defaults to full (system-time) profiling.
    #[cfg(feature = "std")]
    fn profile(&self, args: Self::Input) -> Result<ProfileDuration, String> {
        self.profile_full(args)
    }

    /// Profiles a single execution with system timing, synchronizing before
    /// the start and after the end so pending work is not attributed wrongly.
    #[cfg(feature = "std")]
    fn profile_full(&self, args: Self::Input) -> Result<ProfileDuration, String> {
        self.sync();
        let start_time = Instant::now();
        let out = self.execute(args)?;
        self.sync();
        // Drop the output only after the final sync so its lifetime covers
        // the timed region.
        core::mem::drop(out);
        Ok(ProfileDuration::new_system_time(start_time, Instant::now()))
    }

    /// Runs the full benchmark: 3 warmup executions, a 1s cooldown, then
    /// `num_samples()` timed executions.
    ///
    /// # Errors
    ///
    /// Returns the first execution error encountered during sampling
    /// (warmup errors are intentionally discarded).
    #[allow(unused_variables)]
    fn run(&self, timing_method: TimingMethod) -> Result<BenchmarkDurations, String> {
        #[cfg(not(feature = "std"))]
        panic!("Attempting to run benchmark in a no-std environment");
        #[cfg(feature = "std")]
        {
            let execute = |args: &Self::Input| {
                let profile: Result<ProfileDuration, String> = match timing_method {
                    TimingMethod::System => self.profile_full(args.clone()),
                    TimingMethod::Device => self.profile(args.clone()),
                };
                let profile = profile?;
                Ok(crate::future::block_on(profile.resolve()))
            };
            let args = self.prepare();
            // Warmup: results and errors are deliberately ignored.
            for _ in 0..3 {
                let _duration: Result<crate::profile::ProfileTicks, _> = execute(&args);
            }
            std::thread::sleep(Duration::from_secs(1));
            // Read the sample count once: it consults an environment variable
            // on every call, and two reads could disagree (which would also
            // desynchronize `with_capacity` from the loop bound).
            let num_samples = self.num_samples();
            let mut durations = Vec::with_capacity(num_samples);
            for _ in 0..num_samples {
                durations.push(execute(&args)?.duration());
            }
            Ok(BenchmarkDurations {
                timing_method,
                durations,
            })
        }
    }
}
/// A completed benchmark run: raw samples, computed statistics, and
/// run metadata.
// Added `Debug`: every field already implements it, and public result
// types should be debuggable.
#[derive(Clone, Debug)]
pub struct BenchmarkResult {
    /// Raw sample durations collected by the run.
    pub raw: BenchmarkDurations,
    /// Summary statistics computed from `raw`.
    pub computed: BenchmarkComputations,
    /// Git commit hash of the code under benchmark.
    pub git_hash: String,
    /// Benchmark name.
    pub name: String,
    /// Optional free-form options string.
    pub options: Option<String>,
    /// Shapes involved in the benchmark, if any.
    pub shapes: Vec<Vec<usize>>,
    /// Unix timestamp in milliseconds at which the run started.
    pub timestamp: u128,
}
impl Display for BenchmarkResult {
    /// Renders the run metadata (timestamp, git hash, name) followed by
    /// the raw durations summary.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Write directly into the formatter; the previous
        // `f.write_str(format!(…).as_str())` allocated an intermediate
        // `String` on every call.
        write!(
            f,
            "
Timestamp: {}
Git Hash: {}
Benchmarking - {}{}
",
            self.timestamp, self.git_hash, self.name, self.raw
        )
    }
}
/// Runs the given benchmark end-to-end with system timing and packages the
/// results together with metadata (git hash, timestamp, name, options,
/// shapes).
///
/// # Errors
///
/// Returns `Err` when the benchmark itself fails, or when the current git
/// commit hash cannot be determined (previously these paths panicked via
/// `unwrap()` even though the function already returns `Result`).
#[cfg(feature = "std")]
pub fn run_benchmark<BM>(benchmark: BM) -> Result<BenchmarkResult, String>
where
    BM: Benchmark,
{
    let timestamp = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        // A pre-epoch clock is a broken-environment invariant, not a
        // recoverable condition.
        .expect("system clock should not be set before the Unix epoch")
        .as_millis();
    // Propagate git failures as errors instead of panicking: the benchmark
    // may run without git installed or outside a checkout.
    let output = std::process::Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()
        .map_err(|err| format!("Failed to run `git rev-parse HEAD`: {err}"))?;
    let git_hash = String::from_utf8(output.stdout)
        .map_err(|err| format!("Git hash is not valid UTF-8: {err}"))?
        .trim()
        .to_string();
    let durations = benchmark.run(TimingMethod::System)?;
    Ok(BenchmarkResult {
        raw: durations.clone(),
        computed: BenchmarkComputations::new(&durations),
        git_hash,
        name: benchmark.name(),
        options: benchmark.options(),
        shapes: benchmark.shapes(),
        timestamp,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::vec;

    // NOTE: the "even"/"odd" test names were previously swapped — the
    // "even" test used 5 samples and the "odd" test used 4.

    #[test]
    fn test_min_max_median_durations_odd_number_of_samples() {
        // 5 samples (odd): the median is the exact middle element.
        let durations = BenchmarkDurations {
            timing_method: TimingMethod::System,
            durations: vec![
                Duration::new(10, 0),
                Duration::new(20, 0),
                Duration::new(30, 0),
                Duration::new(40, 0),
                Duration::new(50, 0),
            ],
        };
        let (min, max, median) = durations.min_max_median_durations();
        assert_eq!(min, Duration::from_secs(10));
        assert_eq!(max, Duration::from_secs(50));
        assert_eq!(median, Duration::from_secs(30));
    }

    #[test]
    fn test_min_max_median_durations_even_number_of_samples() {
        // 4 samples (even): the implementation returns the upper median.
        let durations = BenchmarkDurations {
            timing_method: TimingMethod::System,
            durations: vec![
                Duration::new(18, 5),
                Duration::new(20, 0),
                Duration::new(30, 0),
                Duration::new(40, 0),
            ],
        };
        let (min, max, median) = durations.min_max_median_durations();
        // 18 s + 5 ns.
        assert_eq!(min, Duration::from_nanos(18000000005_u64));
        assert_eq!(max, Duration::from_secs(40));
        assert_eq!(median, Duration::from_secs(30));
    }

    #[test]
    fn test_mean_duration() {
        let durations = BenchmarkDurations {
            timing_method: TimingMethod::System,
            durations: vec![
                Duration::new(10, 0),
                Duration::new(20, 0),
                Duration::new(30, 0),
                Duration::new(40, 0),
            ],
        };
        let mean = durations.mean_duration();
        // (10 + 20 + 30 + 40) / 4 = 25.
        assert_eq!(mean, Duration::from_secs(25));
    }

    #[test]
    fn test_variance_duration() {
        let durations = BenchmarkDurations {
            timing_method: TimingMethod::System,
            durations: vec![
                Duration::new(10, 0),
                Duration::new(20, 0),
                Duration::new(30, 0),
                Duration::new(40, 0),
                Duration::new(50, 0),
            ],
        };
        let mean = durations.mean_duration();
        let variance = durations.variance_duration(mean);
        // Population variance: mean = 30, deviations squared sum to 1000,
        // divided by 5 samples = 200.
        assert_eq!(variance, Duration::from_secs(200));
    }
}