// mobench_sdk/runner.rs
1//! Benchmark execution runtime
2//!
3//! This module provides the execution engine that runs registered benchmarks
4//! and collects timing data.
5
6use crate::registry::{find_benchmark, list_benchmark_names};
7use crate::timing::BenchSpec;
8use crate::types::{BenchError, RunnerReport};
9
10/// Runs a benchmark by name
11///
12/// Looks up the benchmark function in the registry and executes it with the
13/// given specification. The benchmark's runner handles all timing, including
14/// any setup/teardown logic.
15///
16/// # Arguments
17///
18/// * `spec` - Benchmark specification including function name, iterations, and warmup
19///
20/// # Returns
21///
22/// * `Ok(BenchReport)` - Report containing timing samples
23/// * `Err(BenchError)` - If the function is not found or execution fails
24///
25/// # Example
26///
27/// ```no_run
28/// use mobench_sdk::{BenchSpec, run_benchmark};
29///
30/// fn main() -> Result<(), Box<dyn std::error::Error>> {
31/// let spec = BenchSpec::new("my_benchmark", 100, 10)?;
32///
33/// let report = run_benchmark(spec)?;
34/// println!("Mean: {} ns", report.mean_ns());
35/// Ok(())
36/// }
37/// ```
38pub fn run_benchmark(spec: BenchSpec) -> Result<RunnerReport, BenchError> {
39 // Find the benchmark function in the registry
40 let bench_fn = find_benchmark(&spec.name).ok_or_else(|| {
41 let available = list_benchmark_names()
42 .into_iter()
43 .map(String::from)
44 .collect();
45 BenchError::UnknownFunction(spec.name.clone(), available)
46 })?;
47
48 // Call the runner directly - it handles setup/teardown and timing internally
49 let report = (bench_fn.runner)(spec)?;
50
51 Ok(report)
52}
53
/// Builder for constructing and running benchmarks
///
/// Provides a fluent interface for configuring benchmark parameters.
///
/// # Example
///
/// ```no_run
/// use mobench_sdk::BenchmarkBuilder;
///
/// fn main() -> Result<(), Box<dyn std::error::Error>> {
///     let report = BenchmarkBuilder::new("my_benchmark")
///         .iterations(100)
///         .warmup(10)
///         .run()?;
///
///     println!("Median: {} ns", report.median_ns());
///     Ok(())
/// }
/// ```
#[derive(Debug, Clone)]
pub struct BenchmarkBuilder {
    /// Name of the registered benchmark function to execute.
    function: String,
    /// Number of measured iterations (defaults to 100 in `new`).
    iterations: u32,
    /// Number of unmeasured warmup iterations (defaults to 10 in `new`).
    warmup: u32,
}
79
80impl BenchmarkBuilder {
81 /// Creates a new benchmark builder
82 ///
83 /// # Arguments
84 ///
85 /// * `function` - Name of the benchmark function to run
86 pub fn new(function: impl Into<String>) -> Self {
87 Self {
88 function: function.into(),
89 iterations: 100, // Default
90 warmup: 10, // Default
91 }
92 }
93
94 /// Sets the number of iterations
95 ///
96 /// # Arguments
97 ///
98 /// * `n` - Number of times to run the benchmark (after warmup)
99 pub fn iterations(mut self, n: u32) -> Self {
100 self.iterations = n;
101 self
102 }
103
104 /// Sets the number of warmup iterations
105 ///
106 /// # Arguments
107 ///
108 /// * `n` - Number of warmup runs (not measured)
109 pub fn warmup(mut self, n: u32) -> Self {
110 self.warmup = n;
111 self
112 }
113
114 /// Runs the benchmark and returns the report
115 ///
116 /// # Returns
117 ///
118 /// * `Ok(BenchReport)` - Report containing timing samples
119 /// * `Err(BenchError)` - If the function is not found or execution fails
120 pub fn run(self) -> Result<RunnerReport, BenchError> {
121 let spec = BenchSpec {
122 name: self.function,
123 iterations: self.iterations,
124 warmup: self.warmup,
125 };
126
127 run_benchmark(spec)
128 }
129}
130
#[cfg(test)]
mod tests {
    use super::*;

    // A fresh builder should carry the documented defaults.
    #[test]
    fn test_builder_defaults() {
        let b = BenchmarkBuilder::new("test_fn");
        assert_eq!((b.iterations, b.warmup), (100, 10));
    }

    // Setter order is irrelevant; each call overrides its own field only.
    #[test]
    fn test_builder_customization() {
        let b = BenchmarkBuilder::new("test_fn").warmup(5).iterations(50);
        assert_eq!((b.iterations, b.warmup), (50, 5));
    }
}