// mobench_sdk/runner.rs
1//! Benchmark execution runtime
2//!
3//! This module provides the execution engine that runs registered benchmarks
4//! and collects timing data.
5
6use crate::registry::find_benchmark;
7use crate::types::{BenchError, BenchSpec, RunnerReport};
8use mobench_runner::run_closure;
9
10/// Runs a benchmark by name
11///
12/// Looks up the benchmark function in the registry and executes it with the
13/// given specification.
14///
15/// # Arguments
16///
17/// * `spec` - Benchmark specification including function name, iterations, and warmup
18///
19/// # Returns
20///
21/// * `Ok(BenchReport)` - Report containing timing samples
22/// * `Err(BenchError)` - If the function is not found or execution fails
23///
24/// # Example
25///
26/// ```ignore
27/// use mobench_sdk::{BenchSpec, run_benchmark};
28///
29/// let spec = BenchSpec {
30/// name: "my_benchmark".to_string(),
31/// iterations: 100,
32/// warmup: 10,
33/// };
34///
35/// let report = run_benchmark(spec)?;
36/// println!("Mean: {} ns", report.mean());
37/// ```
38pub fn run_benchmark(spec: BenchSpec) -> Result<RunnerReport, BenchError> {
39 // Find the benchmark function in the registry
40 let bench_fn =
41 find_benchmark(&spec.name).ok_or_else(|| BenchError::UnknownFunction(spec.name.clone()))?;
42
43 // Create a closure that invokes the registered function
44 let closure =
45 || (bench_fn.invoke)(&[]).map_err(|e| mobench_runner::BenchError::Execution(e.to_string()));
46
47 // Run the benchmark using bench-runner's timing infrastructure
48 let report = run_closure(spec, closure)?;
49
50 Ok(report)
51}
52
/// Builder for constructing and running benchmarks
///
/// Provides a fluent interface for configuring benchmark parameters.
///
/// # Example
///
/// ```ignore
/// use mobench_sdk::BenchmarkBuilder;
///
/// let report = BenchmarkBuilder::new("my_benchmark")
///     .iterations(100)
///     .warmup(10)
///     .run()?;
/// ```
#[derive(Debug, Clone)]
pub struct BenchmarkBuilder {
    // Name of the registered benchmark function to look up and run.
    function: String,
    // Number of measured iterations (set to 100 by default in `new`).
    iterations: u32,
    // Number of unmeasured warmup runs (set to 10 by default in `new`).
    warmup: u32,
}
73
74impl BenchmarkBuilder {
75 /// Creates a new benchmark builder
76 ///
77 /// # Arguments
78 ///
79 /// * `function` - Name of the benchmark function to run
80 pub fn new(function: impl Into<String>) -> Self {
81 Self {
82 function: function.into(),
83 iterations: 100, // Default
84 warmup: 10, // Default
85 }
86 }
87
88 /// Sets the number of iterations
89 ///
90 /// # Arguments
91 ///
92 /// * `n` - Number of times to run the benchmark (after warmup)
93 pub fn iterations(mut self, n: u32) -> Self {
94 self.iterations = n;
95 self
96 }
97
98 /// Sets the number of warmup iterations
99 ///
100 /// # Arguments
101 ///
102 /// * `n` - Number of warmup runs (not measured)
103 pub fn warmup(mut self, n: u32) -> Self {
104 self.warmup = n;
105 self
106 }
107
108 /// Runs the benchmark and returns the report
109 ///
110 /// # Returns
111 ///
112 /// * `Ok(BenchReport)` - Report containing timing samples
113 /// * `Err(BenchError)` - If the function is not found or execution fails
114 pub fn run(self) -> Result<RunnerReport, BenchError> {
115 let spec = BenchSpec {
116 name: self.function,
117 iterations: self.iterations,
118 warmup: self.warmup,
119 };
120
121 run_benchmark(spec)
122 }
123}
124
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_builder_defaults() {
        // A freshly constructed builder carries the documented defaults.
        let builder = BenchmarkBuilder::new("test_fn");
        assert_eq!((builder.iterations, builder.warmup), (100, 10));
    }

    #[test]
    fn test_builder_customization() {
        // Chained setters override the defaults regardless of call order.
        let builder = BenchmarkBuilder::new("test_fn").warmup(5).iterations(50);
        assert_eq!((builder.iterations, builder.warmup), (50, 5));
    }
}