mobench_sdk/
timing.rs

//! Lightweight benchmarking harness for mobile platforms.
//!
//! This module provides the core timing infrastructure for the mobench ecosystem.
//! It was previously a separate crate (`mobench-runner`) but has been consolidated
//! into `mobench-sdk` for a simpler dependency graph.
//!
//! The module is designed to be minimal and portable, with no platform-specific
//! dependencies, making it suitable for compilation to Android and iOS targets.
//!
//! ## Overview
//!
//! The timing module executes benchmark functions with:
//! - Configurable warmup iterations
//! - Precise nanosecond-resolution timing
//! - Simple, serializable results
//!
//! ## Usage
//!
//! Most users should use this via the higher-level [`crate::run_benchmark`] function
//! or [`crate::BenchmarkBuilder`]. Direct usage is for custom integrations:
//!
//! ```
//! use mobench_sdk::timing::{BenchSpec, run_closure, TimingError};
//!
//! // Define a benchmark specification
//! let spec = BenchSpec::new("my_benchmark", 100, 10)?;
//!
//! // Run the benchmark
//! let report = run_closure(spec, || {
//!     // Your benchmark code
//!     let sum: u64 = (0..1000).sum();
//!     std::hint::black_box(sum);
//!     Ok(())
//! })?;
//!
//! // Analyze results
//! let mean_ns = report.samples.iter()
//!     .map(|s| s.duration_ns)
//!     .sum::<u64>() / report.samples.len() as u64;
//!
//! println!("Mean: {} ns", mean_ns);
//! # Ok::<(), TimingError>(())
//! ```
//!
//! ## Types
//!
//! | Type | Description |
//! |------|-------------|
//! | [`BenchSpec`] | Benchmark configuration (name, iterations, warmup) |
//! | [`BenchSample`] | Single timing measurement in nanoseconds |
//! | [`BenchReport`] | Complete results with all samples |
//! | [`TimingError`] | Error conditions during benchmarking |
//!
//! ## Feature Flags
//!
//! This module is always available. When using `mobench-sdk` with default features,
//! you also get build automation and template generation. For minimal binary size
//! (e.g., on mobile targets), use the `runner-only` feature:
//!
//! ```toml
//! [dependencies]
//! mobench-sdk = { version = "0.1", default-features = false, features = ["runner-only"] }
//! ```

65use serde::{Deserialize, Serialize};
66use std::time::{Duration, Instant};
67use thiserror::Error;
68
69/// Benchmark specification defining what and how to benchmark.
70///
71/// Contains the benchmark name, number of measurement iterations, and
72/// warmup iterations to perform before measuring.
73///
74/// # Example
75///
76/// ```
77/// use mobench_sdk::timing::BenchSpec;
78///
79/// // Create a spec for 100 iterations with 10 warmup runs
80/// let spec = BenchSpec::new("sorting_benchmark", 100, 10)?;
81///
82/// assert_eq!(spec.name, "sorting_benchmark");
83/// assert_eq!(spec.iterations, 100);
84/// assert_eq!(spec.warmup, 10);
85/// # Ok::<(), mobench_sdk::timing::TimingError>(())
86/// ```
87///
88/// # Serialization
89///
90/// `BenchSpec` implements `Serialize` and `Deserialize` for JSON persistence:
91///
92/// ```
93/// use mobench_sdk::timing::BenchSpec;
94///
95/// let spec = BenchSpec {
96///     name: "my_bench".to_string(),
97///     iterations: 50,
98///     warmup: 5,
99/// };
100///
101/// let json = serde_json::to_string(&spec)?;
102/// let restored: BenchSpec = serde_json::from_str(&json)?;
103///
104/// assert_eq!(spec.name, restored.name);
105/// # Ok::<(), serde_json::Error>(())
106/// ```
107#[derive(Clone, Debug, Serialize, Deserialize)]
108pub struct BenchSpec {
109    /// Name of the benchmark, typically the fully-qualified function name.
110    ///
111    /// Examples: `"my_crate::fibonacci"`, `"sorting_benchmark"`
112    pub name: String,
113
114    /// Number of iterations to measure.
115    ///
116    /// Each iteration produces one [`BenchSample`]. Must be greater than zero.
117    pub iterations: u32,
118
119    /// Number of warmup iterations before measurement.
120    ///
121    /// Warmup iterations are not recorded. They allow CPU caches to warm
122    /// and any JIT compilation to complete. Can be zero.
123    pub warmup: u32,
124}
125
126impl BenchSpec {
127    /// Creates a new benchmark specification.
128    ///
129    /// # Arguments
130    ///
131    /// * `name` - Name identifier for the benchmark
132    /// * `iterations` - Number of measured iterations (must be > 0)
133    /// * `warmup` - Number of warmup iterations (can be 0)
134    ///
135    /// # Errors
136    ///
137    /// Returns [`TimingError::NoIterations`] if `iterations` is zero.
138    ///
139    /// # Example
140    ///
141    /// ```
142    /// use mobench_sdk::timing::BenchSpec;
143    ///
144    /// let spec = BenchSpec::new("test", 100, 10)?;
145    /// assert_eq!(spec.iterations, 100);
146    ///
147    /// // Zero iterations is an error
148    /// let err = BenchSpec::new("test", 0, 10);
149    /// assert!(err.is_err());
150    /// # Ok::<(), mobench_sdk::timing::TimingError>(())
151    /// ```
152    pub fn new(name: impl Into<String>, iterations: u32, warmup: u32) -> Result<Self, TimingError> {
153        if iterations == 0 {
154            return Err(TimingError::NoIterations);
155        }
156
157        Ok(Self {
158            name: name.into(),
159            iterations,
160            warmup,
161        })
162    }
163}
164
165/// A single timing sample from a benchmark iteration.
166///
167/// Contains the elapsed time in nanoseconds for one execution of the
168/// benchmark function.
169///
170/// # Example
171///
172/// ```
173/// use mobench_sdk::timing::BenchSample;
174///
175/// let sample = BenchSample { duration_ns: 1_500_000 };
176///
177/// // Convert to milliseconds
178/// let ms = sample.duration_ns as f64 / 1_000_000.0;
179/// assert_eq!(ms, 1.5);
180/// ```
181#[derive(Clone, Debug, Serialize, Deserialize)]
182pub struct BenchSample {
183    /// Duration of the iteration in nanoseconds.
184    ///
185    /// Measured using [`std::time::Instant`] for monotonic, high-resolution timing.
186    pub duration_ns: u64,
187}
188
189impl BenchSample {
190    /// Creates a sample from a [`Duration`].
191    fn from_duration(duration: Duration) -> Self {
192        Self {
193            duration_ns: duration.as_nanos() as u64,
194        }
195    }
196}
197
198/// Complete benchmark report with all timing samples.
199///
200/// Contains the original specification and all collected samples.
201/// Can be serialized to JSON for storage or transmission.
202///
203/// # Example
204///
205/// ```
206/// use mobench_sdk::timing::{BenchSpec, run_closure};
207///
208/// let spec = BenchSpec::new("example", 50, 5)?;
209/// let report = run_closure(spec, || {
210///     std::hint::black_box(42);
211///     Ok(())
212/// })?;
213///
214/// // Calculate statistics
215/// let samples: Vec<u64> = report.samples.iter()
216///     .map(|s| s.duration_ns)
217///     .collect();
218///
219/// let min = samples.iter().min().unwrap();
220/// let max = samples.iter().max().unwrap();
221/// let mean = samples.iter().sum::<u64>() / samples.len() as u64;
222///
223/// println!("Min: {} ns, Max: {} ns, Mean: {} ns", min, max, mean);
224/// # Ok::<(), mobench_sdk::timing::TimingError>(())
225/// ```
226#[derive(Clone, Debug, Serialize, Deserialize)]
227pub struct BenchReport {
228    /// The specification used for this benchmark run.
229    pub spec: BenchSpec,
230
231    /// All collected timing samples.
232    ///
233    /// The length equals `spec.iterations`. Samples are in execution order.
234    pub samples: Vec<BenchSample>,
235}
236
237/// Errors that can occur during benchmark execution.
238///
239/// # Example
240///
241/// ```
242/// use mobench_sdk::timing::{BenchSpec, TimingError};
243///
244/// // Zero iterations produces an error
245/// let result = BenchSpec::new("test", 0, 10);
246/// assert!(matches!(result, Err(TimingError::NoIterations)));
247/// ```
248#[derive(Debug, Error)]
249pub enum TimingError {
250    /// The iteration count was zero.
251    ///
252    /// At least one iteration is required to produce a measurement.
253    #[error("iterations must be greater than zero")]
254    NoIterations,
255
256    /// The benchmark function failed during execution.
257    ///
258    /// Contains a description of the failure.
259    #[error("benchmark function failed: {0}")]
260    Execution(String),
261}
262
263/// Runs a benchmark by executing a closure repeatedly.
264///
265/// This is the core benchmarking function. It:
266///
267/// 1. Executes the closure `spec.warmup` times without recording
268/// 2. Executes the closure `spec.iterations` times, recording each duration
269/// 3. Returns a [`BenchReport`] with all samples
270///
271/// # Arguments
272///
273/// * `spec` - Benchmark configuration specifying iterations and warmup
274/// * `f` - Closure to benchmark; must return `Result<(), TimingError>`
275///
276/// # Returns
277///
278/// A [`BenchReport`] containing all timing samples, or a [`TimingError`] if
279/// the benchmark fails.
280///
281/// # Example
282///
283/// ```
284/// use mobench_sdk::timing::{BenchSpec, run_closure, TimingError};
285///
286/// let spec = BenchSpec::new("sum_benchmark", 100, 10)?;
287///
288/// let report = run_closure(spec, || {
289///     let sum: u64 = (0..1000).sum();
290///     std::hint::black_box(sum);
291///     Ok(())
292/// })?;
293///
294/// assert_eq!(report.samples.len(), 100);
295///
296/// // Calculate mean duration
297/// let total_ns: u64 = report.samples.iter().map(|s| s.duration_ns).sum();
298/// let mean_ns = total_ns / report.samples.len() as u64;
299/// println!("Mean: {} ns", mean_ns);
300/// # Ok::<(), TimingError>(())
301/// ```
302///
303/// # Error Handling
304///
305/// If the closure returns an error, the benchmark stops immediately:
306///
307/// ```
308/// use mobench_sdk::timing::{BenchSpec, run_closure, TimingError};
309///
310/// let spec = BenchSpec::new("failing_bench", 100, 0)?;
311///
312/// let result = run_closure(spec, || {
313///     Err(TimingError::Execution("simulated failure".into()))
314/// });
315///
316/// assert!(result.is_err());
317/// # Ok::<(), TimingError>(())
318/// ```
319///
320/// # Timing Precision
321///
322/// Uses [`std::time::Instant`] for timing, which provides monotonic,
323/// nanosecond-resolution measurements on most platforms.
324pub fn run_closure<F>(spec: BenchSpec, mut f: F) -> Result<BenchReport, TimingError>
325where
326    F: FnMut() -> Result<(), TimingError>,
327{
328    if spec.iterations == 0 {
329        return Err(TimingError::NoIterations);
330    }
331
332    // Warmup phase - not measured
333    for _ in 0..spec.warmup {
334        f()?;
335    }
336
337    // Measurement phase
338    let mut samples = Vec::with_capacity(spec.iterations as usize);
339    for _ in 0..spec.iterations {
340        let start = Instant::now();
341        f()?;
342        samples.push(BenchSample::from_duration(start.elapsed()));
343    }
344
345    Ok(BenchReport { spec, samples })
346}
347
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn runs_benchmark() {
        let spec = BenchSpec::new("noop", 3, 1).unwrap();
        let report = run_closure(spec, || Ok(())).unwrap();

        // All three measured iterations must be present, and the clock
        // should have registered a non-zero duration at least once.
        assert_eq!(report.samples.len(), 3);
        assert!(report.samples.iter().any(|s| s.duration_ns > 0));
    }

    #[test]
    fn rejects_zero_iterations() {
        assert!(matches!(
            BenchSpec::new("test", 0, 10),
            Err(TimingError::NoIterations)
        ));
    }

    #[test]
    fn allows_zero_warmup() {
        let spec = BenchSpec::new("test", 5, 0).unwrap();
        assert_eq!(spec.warmup, 0);

        let report = run_closure(spec, || Ok(())).unwrap();
        assert_eq!(report.samples.len(), 5);
    }

    #[test]
    fn serializes_to_json() {
        let spec = BenchSpec::new("test", 10, 2).unwrap();
        let report = run_closure(spec, || Ok(())).unwrap();

        // Round-trip through JSON and verify the restored report.
        let json = serde_json::to_string(&report).unwrap();
        let restored: BenchReport = serde_json::from_str(&json).unwrap();

        assert_eq!(restored.spec.name, "test");
        assert_eq!(restored.samples.len(), 10);
    }
}