#![allow(unused)]
#![cfg_attr(feature = "document-features", doc = document_features::document_features!())]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![allow(clippy::style, clippy::complexity)]
#![warn(bare_trait_objects)]
#![cfg_attr(feature = "codspeed", allow(unused))]
#[cfg(all(feature = "rayon", target_arch = "wasm32"))]
compile_error!("Rayon cannot be used when targeting wasi32. Try disabling default features.");
use serde::{Deserialize, Serialize};
#[macro_use]
mod macros_private;
#[macro_use]
mod analysis;
mod benchmark;
#[macro_use]
mod benchmark_group;
#[cfg(feature = "codspeed")]
#[macro_use]
pub mod codspeed;
pub mod async_executor;
mod bencher;
mod cli;
mod connection;
mod criterion;
mod error;
mod estimate;
mod format;
mod fs;
mod kde;
pub mod measurement;
pub mod profiler;
mod report;
mod routine;
mod stats;
#[cfg(not(feature = "codspeed"))]
#[macro_use]
mod macros;
#[cfg(feature = "codspeed")]
#[macro_use]
mod macros_codspeed;
use std::{
default::Default,
env,
net::TcpStream,
path::PathBuf,
process::Command,
sync::{Mutex, OnceLock},
time::Duration,
};
#[cfg(feature = "async")]
#[cfg(not(feature = "codspeed"))]
pub use crate::bencher::AsyncBencher;
#[cfg(not(feature = "codspeed"))]
pub use crate::bencher::Bencher;
#[cfg(not(feature = "codspeed"))]
pub use crate::benchmark_group::{BenchmarkGroup, BenchmarkId};
#[cfg(feature = "async")]
#[cfg(feature = "codspeed")]
pub use crate::codspeed::bencher::AsyncBencher;
#[cfg(feature = "codspeed")]
pub use crate::codspeed::bencher::Bencher;
#[cfg(feature = "codspeed")]
pub use crate::codspeed::benchmark_group::{BenchmarkGroup, BenchmarkId};
#[cfg(feature = "codspeed")]
pub use crate::codspeed::criterion::Criterion;
#[cfg(not(feature = "codspeed"))]
pub use crate::criterion::Criterion;
use crate::{
benchmark::BenchmarkConfig,
connection::{Connection, OutgoingMessage},
measurement::{Measurement, WallTime},
profiler::{ExternalProfiler, Profiler},
report::{BencherReport, CliReport, CliVerbosity, Report, ReportContext, Reports},
};
/// Lazily-initialized connection to a running `cargo-criterion` process.
///
/// Returns `Some` only when the `CARGO_CRITERION_PORT` environment variable
/// holds a valid port and a TCP connection to localhost on that port can be
/// established; otherwise `None`. Computed once per process.
fn cargo_criterion_connection() -> &'static Option<Mutex<Connection>> {
    static CARGO_CRITERION_CONNECTION: OnceLock<Option<Mutex<Connection>>> = OnceLock::new();
    CARGO_CRITERION_CONNECTION.get_or_init(|| {
        // Any failure along the way (missing var, bad port, refused
        // connection, handshake error) silently yields None.
        let port: u16 = std::env::var("CARGO_CRITERION_PORT").ok()?.parse().ok()?;
        let stream = TcpStream::connect(("localhost", port)).ok()?;
        let connection = Connection::new(stream).ok()?;
        Some(Mutex::new(connection))
    })
}
/// Directory where criterion writes its reports and saved data.
///
/// Resolution order, computed once per process:
/// 1. `CRITERION_HOME` environment variable, if set;
/// 2. `<cargo target dir>/criterion`, if the target dir can be determined;
/// 3. the relative fallback `target/criterion`.
fn default_output_directory() -> &'static PathBuf {
    static DEFAULT_OUTPUT_DIRECTORY: OnceLock<PathBuf> = OnceLock::new();
    DEFAULT_OUTPUT_DIRECTORY.get_or_init(|| {
        env::var_os("CRITERION_HOME")
            .map(PathBuf::from)
            .or_else(|| cargo_target_directory().map(|dir| dir.join("criterion")))
            .unwrap_or_else(|| PathBuf::from("target/criterion"))
    })
}
/// True when the `CRITERION_DEBUG` environment variable was set at the time
/// of the first call; the result is cached for the rest of the process.
fn debug_enabled() -> bool {
    static FLAG: OnceLock<bool> = OnceLock::new();
    *FLAG.get_or_init(|| std::env::var_os("CRITERION_DEBUG").is_some())
}
/// An identity function that is opaque to the optimizer, preventing it from
/// const-folding away the benchmarked computation.
///
/// Thin wrapper kept for backward compatibility; delegates to
/// [`std::hint::black_box`].
#[inline]
pub fn black_box<T>(dummy: T) -> T {
    let opaque = std::hint::black_box(dummy);
    opaque
}
/// Controls how many iterations are grouped into a single batch in the
/// `iter_batched` family of timing loops (the consumers live in the bencher
/// module, outside this file).
#[derive(Debug, Eq, PartialEq, Copy, Hash, Clone)]
pub enum BatchSize {
    /// Split the iterations into roughly 10 batches.
    SmallInput,
    /// Split the iterations into roughly 1000 batches.
    LargeInput,
    /// Run exactly one iteration per batch.
    PerIteration,
    /// Split the iterations into the given number of batches.
    /// NOTE: a value of 0 panics with a division by zero.
    NumBatches(u64),
    /// Use exactly this many iterations per batch, ignoring the total.
    NumIterations(u64),
    #[doc(hidden)]
    __NonExhaustive,
}
impl BatchSize {
    /// Converts this batch size into the number of iterations each batch
    /// should run, given the total iteration count for the sample.
    ///
    /// # Panics
    /// Panics on `NumBatches(0)` (division by zero) and on
    /// `__NonExhaustive`, which is not a real batch size.
    fn iters_per_batch(self, iters: u64) -> u64 {
        match self {
            // div_ceil replaces the manual `(iters + n - 1) / n` pattern,
            // which could overflow for iteration counts near u64::MAX.
            BatchSize::SmallInput => iters.div_ceil(10),
            BatchSize::LargeInput => iters.div_ceil(1000),
            BatchSize::PerIteration => 1,
            BatchSize::NumBatches(batches) => iters.div_ceil(batches),
            BatchSize::NumIterations(size) => size,
            BatchSize::__NonExhaustive => panic!("__NonExhaustive is not a valid BatchSize."),
        }
    }
}
/// Strategy for how a benchmark run relates to a previously saved baseline.
#[derive(Debug, Clone, Copy)]
pub enum Baseline {
    /// Compare against a saved baseline if one exists; proceed without
    /// comparison otherwise.
    CompareLenient,
    /// Compare against a saved baseline; treat a missing baseline as an
    /// error.
    CompareStrict,
    /// Save these measurements as the new baseline.
    Save,
    /// Discard the measurements instead of saving them.
    Discard,
}
/// Overall run mode for the benchmark binary, selected from the CLI.
#[derive(Debug, Clone)]
pub(crate) enum Mode {
    /// Run benchmarks and collect measurements.
    Benchmark,
    /// List the available benchmarks in the given format instead of running.
    List(ListFormat),
    /// Run benchmarks in test mode (no full measurement).
    Test,
    /// Profile each benchmark for the given duration.
    Profile(Duration),
}
impl Mode {
    /// True when the binary should perform full benchmark measurement.
    pub fn is_benchmark(&self) -> bool {
        match self {
            Mode::Benchmark => true,
            _ => false,
        }
    }
    /// True when listing benchmarks in the terse format.
    pub fn is_terse(&self) -> bool {
        match self {
            Mode::List(ListFormat::Terse) => true,
            _ => false,
        }
    }
}
/// Output format used when listing benchmarks instead of running them.
#[derive(Debug, Clone, Copy)]
pub(crate) enum ListFormat {
    /// Human-readable listing (the default; see the `Default` impl below).
    Pretty,
    /// Compact, machine-friendly listing.
    Terse,
}
impl Default for ListFormat {
fn default() -> Self {
Self::Pretty
}
}
/// Filter deciding which benchmarks in a suite should run.
#[derive(Clone, Debug)]
pub enum BenchmarkFilter {
    /// Run every benchmark.
    AcceptAll,
    /// Run only the benchmark whose id matches this string exactly.
    Exact(String),
    /// Run benchmarks whose id contains this substring.
    Substring(String),
    /// Run no benchmarks at all.
    RejectAll,
}
/// Attempts to determine Cargo's target directory.
///
/// Honors `CARGO_TARGET_DIR` when set; otherwise shells out to
/// `cargo metadata --format-version 1` and reads `target_directory` from its
/// JSON output. Returns `None` when neither source is available.
fn cargo_target_directory() -> Option<PathBuf> {
    #[derive(Deserialize)]
    struct Metadata {
        target_directory: PathBuf,
    }

    if let Some(dir) = env::var_os("CARGO_TARGET_DIR") {
        return Some(PathBuf::from(dir));
    }
    // Fall back to asking cargo itself; CARGO is set by cargo when it
    // invokes the benchmark binary.
    let cargo = env::var_os("CARGO")?;
    let output = Command::new(cargo)
        .args(["metadata", "--format-version", "1"])
        .output()
        .ok()?;
    let metadata: Metadata = serde_json::from_slice(&output.stdout).ok()?;
    Some(metadata.target_directory)
}
/// Amount of data processed per iteration, used so reports can express
/// results as throughput rather than raw time.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum Throughput {
    /// Bytes processed per iteration, reported in binary multiples
    /// (KiB = 1024 bytes).
    Bytes(u64),
    /// Bytes processed per iteration, reported in decimal multiples
    /// (KB = 1000 bytes).
    BytesDecimal(u64),
    /// Number of elements processed per iteration.
    Elements(u64),
}
/// Scale applied to plot axes in generated reports.
#[derive(Debug, Clone, Copy)]
pub enum AxisScale {
    /// Linear axis scale.
    Linear,
    /// Logarithmic axis scale.
    Logarithmic,
}
/// User-selectable strategy for distributing iterations across samples.
/// Resolved to an [`ActualSamplingMode`] before measurement.
#[derive(Debug, Clone, Copy)]
pub enum SamplingMode {
    /// Choose between `Linear` and `Flat` automatically based on warmup
    /// timing (see `choose_sampling_mode`).
    Auto,
    /// Iteration counts grow linearly across samples.
    Linear,
    /// Every sample runs the same number of iterations.
    Flat,
}
impl SamplingMode {
    /// Resolves this mode into a concrete [`ActualSamplingMode`].
    ///
    /// `Linear` and `Flat` map directly. `Auto` estimates how long a linear
    /// schedule would take given the mean iteration time observed during
    /// warmup (`warmup_mean_execution_time`, nanoseconds) and falls back to
    /// flat sampling when that estimate exceeds twice `target_time`.
    pub(crate) fn choose_sampling_mode(
        &self,
        warmup_mean_execution_time: f64,
        sample_count: u64,
        target_time: f64,
    ) -> ActualSamplingMode {
        if let SamplingMode::Linear = self {
            return ActualSamplingMode::Linear;
        }
        if let SamplingMode::Flat = self {
            return ActualSamplingMode::Flat;
        }
        // Auto: a linear schedule runs sample i for i*d iterations, so the
        // total is d * n(n+1)/2 iterations.
        let total_runs = sample_count * (sample_count + 1) / 2;
        let d = (target_time / warmup_mean_execution_time / total_runs as f64).ceil() as u64;
        let expected_ns = total_runs as f64 * d as f64 * warmup_mean_execution_time;
        if expected_ns > (2.0 * target_time) {
            ActualSamplingMode::Flat
        } else {
            ActualSamplingMode::Linear
        }
    }
}
/// Concrete sampling mode after [`SamplingMode::Auto`] has been resolved.
/// Serialized as part of saved sample data (see `SavedSample`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub(crate) enum ActualSamplingMode {
    /// Iteration counts grow linearly across samples.
    Linear,
    /// Every sample runs the same number of iterations.
    Flat,
}
impl ActualSamplingMode {
    /// Computes the iteration count for each of the `sample_count` samples,
    /// sized so the whole run approximately fits into `target_time`.
    ///
    /// `warmup_mean_execution_time` is the mean time of one iteration in
    /// nanoseconds, as estimated during warmup. When even the minimum
    /// schedule cannot fit into the target time, a warning with suggested
    /// remedies is printed to stderr, but the schedule is still returned.
    pub(crate) fn iteration_counts(
        &self,
        warmup_mean_execution_time: f64,
        sample_count: u64,
        target_time: &Duration,
    ) -> Vec<u64> {
        match self {
            ActualSamplingMode::Linear => {
                let n = sample_count;
                let met = warmup_mean_execution_time;
                let m_ns = target_time.as_nanos();
                // Sample i runs i*d iterations, so the grand total across
                // all samples is d * n(n+1)/2.
                let total_runs = n * (n + 1) / 2;
                let d = ((m_ns as f64 / met / total_runs as f64).ceil() as u64).max(1);
                let expected_ns = total_runs as f64 * d as f64 * met;
                // d == 1 means even the smallest possible linear schedule
                // already exceeds the target time; warn the user.
                if d == 1 {
                    let recommended_sample_size =
                        ActualSamplingMode::recommend_linear_sample_size(m_ns as f64, met);
                    let actual_time = Duration::from_nanos(expected_ns as u64);
                    eprint!(
                        "\nWarning: Unable to complete {} samples in {:.1?}. You may wish to increase target time to {:.1?}",
                        n, target_time, actual_time
                    );
                    if recommended_sample_size != n {
                        eprintln!(
                            ", enable flat sampling, or reduce sample count to {}.",
                            recommended_sample_size
                        );
                    } else {
                        eprintln!(" or enable flat sampling.");
                    }
                }
                // Iteration counts d, 2d, 3d, ..., nd.
                (1..(n + 1)).map(|a| a * d).collect::<Vec<u64>>()
            }
            ActualSamplingMode::Flat => {
                let n = sample_count;
                let met = warmup_mean_execution_time;
                let m_ns = target_time.as_nanos() as f64;
                // Every sample gets an equal share of the time budget.
                let time_per_sample = m_ns / (n as f64);
                let iterations_per_sample = ((time_per_sample / met).ceil() as u64).max(1);
                let expected_ns = met * (iterations_per_sample * n) as f64;
                // One iteration per sample is the minimum; if that still
                // exceeds the target time, warn the user.
                if iterations_per_sample == 1 {
                    let recommended_sample_size =
                        ActualSamplingMode::recommend_flat_sample_size(m_ns, met);
                    let actual_time = Duration::from_nanos(expected_ns as u64);
                    eprint!(
                        "\nWarning: Unable to complete {} samples in {:.1?}. You may wish to increase target time to {:.1?}",
                        n, target_time, actual_time
                    );
                    if recommended_sample_size != n {
                        eprintln!(", or reduce sample count to {}.", recommended_sample_size);
                    } else {
                        eprintln!(".");
                    }
                }
                vec![iterations_per_sample; n as usize]
            }
        }
    }
    /// True for the linear mode. (Currently unreferenced in this file;
    /// kept alive by the crate-level `allow(unused)`.)
    fn is_linear(&self) -> bool {
        matches!(self, ActualSamplingMode::Linear)
    }
    /// Suggests a sample count that fits `target_time` (ns) under linear
    /// sampling with d == 1, rounded down to a multiple of 10, minimum 10.
    fn recommend_linear_sample_size(target_time: f64, met: f64) -> u64 {
        // Approximately solve n(n+1)/2 = target_time/met for n.
        let c = target_time / met;
        let sample_size = (-1.0 + (4.0 * c).sqrt()) / 2.0;
        let sample_size = sample_size as u64;
        // Round down to a multiple of 10, but never below 10.
        let sample_size = (sample_size / 10) * 10;
        if sample_size < 10 { 10 } else { sample_size }
    }
    /// Suggests a sample count that fits `target_time` (ns) under flat
    /// sampling with one iteration per sample, rounded down to a multiple
    /// of 10, minimum 10.
    fn recommend_flat_sample_size(target_time: f64, met: f64) -> u64 {
        let sample_size = (target_time / met) as u64;
        // Round down to a multiple of 10, but never below 10.
        let sample_size = (sample_size / 10) * 10;
        if sample_size < 10 { 10 } else { sample_size }
    }
}
/// Serialized record of one benchmark's raw measurements.
#[derive(Debug, Serialize, Deserialize)]
pub(crate) struct SavedSample {
    // Sampling mode the measurements were collected under.
    sampling_mode: ActualSamplingMode,
    // Iteration count of each sample (stored as f64 for analysis).
    iters: Vec<f64>,
    // Measured time of each sample.
    times: Vec<f64>,
}
/// Entry point invoked by the benchmark-harness macros: runs every
/// registered benchmark function, then prints the final summary using a
/// CLI-configured `Criterion` instance.
#[doc(hidden)]
pub fn runner(benches: &[&dyn Fn()]) {
    benches.iter().for_each(|bench| bench());
    crate::criterion::Criterion::default()
        .configure_from_args()
        .final_summary();
}