use crate::errors::{performance_error, Result};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH};
/// A stored reference measurement for a single benchmark, persisted as JSON
/// by `RegressionDetector` and compared against later runs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceBaseline {
// Name of the benchmark this baseline belongs to.
pub benchmark_name: String,
// Mean execution time in nanoseconds.
pub mean_time_ns: u64,
// Standard deviation of execution time in nanoseconds
// (0 for a freshly recorded baseline; refined by `update_baseline`).
pub std_dev_ns: u64,
// Throughput at baseline time (operations per second), if measured.
pub throughput_ops_per_sec: Option<f64>,
// Memory usage at baseline time in bytes, if measured.
pub memory_usage_bytes: Option<u64>,
// Unix timestamp (seconds) when the baseline was recorded or last updated.
pub timestamp: u64,
// Git commit hash at recording time, if `git rev-parse HEAD` succeeded.
pub commit_hash: Option<String>,
// Snapshot of the host hardware captured when the baseline was recorded.
pub hardware_config: HardwareConfig,
}
/// Best-effort snapshot of the host machine, stored alongside each baseline
/// so that cross-machine comparisons can be recognized as such.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HardwareConfig {
// CPU model string, if an OS-specific probe succeeded.
pub cpu_model: Option<String>,
// Number of logical CPUs (from `num_cpus::get()`).
pub cpu_cores: u32,
// Total system memory in bytes (probed, or a CPU-count-based guess).
pub system_memory_bytes: u64,
// GPU description, if any probe (nvidia-smi/rocm-smi/lspci/...) succeeded.
pub gpu_info: Option<String>,
// OS identifier plus version string, e.g. "linux 6.1".
pub os: String,
}
/// Outcome of comparing one measurement against its stored baseline.
#[derive(Debug, Clone)]
pub struct RegressionResult {
// True if any enabled metric regressed past the configured threshold.
pub is_regression: bool,
// Severity of the worst regressed metric, capped at 1.0
// (0.0 when no regression was detected).
pub severity: f64,
// Relative execution-time change versus the baseline, in percent
// (positive = slower, negative = faster).
pub performance_change_percent: f64,
// The measurement that was checked.
pub current_measurement: PerformanceMeasurement,
// A clone of the baseline it was compared against.
pub baseline: PerformanceBaseline,
// Human-readable summary of which metrics regressed (or that none did).
pub analysis: String,
}
/// A single benchmark observation fed into the detector.
#[derive(Debug, Clone)]
pub struct PerformanceMeasurement {
// Wall-clock execution time in nanoseconds.
pub time_ns: u64,
// Measured throughput (operations per second), if available.
pub throughput_ops_per_sec: Option<f64>,
// Measured memory usage in bytes, if available.
pub memory_usage_bytes: Option<u64>,
}
/// Tuning knobs for regression detection.
#[derive(Debug, Clone)]
pub struct RegressionConfig {
// Relative change that counts as a regression (0.05 = 5%).
pub regression_threshold: f64,
// NOTE(review): not read by any logic visible in this file — presumably
// intended for noise filtering via std-dev bands; confirm before use.
pub std_dev_threshold: f64,
// Whether memory-usage increases are checked in `check_regression`.
pub check_memory_regression: bool,
// Whether throughput decreases are checked in `check_regression`.
pub check_throughput_regression: bool,
// NOTE(review): also unused by the visible detection logic; baselines are
// currently built from a single measurement. Verify intended semantics.
pub min_measurements: usize,
}
impl Default for RegressionConfig {
fn default() -> Self {
Self {
regression_threshold: 0.05, std_dev_threshold: 2.0, check_memory_regression: true,
check_throughput_regression: true,
min_measurements: 5,
}
}
}
/// Detects performance regressions by comparing measurements against
/// JSON-persisted baselines keyed by benchmark name.
pub struct RegressionDetector {
// Thresholds and feature toggles for detection.
config: RegressionConfig,
// Path of the JSON file baselines are loaded from and saved to.
storage_path: PathBuf,
// In-memory baseline store, mirrored to `storage_path` on every change.
baselines: HashMap<String, PerformanceBaseline>,
}
impl RegressionDetector {
/// Creates a detector that persists baselines at `storage_path`.
///
/// Ensures the parent directory of `storage_path` exists, then loads any
/// previously saved baselines from disk.
///
/// # Errors
/// Returns a performance error if the storage directory cannot be created,
/// or if an existing baselines file cannot be read or parsed.
pub fn new(storage_path: impl AsRef<Path>, config: RegressionConfig) -> Result<Self> {
    let path = storage_path.as_ref().to_path_buf();
    match path.parent() {
        Some(dir) => std::fs::create_dir_all(dir).map_err(|e| {
            performance_error(format!(
                "Failed to create baseline storage directory: {}",
                e
            ))
        })?,
        None => {},
    }
    let mut detector = Self {
        baselines: HashMap::new(),
        storage_path: path,
        config,
    };
    detector.load_baselines()?;
    Ok(detector)
}
/// Records (or overwrites) the baseline for `benchmark_name` from a single
/// measurement and persists all baselines to disk.
///
/// The new baseline uses the measurement's time as the mean with zero
/// standard deviation, stamps the current Unix time, and attaches the git
/// commit hash (if available) plus a hardware snapshot.
pub fn record_baseline(
&mut self,
benchmark_name: impl Into<String>,
measurement: PerformanceMeasurement,
) -> Result<()> {
let benchmark_name = benchmark_name.into();
let hardware_config = Self::detect_hardware_config()?;
let baseline = PerformanceBaseline {
benchmark_name: benchmark_name.clone(),
mean_time_ns: measurement.time_ns,
// A single sample has no spread; `update_baseline` refines this later.
std_dev_ns: 0, throughput_ops_per_sec: measurement.throughput_ops_per_sec,
memory_usage_bytes: measurement.memory_usage_bytes,
timestamp: SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("SystemTime should be after UNIX_EPOCH")
.as_secs(),
commit_hash: Self::get_git_commit_hash(),
hardware_config,
};
self.baselines.insert(benchmark_name, baseline);
self.save_baselines()?;
Ok(())
}
/// Compares `current_measurement` against the stored baseline for
/// `benchmark_name`.
///
/// Returns `Ok(None)` when no baseline exists for the benchmark. Otherwise
/// reports whether any enabled metric (execution time, throughput, memory)
/// regressed past `regression_threshold`, with a human-readable `analysis`.
///
/// When a regression is flagged, `severity` is in `(0.0, 1.0]`: it scales
/// with how far the worst metric's relative change exceeds the threshold and
/// saturates once the excess reaches one extra threshold width.
pub fn check_regression(
    &self,
    benchmark_name: &str,
    current_measurement: PerformanceMeasurement,
) -> Result<Option<RegressionResult>> {
    let baseline = match self.baselines.get(benchmark_name) {
        Some(baseline) => baseline,
        // No baseline recorded yet: nothing to compare against.
        None => return Ok(None),
    };
    // Relative execution-time change; positive means slower than baseline.
    // Guard against a zero-time baseline to avoid dividing by zero.
    let time_change = if baseline.mean_time_ns > 0 {
        (current_measurement.time_ns as f64 - baseline.mean_time_ns as f64)
            / baseline.mean_time_ns as f64
    } else {
        0.0
    };
    let threshold = self.config.regression_threshold;
    // Severity for a change that already exceeded the threshold, in (0, 1].
    // Bug fix: the previous formula `(change / threshold).min(1.0)` was only
    // evaluated when `change > threshold`, so the ratio was always > 1 and
    // severity came out as exactly 1.0 for every regression, making the
    // field degenerate. Scaling by the excess over the threshold yields a
    // graded value instead.
    let severity_of = |change: f64| ((change - threshold) / threshold).min(1.0);
    let mut is_regression = false;
    let mut severity: f64 = 0.0;
    let mut analysis_parts = Vec::new();
    if time_change > threshold {
        is_regression = true;
        severity = severity_of(time_change);
        analysis_parts.push(format!(
            "Execution time increased by {:.1}% (threshold: {:.1}%)",
            time_change * 100.0,
            threshold * 100.0
        ));
    }
    if self.config.check_throughput_regression {
        if let (Some(current_throughput), Some(baseline_throughput)) = (
            current_measurement.throughput_ops_per_sec,
            baseline.throughput_ops_per_sec,
        ) {
            // Positive when throughput dropped relative to the baseline.
            let throughput_change =
                (baseline_throughput - current_throughput) / baseline_throughput;
            if throughput_change > threshold {
                is_regression = true;
                severity = severity.max(severity_of(throughput_change));
                analysis_parts.push(format!(
                    "Throughput decreased by {:.1}% (threshold: {:.1}%)",
                    throughput_change * 100.0,
                    threshold * 100.0
                ));
            }
        }
    }
    if self.config.check_memory_regression {
        if let (Some(current_memory), Some(baseline_memory)) = (
            current_measurement.memory_usage_bytes,
            baseline.memory_usage_bytes,
        ) {
            // Positive when memory usage grew; guard zero baseline.
            let memory_change = if baseline_memory > 0 {
                (current_memory as f64 - baseline_memory as f64) / baseline_memory as f64
            } else {
                0.0
            };
            if memory_change > threshold {
                is_regression = true;
                severity = severity.max(severity_of(memory_change));
                analysis_parts.push(format!(
                    "Memory usage increased by {:.1}% (threshold: {:.1}%)",
                    memory_change * 100.0,
                    threshold * 100.0
                ));
            }
        }
    }
    let analysis = if analysis_parts.is_empty() {
        "No performance regression detected".to_string()
    } else {
        analysis_parts.join("; ")
    };
    Ok(Some(RegressionResult {
        is_regression,
        severity,
        performance_change_percent: time_change * 100.0,
        current_measurement,
        baseline: baseline.clone(),
        analysis,
    }))
}
/// Folds a new measurement into an existing baseline and persists the
/// result. Silently does nothing if the benchmark has no baseline.
pub fn update_baseline(
&mut self,
benchmark_name: &str,
measurement: PerformanceMeasurement,
) -> Result<()> {
if let Some(baseline) = self.baselines.get_mut(benchmark_name) {
let old_mean = baseline.mean_time_ns as f64;
let new_value = measurement.time_ns as f64;
// Equal-weight blend: the new mean is the midpoint of the old mean and
// the incoming sample.
let new_mean = (old_mean + new_value) / 2.0;
let old_variance = (baseline.std_dev_ns as f64).powi(2);
// NOTE(review): this is an ad-hoc half-weight blend of the old variance
// and the new sample's squared deviation from the old mean — not a true
// running variance (e.g. Welford). Confirm this smoothing is intended
// before relying on `std_dev_ns` downstream.
let new_variance = (old_variance + (new_value - old_mean).powi(2)) / 2.0;
baseline.mean_time_ns = new_mean as u64;
baseline.std_dev_ns = new_variance.sqrt() as u64;
baseline.timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("SystemTime should be after UNIX_EPOCH")
.as_secs();
// Throughput/memory are replaced wholesale when present in the new
// measurement; otherwise the previous values are kept.
if let Some(throughput) = measurement.throughput_ops_per_sec {
baseline.throughput_ops_per_sec = Some(throughput);
}
if let Some(memory) = measurement.memory_usage_bytes {
baseline.memory_usage_bytes = Some(memory);
}
self.save_baselines()?;
}
Ok(())
}
/// Returns a read-only view of all baselines, keyed by benchmark name.
pub fn get_baselines(&self) -> &HashMap<String, PerformanceBaseline> {
&self.baselines
}
/// Loads baselines from the JSON file at `storage_path`, if one exists.
/// A missing file is not an error — the detector simply starts empty.
fn load_baselines(&mut self) -> Result<()> {
    if self.storage_path.exists() {
        let text = std::fs::read_to_string(&self.storage_path)
            .map_err(|e| performance_error(format!("Failed to read baselines file: {}", e)))?;
        self.baselines = serde_json::from_str(&text)
            .map_err(|e| performance_error(format!("Failed to parse baselines file: {}", e)))?;
    }
    Ok(())
}
/// Serializes every baseline to pretty-printed JSON and writes the whole
/// map to `storage_path`, replacing any previous contents.
fn save_baselines(&self) -> Result<()> {
    let json = serde_json::to_string_pretty(&self.baselines)
        .map_err(|e| performance_error(format!("Failed to serialize baselines: {}", e)))?;
    std::fs::write(&self.storage_path, json)
        .map_err(|e| performance_error(format!("Failed to write baselines file: {}", e)))
}
/// Builds a best-effort snapshot of the host hardware/OS for baseline
/// metadata. Individual probes degrade to `None` or fallback values rather
/// than failing the whole call.
fn detect_hardware_config() -> Result<HardwareConfig> {
Ok(HardwareConfig {
cpu_model: Self::detect_cpu_model(),
cpu_cores: num_cpus::get() as u32,
system_memory_bytes: Self::get_system_memory(),
gpu_info: Self::detect_gpu_info(),
os: format!("{} {}", std::env::consts::OS, Self::get_os_version()),
})
}
/// Returns total system memory in bytes.
///
/// Tries an OS-specific probe first (`/proc/meminfo` on Linux, `sysctl
/// hw.memsize` on macOS; no probe is implemented for Windows). If the probe
/// fails, falls back to a rough guess derived from the logical CPU count.
fn get_system_memory() -> u64 {
#[cfg(target_os = "linux")]
{
if let Ok(meminfo) = std::fs::read_to_string("/proc/meminfo") {
for line in meminfo.lines() {
if line.starts_with("MemTotal:") {
if let Some(kb_str) = line.split_whitespace().nth(1) {
if let Ok(kb) = kb_str.parse::<u64>() {
// /proc/meminfo reports MemTotal in kB.
return kb * 1024; }
}
}
}
}
}
#[cfg(target_os = "macos")]
{
let output = std::process::Command::new("sysctl").args(["-n", "hw.memsize"]).output();
if let Ok(output) = output {
if let Ok(memory_str) = String::from_utf8(output.stdout) {
if let Ok(memory_bytes) = memory_str.trim().parse::<u64>() {
return memory_bytes;
}
}
}
}
#[cfg(target_os = "windows")]
{
// No Windows probe implemented; control falls through to the heuristic.
}
// Heuristic fallback: assume a typical memory-per-core ratio.
let cpu_count = num_cpus::get() as u64;
match cpu_count {
1..=2 => 4 * 1024 * 1024 * 1024, 3..=4 => 8 * 1024 * 1024 * 1024, 5..=8 => 16 * 1024 * 1024 * 1024, _ => 32 * 1024 * 1024 * 1024, }
}
/// Returns a human-readable CPU model string, or `None` if the OS-specific
/// probe fails (`/proc/cpuinfo` on Linux, `sysctl` on macOS, `wmic` on
/// Windows).
fn detect_cpu_model() -> Option<String> {
#[cfg(target_os = "linux")]
{
if let Ok(cpuinfo) = std::fs::read_to_string("/proc/cpuinfo") {
for line in cpuinfo.lines() {
// First "model name" entry wins; all cores report the same model.
if line.starts_with("model name") {
if let Some(model) = line.split(':').nth(1) {
return Some(model.trim().to_string());
}
}
}
}
}
#[cfg(target_os = "macos")]
{
let output = std::process::Command::new("sysctl")
.args(["-n", "machdep.cpu.brand_string"])
.output();
if let Ok(output) = output {
if let Ok(cpu_model) = String::from_utf8(output.stdout) {
return Some(cpu_model.trim().to_string());
}
}
}
#[cfg(target_os = "windows")]
{
let output = std::process::Command::new("wmic")
.args(["cpu", "get", "name", "/format:list"])
.output();
if let Ok(output) = output {
if let Ok(cpu_info) = String::from_utf8(output.stdout) {
for line in cpu_info.lines() {
// wmic list format emits "Name=<model>" lines; ASCII prefix, so
// the byte-index slice below is safe.
if line.starts_with("Name=") {
return Some(line[5..].trim().to_string());
}
}
}
}
}
None
}
/// Returns a human-readable GPU description, or `None` if no probe succeeds.
///
/// Linux probes in priority order: `nvidia-smi`, then `rocm-smi`, then a
/// generic `lspci` scan. macOS parses `system_profiler` output for Apple
/// Silicon markers; Windows queries `wmic`.
fn detect_gpu_info() -> Option<String> {
#[cfg(target_os = "linux")]
{
// 1) NVIDIA: query the GPU name directly.
if let Ok(output) = std::process::Command::new("nvidia-smi")
.args(["--query-gpu=gpu_name", "--format=csv,noheader,nounits"])
.output()
{
if output.status.success() {
if let Ok(gpu_name) = String::from_utf8(output.stdout) {
let gpu_name = gpu_name.trim();
if !gpu_name.is_empty() {
return Some(format!("NVIDIA {}", gpu_name));
}
}
}
}
// 2) AMD: parse the "Card series:" line from rocm-smi.
if let Ok(output) =
std::process::Command::new("rocm-smi").args(["--showproductname"]).output()
{
if output.status.success() {
if let Ok(gpu_info) = String::from_utf8(output.stdout) {
for line in gpu_info.lines() {
if line.contains("Card series:") {
if let Some(series) = line.split(':').nth(1) {
return Some(format!("AMD {}", series.trim()));
}
}
}
}
}
}
// 3) Generic: take the device description after the last ':' of the
// first display-controller line in lspci output.
if let Ok(output) = std::process::Command::new("lspci").args(["-nn"]).output() {
if output.status.success() {
if let Ok(pci_info) = String::from_utf8(output.stdout) {
for line in pci_info.lines() {
if line.contains("VGA compatible controller")
|| line.contains("3D controller")
{
return Some(
line.split(':')
.next_back()
.unwrap_or("Unknown GPU")
.trim()
.to_string(),
);
}
}
}
}
}
}
#[cfg(target_os = "macos")]
{
let output = std::process::Command::new("system_profiler")
.args(["SPDisplaysDataType", "-xml"])
.output();
if let Ok(output) = output {
if let Ok(display_info) = String::from_utf8(output.stdout) {
// Coarse substring matching on the XML dump; only distinguishes
// Apple Silicon generations, not discrete GPUs.
if display_info.contains("Apple") {
if display_info.contains("M1") {
return Some("Apple M1 GPU".to_string());
} else if display_info.contains("M2") {
return Some("Apple M2 GPU".to_string());
} else if display_info.contains("M3") {
return Some("Apple M3 GPU".to_string());
} else {
return Some("Apple Silicon GPU".to_string());
}
}
}
}
}
#[cfg(target_os = "windows")]
{
let output = std::process::Command::new("wmic")
.args([
"path",
"win32_VideoController",
"get",
"name",
"/format:list",
])
.output();
if let Ok(output) = output {
if let Ok(gpu_info) = String::from_utf8(output.stdout) {
for line in gpu_info.lines() {
// "Name=<gpu>" lines; skip blank values.
if line.starts_with("Name=") && line.len() > 5 {
let name = &line[5..];
if !name.trim().is_empty() {
return Some(name.trim().to_string());
}
}
}
}
}
}
None
}
/// Returns an OS version string: distro name+version (or kernel release) on
/// Linux, product version on macOS, `ver` output on Windows, or "Unknown".
fn get_os_version() -> String {
#[cfg(target_os = "linux")]
{
// Prefer the distro identification from /etc/os-release.
if let Ok(os_release) = std::fs::read_to_string("/etc/os-release") {
let mut name = None;
let mut version = None;
for line in os_release.lines() {
if let Some(rest) = line.strip_prefix("NAME=") {
name = Some(rest.trim_matches('"').to_string());
} else if let Some(rest) = line.strip_prefix("VERSION=") {
version = Some(rest.trim_matches('"').to_string());
}
}
match (name, version) {
(Some(n), Some(v)) => return format!("{} {}", n, v),
(Some(n), None) => return n,
_ => {},
}
}
// Fall back to the kernel release.
if let Ok(output) = std::process::Command::new("uname").args(["-r"]).output() {
if let Ok(version) = String::from_utf8(output.stdout) {
return version.trim().to_string();
}
}
}
#[cfg(target_os = "macos")]
{
if let Ok(output) =
std::process::Command::new("sw_vers").args(["-productVersion"]).output()
{
if let Ok(version) = String::from_utf8(output.stdout) {
return version.trim().to_string();
}
}
}
#[cfg(target_os = "windows")]
{
if let Ok(output) = std::process::Command::new("ver").output() {
if let Ok(version) = String::from_utf8(output.stdout) {
return version.trim().to_string();
}
}
}
"Unknown".to_string()
}
/// Returns the current `HEAD` commit hash via `git rev-parse HEAD`, or
/// `None` if git is unavailable, the command fails, or its output is not
/// valid UTF-8.
fn get_git_commit_hash() -> Option<String> {
    let output = std::process::Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let hash = String::from_utf8(output.stdout).ok()?;
    Some(hash.trim().to_string())
}
}
/// Times `$code` with `std::time::Instant`, records the elapsed wall-clock
/// time as a fresh baseline named `$benchmark_name` on `$detector`, and
/// evaluates to the result of `$code`.
///
/// Note: recording failures are deliberately ignored (`let _ =`), and each
/// invocation overwrites any existing baseline with the same name.
#[macro_export]
macro_rules! measure_performance {
($detector:expr, $benchmark_name:expr, $code:block) => {{
let start = std::time::Instant::now();
let result = $code;
let duration = start.elapsed();
let measurement = $crate::performance::regression_detector::PerformanceMeasurement {
time_ns: duration.as_nanos() as u64,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
let _ = $detector.record_baseline($benchmark_name, measurement);
result
}};
}
// Unit tests. Each test backs the detector with a fresh tempdir so baseline
// persistence is exercised without touching real storage.
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
// Constructing a detector with a nonexistent storage file should succeed.
#[test]
fn test_regression_detector_creation() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let detector = RegressionDetector::new(storage_path, RegressionConfig::default());
assert!(detector.is_ok());
}
// Recording a baseline stores it under the benchmark name.
#[test]
fn test_baseline_recording() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let measurement = PerformanceMeasurement {
time_ns: 1_000_000, throughput_ops_per_sec: Some(1000.0),
memory_usage_bytes: Some(1024),
};
assert!(detector.record_baseline("test_benchmark", measurement).is_ok());
assert!(detector.baselines.contains_key("test_benchmark"));
}
// Faster runs are not regressions; slower runs past the threshold are.
#[test]
fn test_regression_detection() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let baseline_measurement = PerformanceMeasurement {
time_ns: 1_000_000, throughput_ops_per_sec: Some(1000.0),
memory_usage_bytes: Some(1024),
};
detector
.record_baseline("test_benchmark", baseline_measurement)
.expect("operation failed in test");
let faster_measurement = PerformanceMeasurement {
time_ns: 900_000, throughput_ops_per_sec: Some(1100.0),
memory_usage_bytes: Some(1000),
};
let result = detector
.check_regression("test_benchmark", faster_measurement)
.expect("operation failed in test");
assert!(result.is_some());
assert!(!result.expect("operation failed in test").is_regression);
let slower_measurement = PerformanceMeasurement {
time_ns: 1_200_000, throughput_ops_per_sec: Some(800.0),
memory_usage_bytes: Some(1200),
};
let result = detector
.check_regression("test_benchmark", slower_measurement)
.expect("operation failed in test");
assert!(result.is_some());
let regression = result.expect("operation failed in test");
assert!(regression.is_regression);
assert!(regression.severity > 0.0);
}
// Default config values match the documented defaults.
#[test]
fn test_regression_config_default() {
let config = RegressionConfig::default();
assert!((config.regression_threshold - 0.05).abs() < 1e-6);
assert!((config.std_dev_threshold - 2.0).abs() < 1e-6);
assert!(config.check_memory_regression);
assert!(config.check_throughput_regression);
assert_eq!(config.min_measurements, 5);
}
// A custom config keeps the fields it was built with.
#[test]
fn test_regression_config_custom() {
let config = RegressionConfig {
regression_threshold: 0.1,
std_dev_threshold: 3.0,
check_memory_regression: false,
check_throughput_regression: false,
min_measurements: 10,
};
assert!((config.regression_threshold - 0.1).abs() < 1e-6);
assert!(!config.check_memory_regression);
}
// HardwareConfig implements Clone faithfully.
#[test]
fn test_hardware_config_clone() {
let config = HardwareConfig {
cpu_model: Some("TestCPU".to_string()),
cpu_cores: 8,
system_memory_bytes: 16_000_000_000,
gpu_info: None,
os: "TestOS".to_string(),
};
let cloned = config.clone();
assert_eq!(cloned.cpu_cores, 8);
assert_eq!(cloned.os, "TestOS");
}
// PerformanceBaseline implements Clone faithfully.
#[test]
fn test_performance_baseline_clone() {
let baseline = PerformanceBaseline {
benchmark_name: "test_bench".to_string(),
mean_time_ns: 1_000_000,
std_dev_ns: 50_000,
throughput_ops_per_sec: Some(1000.0),
memory_usage_bytes: Some(1024),
timestamp: 0,
commit_hash: Some("abc123".to_string()),
hardware_config: HardwareConfig {
cpu_model: None,
cpu_cores: 4,
system_memory_bytes: 8_000_000_000,
gpu_info: None,
os: "test".to_string(),
},
};
let cloned = baseline.clone();
assert_eq!(cloned.benchmark_name, "test_bench");
assert_eq!(cloned.mean_time_ns, 1_000_000);
}
// PerformanceMeasurement implements Clone faithfully.
#[test]
fn test_performance_measurement_clone() {
let measurement = PerformanceMeasurement {
time_ns: 500_000,
throughput_ops_per_sec: Some(2000.0),
memory_usage_bytes: Some(2048),
};
let cloned = measurement.clone();
assert_eq!(cloned.time_ns, 500_000);
assert_eq!(cloned.throughput_ops_per_sec, Some(2000.0));
}
// Unknown benchmark names yield Ok(None), not an error.
#[test]
fn test_check_regression_no_baseline() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let measurement = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
let result = detector
.check_regression("nonexistent_benchmark", measurement)
.expect("operation failed in test");
assert!(result.is_none());
}
// Identical measurements never count as a regression.
#[test]
fn test_check_regression_identical_performance() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let measurement = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: Some(1000.0),
memory_usage_bytes: Some(1024),
};
detector
.record_baseline("test", measurement.clone())
.expect("operation failed in test");
let result = detector
.check_regression("test", measurement)
.expect("operation failed in test");
assert!(result.is_some());
assert!(!result.expect("should be present").is_regression);
}
// A 50% memory increase triggers a memory regression in the analysis text.
#[test]
fn test_check_regression_memory_increase() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let baseline = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: None,
memory_usage_bytes: Some(1024),
};
detector.record_baseline("test", baseline).expect("operation failed in test");
let current = PerformanceMeasurement {
time_ns: 1_000_000, throughput_ops_per_sec: None,
memory_usage_bytes: Some(1536),
};
let result = detector.check_regression("test", current).expect("operation failed in test");
assert!(result.is_some());
let r = result.expect("should be present");
assert!(r.is_regression);
assert!(r.analysis.contains("Memory"));
}
// A halved throughput triggers a throughput regression in the analysis text.
#[test]
fn test_check_regression_throughput_decrease() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let baseline = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: Some(1000.0),
memory_usage_bytes: None,
};
detector.record_baseline("test", baseline).expect("operation failed in test");
let current = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: Some(500.0),
memory_usage_bytes: None,
};
let result = detector.check_regression("test", current).expect("operation failed in test");
assert!(result.is_some());
let r = result.expect("should be present");
assert!(r.is_regression);
assert!(r.analysis.contains("Throughput"));
}
// update_baseline blends means with equal weight: (1ms + 2ms) / 2 = 1.5ms.
#[test]
fn test_update_baseline() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let measurement1 = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: Some(1000.0),
memory_usage_bytes: Some(1024),
};
detector
.record_baseline("test", measurement1)
.expect("operation failed in test");
let measurement2 = PerformanceMeasurement {
time_ns: 2_000_000,
throughput_ops_per_sec: Some(500.0),
memory_usage_bytes: Some(2048),
};
detector
.update_baseline("test", measurement2)
.expect("operation failed in test");
let baselines = detector.get_baselines();
let baseline = baselines.get("test").expect("baseline should exist");
assert_eq!(baseline.mean_time_ns, 1_500_000);
}
// Updating a missing baseline is a silent no-op, not an error.
#[test]
fn test_update_nonexistent_baseline() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let measurement = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
assert!(detector.update_baseline("nonexistent", measurement).is_ok());
}
// A fresh detector starts with no baselines.
#[test]
fn test_get_baselines_empty() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
assert!(detector.get_baselines().is_empty());
}
// Distinct benchmark names store distinct baselines.
#[test]
fn test_multiple_baselines() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
for i in 0..5 {
let measurement = PerformanceMeasurement {
time_ns: (i + 1) * 1_000_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
detector
.record_baseline(format!("bench_{}", i), measurement)
.expect("operation failed in test");
}
assert_eq!(detector.get_baselines().len(), 5);
}
// Severity stays within (0, 1] for a large (50%) regression.
#[test]
fn test_regression_severity_scaling() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(
storage_path,
RegressionConfig {
regression_threshold: 0.1, ..RegressionConfig::default()
},
)
.expect("operation failed in test");
let baseline = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
detector.record_baseline("test", baseline).expect("operation failed in test");
let current = PerformanceMeasurement {
time_ns: 1_500_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
let result = detector.check_regression("test", current).expect("operation failed in test");
let r = result.expect("should be present");
assert!(r.severity <= 1.0);
assert!(r.severity > 0.0);
}
// A faster run produces the "no regression" analysis message.
#[test]
fn test_regression_result_analysis_text() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let baseline = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
detector.record_baseline("test", baseline).expect("operation failed in test");
let current = PerformanceMeasurement {
time_ns: 900_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
let result = detector.check_regression("test", current).expect("operation failed in test");
let r = result.expect("should be present");
assert!(r.analysis.contains("No performance regression detected"));
}
// With memory/throughput checks disabled, only time regressions can fire.
#[test]
fn test_regression_with_disabled_checks() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(
storage_path,
RegressionConfig {
check_memory_regression: false,
check_throughput_regression: false,
..RegressionConfig::default()
},
)
.expect("operation failed in test");
let baseline = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: Some(1000.0),
memory_usage_bytes: Some(1024),
};
detector.record_baseline("test", baseline).expect("operation failed in test");
let current = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: Some(100.0), memory_usage_bytes: Some(10240), };
let result = detector.check_regression("test", current).expect("operation failed in test");
let r = result.expect("should be present");
assert!(!r.is_regression);
}
// Baselines written by one detector are visible to a new detector using the
// same storage path.
#[test]
fn test_baselines_persist_and_reload() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
{
let mut detector =
RegressionDetector::new(storage_path.clone(), RegressionConfig::default())
.expect("operation failed in test");
let measurement = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: Some(1000.0),
memory_usage_bytes: None,
};
detector
.record_baseline("persisted_bench", measurement)
.expect("operation failed in test");
}
let detector2 = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
assert!(detector2.get_baselines().contains_key("persisted_bench"));
}
// Slower runs report a positive percentage change.
#[test]
fn test_performance_change_percent_positive() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let baseline = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
detector.record_baseline("test", baseline).expect("operation failed in test");
let slower = PerformanceMeasurement {
time_ns: 1_200_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
let result = detector.check_regression("test", slower).expect("operation failed in test");
let r = result.expect("should be present");
assert!(r.performance_change_percent > 0.0);
}
// Faster runs report a negative percentage change.
#[test]
fn test_performance_change_percent_negative() {
let temp_dir = TempDir::new().expect("temp file creation failed");
let storage_path = temp_dir.path().join("baselines.json");
let mut detector = RegressionDetector::new(storage_path, RegressionConfig::default())
.expect("operation failed in test");
let baseline = PerformanceMeasurement {
time_ns: 1_000_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
detector.record_baseline("test", baseline).expect("operation failed in test");
let faster = PerformanceMeasurement {
time_ns: 800_000,
throughput_ops_per_sec: None,
memory_usage_bytes: None,
};
let result = detector.check_regression("test", faster).expect("operation failed in test");
let r = result.expect("should be present");
assert!(r.performance_change_percent < 0.0);
}
}