use std::collections::HashMap;
use std::time::Duration;
use chrono::{DateTime, Utc};
use mabi_core::RELEASE_VERSION;
use serde::{Deserialize, Serialize};
/// Aggregate performance metrics captured over a single test run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestMetrics {
    /// Total number of requests issued during the run.
    pub total_requests: u64,
    /// Requests that completed successfully.
    pub successful_requests: u64,
    /// Requests that failed.
    pub failed_requests: u64,
    /// Total connections opened over the run.
    pub total_connections: u64,
    /// Highest number of concurrent connections observed.
    pub peak_connections: u64,
    /// Average transactions per second.
    pub avg_tps: u64,
    /// Highest transactions-per-second value observed.
    pub peak_tps: u64,
    /// Median (50th-percentile) request latency, in milliseconds.
    pub p50_latency_ms: f64,
    /// 95th-percentile request latency, in milliseconds.
    pub p95_latency_ms: f64,
    /// 99th-percentile request latency, in milliseconds.
    pub p99_latency_ms: f64,
    /// Fraction of requests that failed, in `[0.0, 1.0]` (rendered as a
    /// percentage in reports; compared against 0.05 by `is_healthy`).
    pub error_rate: f64,
    /// Peak memory usage during the run, in megabytes.
    pub memory_peak_mb: f64,
}
impl TestMetrics {
    /// Reports whether the run stayed within acceptable bounds:
    /// an error rate below 5% and a P99 latency under 100 ms.
    pub fn is_healthy(&self) -> bool {
        let errors_ok = self.error_rate < 0.05;
        let tail_latency_ok = self.p99_latency_ms < 100.0;
        errors_ok && tail_latency_ok
    }
}
/// High-level outcome of a test run: verdict, timing, and target tallies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestSummary {
    /// Name of the test run.
    pub name: String,
    /// Free-form description of the test.
    pub description: String,
    /// Overall pass/fail verdict.
    pub passed: bool,
    /// Wall-clock duration of the run; serialized in human-readable
    /// form (e.g. "60s") via `humantime_serde`.
    #[serde(with = "humantime_serde")]
    pub duration: Duration,
    /// UTC timestamp when the run started.
    pub start_time: DateTime<Utc>,
    /// UTC timestamp when the run ended.
    pub end_time: DateTime<Utc>,
    /// Number of performance targets evaluated.
    pub targets_checked: usize,
    /// Number of targets that were met.
    pub targets_passed: usize,
    /// One human-readable reason for each target that was missed.
    pub failure_reasons: Vec<String>,
}
/// Complete report for a test run: generation metadata, run summary,
/// aggregate metrics, per-target results, a timeline of events, and
/// free-form notes. Constructed via `TestReport::builder` and
/// exportable as JSON, YAML, or Markdown.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestReport {
    /// Context about when/where/how the report was generated.
    pub metadata: ReportMetadata,
    /// High-level run outcome.
    pub summary: TestSummary,
    /// Aggregate performance metrics.
    pub metrics: TestMetrics,
    /// Result of each performance-target check.
    pub target_results: Vec<TargetResultEntry>,
    /// Chronological events recorded during the run.
    pub timeline: Vec<TimelineEvent>,
    /// Free-form notes attached by the test author.
    pub notes: Vec<String>,
}
/// Context describing when, where, and with what configuration a
/// report was produced.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReportMetadata {
    /// Version of the generating software (taken from `RELEASE_VERSION`).
    pub version: String,
    /// UTC timestamp when the report was generated.
    pub generated_at: DateTime<Utc>,
    /// Environment label (defaults to "development" in the builder).
    pub environment: String,
    /// Facts about the machine the test ran on.
    pub host_info: HostInfo,
    /// Arbitrary key/value test configuration.
    pub config: HashMap<String, String>,
}
/// Basic facts about the machine the test ran on.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HostInfo {
    /// Operating system name (from `std::env::consts::OS`, e.g. "linux").
    pub os: String,
    /// Number of CPUs as reported by `num_cpus::get()`.
    pub cpu_count: usize,
    /// Total physical memory in bytes; 0 when it cannot be determined
    /// (always 0 on non-Linux platforms).
    pub total_memory_bytes: u64,
    /// Machine hostname, or "unknown" if it cannot be read.
    pub hostname: String,
}
impl HostInfo {
    /// Snapshots the current host: OS name, CPU count, total memory,
    /// and hostname. Never fails; unavailable fields fall back to
    /// 0 / "unknown".
    pub fn collect() -> Self {
        let hostname = match hostname::get() {
            Ok(name) => name.to_string_lossy().to_string(),
            Err(_) => "unknown".to_string(),
        };
        Self {
            os: std::env::consts::OS.to_string(),
            cpu_count: num_cpus::get(),
            total_memory_bytes: Self::get_total_memory(),
            hostname,
        }
    }

    /// Reads total physical memory (bytes) from `/proc/meminfo`.
    /// Returns 0 if the file is unreadable or the `MemTotal:` line is
    /// missing or malformed.
    #[cfg(target_os = "linux")]
    fn get_total_memory() -> u64 {
        let meminfo = match std::fs::read_to_string("/proc/meminfo") {
            Ok(contents) => contents,
            Err(_) => return 0,
        };
        for line in meminfo.lines() {
            if let Some(rest) = line.strip_prefix("MemTotal:") {
                // Line format is "MemTotal:   16384 kB" — value is in KiB.
                return rest
                    .split_whitespace()
                    .next()
                    .and_then(|value| value.parse::<u64>().ok())
                    .map(|kb| kb * 1024)
                    .unwrap_or(0);
            }
        }
        0
    }

    /// Memory detection is only implemented for Linux; report 0 elsewhere.
    #[cfg(not(target_os = "linux"))]
    fn get_total_memory() -> u64 {
        0
    }
}
/// Outcome of checking one performance target against measured results.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TargetResultEntry {
    /// Target name (e.g. "Min TPS").
    pub name: String,
    /// Expected value, rendered as a string.
    pub expected: String,
    /// Measured value, rendered as a string.
    pub actual: String,
    /// Whether the target was met.
    pub passed: bool,
    /// Comparison operator used, as a string (e.g. ">=").
    pub comparison: String,
}
/// A single event recorded on the test timeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TimelineEvent {
    /// Offset from the start of the test; serialized in human-readable
    /// form via `humantime_serde`.
    #[serde(with = "humantime_serde")]
    pub timestamp: Duration,
    /// Category of the event.
    pub event_type: EventType,
    /// Human-readable description of what happened.
    pub message: String,
    /// Optional metric values captured alongside the event.
    pub metrics: Option<HashMap<String, f64>>,
}
/// Category of a `TimelineEvent`; serialized in snake_case
/// (e.g. `ramp_up_complete`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum EventType {
    /// Test run started.
    TestStart,
    /// Test run finished.
    TestEnd,
    /// Load ramp-up phase completed.
    RampUpComplete,
    /// A performance target was met.
    TargetReached,
    /// A performance target was missed.
    TargetMissed,
    /// An error occurred.
    Error,
    /// A non-fatal warning occurred.
    Warning,
    /// A notable point in the run.
    Milestone,
    /// A periodic metrics snapshot.
    Snapshot,
}
impl TestReport {
    /// Starts building a report for the named test run.
    pub fn builder(name: impl Into<String>) -> TestReportBuilder {
        TestReportBuilder::new(name)
    }

    /// Serializes the report as pretty-printed JSON.
    pub fn to_json(&self) -> Result<String, serde_json::Error> {
        serde_json::to_string_pretty(self)
    }

    /// Serializes the report as YAML.
    pub fn to_yaml(&self) -> Result<String, serde_yaml::Error> {
        serde_yaml::to_string(self)
    }

    /// Renders the report as a Markdown document with Summary, Metrics,
    /// Target Results, (optional) Failure Reasons, and Environment sections.
    pub fn to_markdown(&self) -> String {
        // Preallocate roughly a report's worth of text to avoid repeated
        // buffer regrowth while appending.
        let mut md = String::with_capacity(2048);
        md.push_str(&format!("# Test Report: {}\n\n", self.summary.name));
        md.push_str(&format!(
            "**Description:** {}\n\n",
            self.summary.description
        ));
        md.push_str(&format!(
            "**Status:** {}\n\n",
            if self.summary.passed {
                "PASSED"
            } else {
                "FAILED"
            }
        ));
        md.push_str("## Summary\n\n");
        // Static table scaffolding needs no interpolation: plain `push_str`
        // avoids the per-call String allocation of `format!` (clippy
        // `useless_format`).
        md.push_str("| Metric | Value |\n");
        md.push_str("|--------|-------|\n");
        md.push_str(&format!("| Duration | {:?} |\n", self.summary.duration));
        md.push_str(&format!("| Start Time | {} |\n", self.summary.start_time));
        md.push_str(&format!("| End Time | {} |\n", self.summary.end_time));
        md.push_str(&format!(
            "| Targets Passed | {}/{} |\n",
            self.summary.targets_passed, self.summary.targets_checked
        ));
        md.push_str("\n## Metrics\n\n");
        md.push_str("| Metric | Value |\n");
        md.push_str("|--------|-------|\n");
        md.push_str(&format!(
            "| Total Requests | {} |\n",
            self.metrics.total_requests
        ));
        md.push_str(&format!(
            "| Successful | {} |\n",
            self.metrics.successful_requests
        ));
        md.push_str(&format!("| Failed | {} |\n", self.metrics.failed_requests));
        md.push_str(&format!("| Avg TPS | {} |\n", self.metrics.avg_tps));
        md.push_str(&format!("| Peak TPS | {} |\n", self.metrics.peak_tps));
        md.push_str(&format!(
            "| P50 Latency | {:.2}ms |\n",
            self.metrics.p50_latency_ms
        ));
        md.push_str(&format!(
            "| P95 Latency | {:.2}ms |\n",
            self.metrics.p95_latency_ms
        ));
        md.push_str(&format!(
            "| P99 Latency | {:.2}ms |\n",
            self.metrics.p99_latency_ms
        ));
        // error_rate is stored as a fraction; render as a percentage.
        md.push_str(&format!(
            "| Error Rate | {:.2}% |\n",
            self.metrics.error_rate * 100.0
        ));
        md.push_str(&format!(
            "| Peak Memory | {:.2}MB |\n",
            self.metrics.memory_peak_mb
        ));
        md.push_str("\n## Target Results\n\n");
        md.push_str("| Target | Expected | Actual | Status |\n");
        md.push_str("|--------|----------|--------|--------|\n");
        for target in &self.target_results {
            let status = if target.passed { "PASSED" } else { "FAILED" };
            md.push_str(&format!(
                "| {} | {} {} | {} | {} |\n",
                target.name, target.comparison, target.expected, target.actual, status
            ));
        }
        if !self.summary.failure_reasons.is_empty() {
            md.push_str("\n## Failure Reasons\n\n");
            for reason in &self.summary.failure_reasons {
                md.push_str(&format!("- {}\n", reason));
            }
        }
        md.push_str("\n## Environment\n\n");
        md.push_str(&format!("- **OS:** {}\n", self.metadata.host_info.os));
        md.push_str(&format!(
            "- **CPUs:** {}\n",
            self.metadata.host_info.cpu_count
        ));
        md.push_str(&format!(
            "- **Memory:** {:.2}GB\n",
            self.metadata.host_info.total_memory_bytes as f64 / (1024.0 * 1024.0 * 1024.0)
        ));
        md.push_str(&format!(
            "- **Host:** {}\n",
            self.metadata.host_info.hostname
        ));
        md
    }

    /// Writes the report to `path`, choosing the format from the file
    /// extension: `.json`, `.yaml`/`.yml`, or `.md`.
    ///
    /// # Errors
    /// Returns `ErrorKind::InvalidInput` for an unsupported extension;
    /// serialization failures are wrapped as `ErrorKind::Other` I/O errors.
    pub fn save(&self, path: &str) -> std::io::Result<()> {
        let content = if path.ends_with(".json") {
            self.to_json()
                .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?
        } else if path.ends_with(".yaml") || path.ends_with(".yml") {
            self.to_yaml()
                .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?
        } else if path.ends_with(".md") {
            self.to_markdown()
        } else {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidInput,
                "Unsupported file format",
            ));
        };
        std::fs::write(path, content)
    }
}
/// Fluent builder for assembling a `TestReport`; each setter consumes
/// and returns `self`, and `build` produces the final report.
pub struct TestReportBuilder {
    // Test name (required, set in `new`).
    name: String,
    // Free-form description (empty by default).
    description: String,
    // Environment label ("development" by default).
    environment: String,
    // Arbitrary key/value configuration entries.
    config: HashMap<String, String>,
    // Captured at builder creation; becomes the report's start time.
    start_time: DateTime<Utc>,
    // Accumulated per-target check results.
    target_results: Vec<TargetResultEntry>,
    // Accumulated timeline events.
    timeline: Vec<TimelineEvent>,
    // Accumulated free-form notes.
    notes: Vec<String>,
}
impl TestReportBuilder {
    /// Creates a builder for the named test with empty collections,
    /// the "development" environment, and the current time recorded as
    /// the run's start time.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            description: String::new(),
            environment: "development".to_string(),
            config: HashMap::new(),
            start_time: Utc::now(),
            target_results: Vec::new(),
            timeline: Vec::new(),
            notes: Vec::new(),
        }
    }

    /// Sets the test description.
    pub fn description(mut self, desc: impl Into<String>) -> Self {
        self.description = desc.into();
        self
    }

    /// Sets the environment label.
    pub fn environment(mut self, env: impl Into<String>) -> Self {
        self.environment = env.into();
        self
    }

    /// Records one configuration key/value pair.
    pub fn config(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
        self.config.insert(key.into(), value.into());
        self
    }

    /// Records the outcome of one performance-target check.
    pub fn add_target_result(
        mut self,
        name: impl Into<String>,
        expected: impl Into<String>,
        actual: impl Into<String>,
        comparison: impl Into<String>,
        passed: bool,
    ) -> Self {
        let entry = TargetResultEntry {
            name: name.into(),
            expected: expected.into(),
            actual: actual.into(),
            comparison: comparison.into(),
            passed,
        };
        self.target_results.push(entry);
        self
    }

    /// Appends a timeline event at the given offset from test start.
    pub fn add_event(
        mut self,
        timestamp: Duration,
        event_type: EventType,
        message: impl Into<String>,
    ) -> Self {
        let event = TimelineEvent {
            timestamp,
            event_type,
            message: message.into(),
            metrics: None,
        };
        self.timeline.push(event);
        self
    }

    /// Appends a free-form note.
    pub fn add_note(mut self, note: impl Into<String>) -> Self {
        self.notes.push(note.into());
        self
    }

    /// Finalizes the builder into a `TestReport`.
    ///
    /// The end time is sampled once and used both as the summary's
    /// `end_time` and the metadata's `generated_at`; failure reasons are
    /// derived from every target result that did not pass.
    pub fn build(self, metrics: TestMetrics, passed: bool, duration: Duration) -> TestReport {
        let finished_at = Utc::now();
        let mut passed_count = 0usize;
        let mut failure_reasons = Vec::new();
        // Single pass over the results: count passes and describe misses.
        for entry in &self.target_results {
            if entry.passed {
                passed_count += 1;
            } else {
                failure_reasons.push(format!(
                    "{}: expected {} {}, got {}",
                    entry.name, entry.comparison, entry.expected, entry.actual
                ));
            }
        }
        let targets_checked = self.target_results.len();
        let summary = TestSummary {
            name: self.name,
            description: self.description,
            passed,
            duration,
            start_time: self.start_time,
            end_time: finished_at,
            targets_checked,
            targets_passed: passed_count,
            failure_reasons,
        };
        let metadata = ReportMetadata {
            version: RELEASE_VERSION.to_string(),
            generated_at: finished_at,
            environment: self.environment,
            host_info: HostInfo::collect(),
            config: self.config,
        };
        TestReport {
            metadata,
            summary,
            metrics,
            target_results: self.target_results,
            timeline: self.timeline,
            notes: self.notes,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Canonical healthy metrics fixture shared by the tests below
    /// (the same 12-field literal was previously duplicated three times).
    fn sample_metrics() -> TestMetrics {
        TestMetrics {
            total_requests: 10000,
            successful_requests: 9900,
            failed_requests: 100,
            total_connections: 100,
            peak_connections: 100,
            avg_tps: 1000,
            peak_tps: 1500,
            p50_latency_ms: 5.0,
            p95_latency_ms: 15.0,
            p99_latency_ms: 30.0,
            error_rate: 0.01,
            memory_peak_mb: 256.0,
        }
    }

    #[test]
    fn test_test_metrics_healthy() {
        let healthy = sample_metrics();
        assert!(healthy.is_healthy());
        // A 10% error rate crosses the 5% health threshold.
        let unhealthy = TestMetrics {
            error_rate: 0.10,
            ..healthy.clone()
        };
        assert!(!unhealthy.is_healthy());
    }

    #[test]
    fn test_report_builder() {
        let report = TestReport::builder("Test Run")
            .description("Performance test")
            .environment("test")
            .config("connections", "100")
            .add_target_result("Min TPS", "1000", "1000", ">=", true)
            .add_note("Test completed successfully")
            .build(sample_metrics(), true, Duration::from_secs(60));
        assert!(report.summary.passed);
        assert_eq!(report.summary.targets_passed, 1);
        assert_eq!(report.metadata.version, RELEASE_VERSION);
    }

    #[test]
    fn test_report_to_markdown() {
        let report =
            TestReport::builder("Test").build(sample_metrics(), true, Duration::from_secs(60));
        let md = report.to_markdown();
        assert!(md.contains("# Test Report: Test"));
        assert!(md.contains("PASSED"));
    }

    #[test]
    fn test_host_info_collect() {
        let info = HostInfo::collect();
        assert!(!info.os.is_empty());
        assert!(info.cpu_count > 0);
    }
}