1use crate::brick::BrickScore;
4use serde::{Deserialize, Serialize};
5
/// How a report should be rendered: machine-readable JSON or
/// human-readable plain text.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OutputFormat {
    /// Pretty-printed JSON (via `serde_json::to_string_pretty`).
    Json,
    /// Plain-text report intended for terminal display.
    Text,
}
12
/// CPU frequency-scaling governor state, read from cpufreq sysfs on Linux.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CpuGovernorInfo {
    /// Active scaling governor name (contents of `scaling_governor`, trimmed).
    pub governor: String,
    /// True when `governor == "performance"`.
    pub is_performance: bool,
    /// Current CPU frequency in MHz (`scaling_cur_freq` / 1000), if readable.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub current_freq_mhz: Option<u32>,
    /// Maximum CPU frequency in MHz (`scaling_max_freq` / 1000), if readable.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_freq_mhz: Option<u32>,
}
23
/// Best-effort snapshot of the host system, captured by [`SystemInfo::detect`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SystemInfo {
    /// CPU model name ("Unknown CPU" when it cannot be determined).
    pub cpu: String,
    /// Logical core count (from `available_parallelism`).
    pub cores: usize,
    /// Total system memory in GB (0 when it cannot be determined).
    pub memory_gb: u64,
    /// GPU description; currently never populated by `detect` (always `None`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub gpu: Option<String>,
    /// CPU governor details; `None` on non-Linux or when sysfs is unreadable.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cpu_governor: Option<CpuGovernorInfo>,
}
36
37impl SystemInfo {
38 pub fn detect() -> Self {
39 let cores = std::thread::available_parallelism()
40 .map(|n| n.get())
41 .unwrap_or(1);
42
43 let cpu = Self::detect_cpu();
45
46 let memory_gb = Self::detect_memory_gb();
48
49 let cpu_governor = Self::detect_cpu_governor();
51
52 Self {
53 cpu,
54 cores,
55 memory_gb,
56 gpu: None, cpu_governor,
58 }
59 }
60
61 fn detect_cpu() -> String {
62 #[cfg(target_os = "linux")]
63 {
64 if let Ok(content) = std::fs::read_to_string("/proc/cpuinfo") {
65 for line in content.lines() {
66 if line.starts_with("model name") {
67 if let Some(name) = line.split(':').nth(1) {
68 return name.trim().to_string();
69 }
70 }
71 }
72 }
73 }
74 "Unknown CPU".to_string()
75 }
76
77 fn detect_memory_gb() -> u64 {
78 #[cfg(target_os = "linux")]
79 {
80 if let Ok(content) = std::fs::read_to_string("/proc/meminfo") {
81 for line in content.lines() {
82 if line.starts_with("MemTotal:") {
83 if let Some(kb_str) = line.split_whitespace().nth(1) {
84 if let Ok(kb) = kb_str.parse::<u64>() {
85 return kb / 1024 / 1024; }
87 }
88 }
89 }
90 }
91 }
92 0
93 }
94
95 fn detect_cpu_governor() -> Option<CpuGovernorInfo> {
98 #[cfg(target_os = "linux")]
99 {
100 let governor_path = "/sys/devices/system/cpu/cpu0/cpufreq/scaling_governor";
102 let cur_freq_path = "/sys/devices/system/cpu/cpu0/cpufreq/scaling_cur_freq";
103 let max_freq_path = "/sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq";
104
105 if let Ok(governor) = std::fs::read_to_string(governor_path) {
106 let governor = governor.trim().to_string();
107 let is_performance = governor == "performance";
108
109 let current_freq_mhz = std::fs::read_to_string(cur_freq_path)
110 .ok()
111 .and_then(|s| s.trim().parse::<u32>().ok())
112 .map(|khz| khz / 1000);
113
114 let max_freq_mhz = std::fs::read_to_string(max_freq_path)
115 .ok()
116 .and_then(|s| s.trim().parse::<u32>().ok())
117 .map(|khz| khz / 1000);
118
119 return Some(CpuGovernorInfo {
120 governor,
121 is_performance,
122 current_freq_mhz,
123 max_freq_mhz,
124 });
125 }
126 }
127
128 None
129 }
130
131 pub fn check_benchmark_readiness(&self) -> Vec<String> {
133 let mut warnings = Vec::new();
134
135 if let Some(ref gov) = self.cpu_governor {
136 if !gov.is_performance {
137 warnings.push(format!(
138 "CPU governor is '{}' (not 'performance'). For deterministic benchmarks, run: \
139 sudo cpupower frequency-set -g performance",
140 gov.governor
141 ));
142 }
143
144 if let (Some(cur), Some(max)) = (gov.current_freq_mhz, gov.max_freq_mhz) {
145 let ratio = cur as f64 / max as f64;
146 if ratio < 0.9 {
147 warnings.push(format!(
148 "CPU running at {}MHz ({:.0}% of max {}MHz). Thermal throttling may affect results.",
149 cur, ratio * 100.0, max
150 ));
151 }
152 }
153 }
154
155 warnings
156 }
157}
158
/// Parameters the benchmark was run with.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkConfig {
    /// Compute backend identifier.
    pub backend: String,
    /// Workload name.
    pub workload: String,
    /// Problem size, in elements.
    pub size: usize,
    /// Number of measured iterations.
    pub iterations: u64,
}
167
/// Per-iteration latency statistics; used as milliseconds in
/// `BenchmarkResults::latency_ms`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LatencyStats {
    /// Arithmetic mean latency.
    pub mean: f64,
    /// Fastest observed iteration.
    pub min: f64,
    /// Slowest observed iteration.
    pub max: f64,
    /// Median (50th percentile).
    pub p50: f64,
    /// 95th percentile.
    pub p95: f64,
    /// 99th percentile.
    pub p99: f64,
    /// Coefficient of variation, as a percentage (lower = more stable).
    pub cv_percent: f64,
}
179
/// Serializable summary of a `BrickScore`. The text report renders `total`
/// out of 100 and the components out of 40/25/20/15 respectively.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScoreInfo {
    /// Overall score (rendered as "/100").
    pub total: u8,
    /// Letter grade, rendered via the grade's `Debug` representation.
    pub grade: String,
    /// Performance component (rendered as "/40").
    pub performance: u8,
    /// Efficiency component (rendered as "/25").
    pub efficiency: u8,
    /// Correctness component (rendered as "/20").
    pub correctness: u8,
    /// Stability component (rendered as "/15").
    pub stability: u8,
}
190
191impl From<BrickScore> for ScoreInfo {
192 fn from(score: BrickScore) -> Self {
193 Self {
194 total: score.total(),
195 grade: format!("{:?}", score.grade()),
196 performance: score.performance,
197 efficiency: score.efficiency,
198 correctness: score.correctness,
199 stability: score.stability,
200 }
201 }
202}
203
204#[derive(Debug, Clone, Serialize, Deserialize)]
206pub struct BenchmarkResult {
207 pub version: String,
208 pub timestamp: String,
209 pub duration_secs: f64,
210 pub system: SystemInfo,
211 pub benchmark: BenchmarkConfig,
212 pub results: BenchmarkResults,
213 pub score: ScoreInfo,
214 #[serde(skip_serializing_if = "Vec::is_empty")]
216 pub warnings: Vec<String>,
217}
218
/// Measured performance numbers for a single benchmark run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkResults {
    /// Achieved throughput in GFLOP/s.
    pub gflops: f64,
    /// Operations per second.
    pub throughput_ops_sec: f64,
    /// Per-iteration latency statistics, in milliseconds.
    pub latency_ms: LatencyStats,
}
226
227impl BenchmarkResult {
228 pub fn format(&self, format: OutputFormat) -> String {
230 match format {
231 OutputFormat::Json => {
232 serde_json::to_string_pretty(self).unwrap_or_else(|_| "{}".to_string())
233 }
234 OutputFormat::Text => self.format_text(),
235 }
236 }
237
238 fn format_text(&self) -> String {
239 format!(
240 r#"
241=== cbtop Benchmark Results ===
242
243System:
244 CPU: {}
245 Cores: {}
246 Memory: {} GB
247
248Benchmark:
249 Backend: {}
250 Workload: {}
251 Size: {} elements
252 Iterations: {}
253 Duration: {:.2}s
254
255Results:
256 GFLOP/s: {:.2}
257 Throughput: {:.0} ops/sec
258 Latency (ms):
259 Mean: {:.3}
260 P50: {:.3}
261 P95: {:.3}
262 P99: {:.3}
263 CV: {:.1}%
264
265Score: {}/100 (Grade: {})
266 Performance: {}/40
267 Efficiency: {}/25
268 Correctness: {}/20
269 Stability: {}/15
270{}
271"#,
272 self.system.cpu,
273 self.system.cores,
274 self.system.memory_gb,
275 self.benchmark.backend,
276 self.benchmark.workload,
277 self.benchmark.size,
278 self.benchmark.iterations,
279 self.duration_secs,
280 self.results.gflops,
281 self.results.throughput_ops_sec,
282 self.results.latency_ms.mean,
283 self.results.latency_ms.p50,
284 self.results.latency_ms.p95,
285 self.results.latency_ms.p99,
286 self.results.latency_ms.cv_percent,
287 self.score.total,
288 self.score.grade,
289 self.score.performance,
290 self.score.efficiency,
291 self.score.correctness,
292 self.score.stability,
293 if self.warnings.is_empty() {
295 String::new()
296 } else {
297 format!(
298 "\nWarnings:\n{}",
299 self.warnings
300 .iter()
301 .map(|w| format!(" - {}", w))
302 .collect::<Vec<_>>()
303 .join("\n")
304 )
305 },
306 )
307 }
308
309 pub fn check_regression(&self, baseline: &BenchmarkResult, threshold: f64) -> RegressionResult {
311 let change_percent =
312 (self.results.gflops - baseline.results.gflops) / baseline.results.gflops * 100.0;
313
314 RegressionResult {
315 baseline_gflops: baseline.results.gflops,
316 current_gflops: self.results.gflops,
317 change_percent,
318 threshold_percent: threshold,
319 is_regression: change_percent < -threshold,
320 status: if change_percent < -threshold {
321 "REGRESSION".to_string()
322 } else if change_percent > threshold {
323 "IMPROVEMENT".to_string()
324 } else {
325 "STABLE".to_string()
326 },
327 }
328 }
329
330 pub fn compare(results: &[(String, BenchmarkResult)]) -> ComparisonResult {
332 let comparisons: Vec<_> = results
333 .iter()
334 .map(|(name, r)| BackendComparison {
335 backend: name.clone(),
336 gflops: r.results.gflops,
337 score: r.score.total,
338 latency_mean_ms: r.results.latency_ms.mean,
339 })
340 .collect();
341
342 let best = comparisons
343 .iter()
344 .max_by(|a, b| {
345 a.gflops
346 .partial_cmp(&b.gflops)
347 .unwrap_or(std::cmp::Ordering::Equal)
348 })
349 .map(|c| c.backend.clone())
350 .unwrap_or_default();
351
352 ComparisonResult {
353 backends: comparisons,
354 recommended: best,
355 }
356 }
357}
358
/// Outcome of comparing a run against a baseline
/// (see `BenchmarkResult::check_regression`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RegressionResult {
    /// Baseline throughput in GFLOP/s.
    pub baseline_gflops: f64,
    /// Current throughput in GFLOP/s.
    pub current_gflops: f64,
    /// Relative change versus baseline, in percent (positive = faster).
    pub change_percent: f64,
    /// Threshold used for classification, in percent.
    pub threshold_percent: f64,
    /// True when `change_percent < -threshold_percent`.
    pub is_regression: bool,
    /// "REGRESSION", "IMPROVEMENT", or "STABLE".
    pub status: String,
}
369
370impl RegressionResult {
371 pub fn format(&self, format: OutputFormat) -> String {
372 match format {
373 OutputFormat::Json => {
374 serde_json::to_string_pretty(self).unwrap_or_else(|_| "{}".to_string())
375 }
376 OutputFormat::Text => {
377 format!(
378 r#"
379=== Regression Check ===
380
381Baseline: {:.2} GFLOP/s
382Current: {:.2} GFLOP/s
383Change: {:+.1}%
384Threshold: {:.1}%
385
386Status: {}
387"#,
388 self.baseline_gflops,
389 self.current_gflops,
390 self.change_percent,
391 self.threshold_percent,
392 self.status,
393 )
394 }
395 }
396 }
397}
398
/// One row of a multi-backend comparison table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendComparison {
    /// Backend name (the label supplied to `BenchmarkResult::compare`).
    pub backend: String,
    /// Throughput in GFLOP/s.
    pub gflops: f64,
    /// Total score (out of 100).
    pub score: u8,
    /// Mean latency in milliseconds.
    pub latency_mean_ms: f64,
}
407
/// Side-by-side comparison of several backends, with a recommendation
/// (the backend with the highest GFLOP/s).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComparisonResult {
    /// One entry per compared backend, in input order.
    pub backends: Vec<BackendComparison>,
    /// Name of the recommended (fastest) backend; empty when no backends.
    pub recommended: String,
}
414
415impl ComparisonResult {
416 pub fn format(&self, format: OutputFormat) -> String {
417 match format {
418 OutputFormat::Json => {
419 serde_json::to_string_pretty(self).unwrap_or_else(|_| "{}".to_string())
420 }
421 OutputFormat::Text => {
422 let mut s = String::from("\n=== Backend Comparison ===\n\n");
423 s.push_str("Backend GFLOP/s Score Latency\n");
424 s.push_str("----------------------------------------\n");
425 for c in &self.backends {
426 s.push_str(&format!(
427 "{:<12} {:>7.2} {:>3} {:.3}ms\n",
428 c.backend, c.gflops, c.score, c.latency_mean_ms
429 ));
430 }
431 s.push_str(&format!("\nRecommended: {}\n", self.recommended));
432 s
433 }
434 }
435 }
436}