// jugar_probar/performance.rs
//! Performance Profiling (Feature 10)
//!
//! Capture and analyze performance metrics during test execution.
//!
//! ## EXTREME TDD: Tests written FIRST per spec
//!
//! ## Toyota Way Application
//!
//! - **Mieruka**: Visual representation of performance data
//! - **Heijunka**: Smooth performance without spikes
//! - **Kaizen**: Continuous performance improvement

use crate::result::{ProbarError, ProbarResult};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::time::Instant;

/// Types of performance metrics that can be recorded.
///
/// `Custom` metrics are identified by the `Measurement::name` field;
/// the other variants map to well-known browser/engine timings.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum MetricType {
    /// Frame rendering time
    FrameTime,
    /// JavaScript/WASM execution time
    ScriptTime,
    /// Layout calculation time
    LayoutTime,
    /// Paint time
    PaintTime,
    /// Network request time
    NetworkTime,
    /// Memory usage
    MemoryUsage,
    /// Garbage collection time
    GcTime,
    /// First contentful paint
    FirstContentfulPaint,
    /// Largest contentful paint
    LargestContentfulPaint,
    /// Time to interactive
    TimeToInteractive,
    /// Total blocking time
    TotalBlockingTime,
    /// Cumulative layout shift
    CumulativeLayoutShift,
    /// Custom metric
    Custom,
}

/// A single performance measurement (one sample of one metric).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Measurement {
    /// Metric type
    pub metric_type: MetricType,
    /// Metric name (used as the grouping key; meaningful for custom metrics)
    pub name: String,
    /// Sampled value, in the unit given by `unit`
    pub value: f64,
    /// Unit (ms, bytes, etc.)
    pub unit: String,
    /// Timestamp (ms since profiling start); 0 until stamped by the profiler
    pub timestamp_ms: u64,
    /// Optional context/tags attached via `with_tag`
    pub tags: HashMap<String, String>,
}

66impl Measurement {
67    /// Create a new measurement
68    #[must_use]
69    pub fn new(metric_type: MetricType, name: &str, value: f64, unit: &str) -> Self {
70        Self {
71            metric_type,
72            name: name.to_string(),
73            value,
74            unit: unit.to_string(),
75            timestamp_ms: 0,
76            tags: HashMap::new(),
77        }
78    }
79
80    /// Create a timing measurement in milliseconds
81    #[must_use]
82    pub fn timing(name: &str, ms: f64) -> Self {
83        Self::new(MetricType::Custom, name, ms, "ms")
84    }
85
86    /// Create a memory measurement in bytes
87    #[must_use]
88    pub fn memory(name: &str, bytes: u64) -> Self {
89        Self::new(MetricType::MemoryUsage, name, bytes as f64, "bytes")
90    }
91
92    /// Create a frame time measurement
93    #[must_use]
94    pub fn frame_time(ms: f64) -> Self {
95        Self::new(MetricType::FrameTime, "frame_time", ms, "ms")
96    }
97
98    /// Set timestamp
99    #[must_use]
100    pub const fn with_timestamp(mut self, timestamp_ms: u64) -> Self {
101        self.timestamp_ms = timestamp_ms;
102        self
103    }
104
105    /// Add a tag
106    #[must_use]
107    pub fn with_tag(mut self, key: &str, value: &str) -> Self {
108        self.tags.insert(key.to_string(), value.to_string());
109        self
110    }
111}
112
/// Statistics summarizing a set of measurements for one metric.
///
/// Produced by `MetricStats::from_values`; `std_dev` is the population
/// standard deviation (variance divided by `count`, not `count - 1`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricStats {
    /// Number of samples
    pub count: usize,
    /// Minimum value
    pub min: f64,
    /// Maximum value
    pub max: f64,
    /// Mean value
    pub mean: f64,
    /// Median value (50th percentile, linearly interpolated)
    pub median: f64,
    /// 95th percentile
    pub p95: f64,
    /// 99th percentile
    pub p99: f64,
    /// Standard deviation (population)
    pub std_dev: f64,
    /// Sum of all values
    pub sum: f64,
}

136impl MetricStats {
137    /// Calculate statistics from a slice of values
138    #[must_use]
139    pub fn from_values(values: &[f64]) -> Self {
140        if values.is_empty() {
141            return Self {
142                count: 0,
143                min: 0.0,
144                max: 0.0,
145                mean: 0.0,
146                median: 0.0,
147                p95: 0.0,
148                p99: 0.0,
149                std_dev: 0.0,
150                sum: 0.0,
151            };
152        }
153
154        let mut sorted = values.to_vec();
155        sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
156
157        let count = sorted.len();
158        let sum: f64 = sorted.iter().sum();
159        let mean = sum / count as f64;
160        let min = sorted[0];
161        let max = sorted[count - 1];
162        let median = Self::percentile(&sorted, 50.0);
163        let p95 = Self::percentile(&sorted, 95.0);
164        let p99 = Self::percentile(&sorted, 99.0);
165
166        let variance: f64 = sorted.iter().map(|v| (v - mean).powi(2)).sum::<f64>() / count as f64;
167        let std_dev = variance.sqrt();
168
169        Self {
170            count,
171            min,
172            max,
173            mean,
174            median,
175            p95,
176            p99,
177            std_dev,
178            sum,
179        }
180    }
181
182    fn percentile(sorted: &[f64], p: f64) -> f64 {
183        if sorted.is_empty() {
184            return 0.0;
185        }
186        let rank = (p / 100.0) * (sorted.len() - 1) as f64;
187        let lower = rank.floor() as usize;
188        let upper = rank.ceil() as usize;
189        if lower == upper {
190            sorted[lower]
191        } else {
192            let weight = rank - lower as f64;
193            sorted[lower] * (1.0 - weight) + sorted[upper] * weight
194        }
195    }
196}
197
/// Performance threshold for assertions.
///
/// Every limit is optional; a limit that is `None` is not checked, so a
/// freshly created threshold passes everything.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceThreshold {
    /// Metric name this threshold applies to
    pub name: String,
    /// Maximum allowed value (checked against `MetricStats::max`)
    pub max: Option<f64>,
    /// Minimum allowed value (checked against `MetricStats::min`)
    pub min: Option<f64>,
    /// Maximum allowed mean
    pub max_mean: Option<f64>,
    /// Maximum allowed p95
    pub max_p95: Option<f64>,
    /// Maximum allowed p99
    pub max_p99: Option<f64>,
}

215impl PerformanceThreshold {
216    /// Create a new threshold
217    #[must_use]
218    pub fn new(name: &str) -> Self {
219        Self {
220            name: name.to_string(),
221            max: None,
222            min: None,
223            max_mean: None,
224            max_p95: None,
225            max_p99: None,
226        }
227    }
228
229    /// Set maximum value
230    #[must_use]
231    pub const fn with_max(mut self, max: f64) -> Self {
232        self.max = Some(max);
233        self
234    }
235
236    /// Set minimum value
237    #[must_use]
238    pub const fn with_min(mut self, min: f64) -> Self {
239        self.min = Some(min);
240        self
241    }
242
243    /// Set maximum mean
244    #[must_use]
245    pub const fn with_max_mean(mut self, max_mean: f64) -> Self {
246        self.max_mean = Some(max_mean);
247        self
248    }
249
250    /// Set maximum p95
251    #[must_use]
252    pub const fn with_max_p95(mut self, max_p95: f64) -> Self {
253        self.max_p95 = Some(max_p95);
254        self
255    }
256
257    /// Set maximum p99
258    #[must_use]
259    pub const fn with_max_p99(mut self, max_p99: f64) -> Self {
260        self.max_p99 = Some(max_p99);
261        self
262    }
263
264    /// Check if stats pass this threshold
265    pub fn check(&self, stats: &MetricStats) -> ProbarResult<()> {
266        if let Some(max) = self.max {
267            if stats.max > max {
268                return Err(ProbarError::AssertionError {
269                    message: format!(
270                        "{}: max value {:.2} exceeds threshold {:.2}",
271                        self.name, stats.max, max
272                    ),
273                });
274            }
275        }
276
277        if let Some(min) = self.min {
278            if stats.min < min {
279                return Err(ProbarError::AssertionError {
280                    message: format!(
281                        "{}: min value {:.2} below threshold {:.2}",
282                        self.name, stats.min, min
283                    ),
284                });
285            }
286        }
287
288        if let Some(max_mean) = self.max_mean {
289            if stats.mean > max_mean {
290                return Err(ProbarError::AssertionError {
291                    message: format!(
292                        "{}: mean {:.2} exceeds threshold {:.2}",
293                        self.name, stats.mean, max_mean
294                    ),
295                });
296            }
297        }
298
299        if let Some(max_p95) = self.max_p95 {
300            if stats.p95 > max_p95 {
301                return Err(ProbarError::AssertionError {
302                    message: format!(
303                        "{}: p95 {:.2} exceeds threshold {:.2}",
304                        self.name, stats.p95, max_p95
305                    ),
306                });
307            }
308        }
309
310        if let Some(max_p99) = self.max_p99 {
311            if stats.p99 > max_p99 {
312                return Err(ProbarError::AssertionError {
313                    message: format!(
314                        "{}: p99 {:.2} exceeds threshold {:.2}",
315                        self.name, stats.p99, max_p99
316                    ),
317                });
318            }
319        }
320
321        Ok(())
322    }
323}
324
/// A performance profile containing all measurements for one test run.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct PerformanceProfile {
    /// All measurements grouped by metric name
    pub measurements: HashMap<String, Vec<Measurement>>,
    /// Profile start time; skipped by serde, so it deserializes to `None`
    #[serde(skip)]
    pub start_time: Option<Instant>,
    /// Profile duration; `None` until the profiler is stopped
    pub duration_ms: Option<u64>,
    /// Test name this profile belongs to
    pub test_name: String,
}

339impl PerformanceProfile {
340    /// Create a new profile
341    #[must_use]
342    pub fn new(test_name: &str) -> Self {
343        Self {
344            measurements: HashMap::new(),
345            start_time: None,
346            duration_ms: None,
347            test_name: test_name.to_string(),
348        }
349    }
350
351    /// Add a measurement
352    pub fn add(&mut self, measurement: Measurement) {
353        self.measurements
354            .entry(measurement.name.clone())
355            .or_default()
356            .push(measurement);
357    }
358
359    /// Get statistics for a metric
360    #[must_use]
361    pub fn stats(&self, name: &str) -> Option<MetricStats> {
362        self.measurements.get(name).map(|measurements| {
363            let values: Vec<f64> = measurements.iter().map(|m| m.value).collect();
364            MetricStats::from_values(&values)
365        })
366    }
367
368    /// Get all metric names
369    #[must_use]
370    pub fn metric_names(&self) -> Vec<String> {
371        self.measurements.keys().cloned().collect()
372    }
373
374    /// Get total measurement count
375    #[must_use]
376    pub fn measurement_count(&self) -> usize {
377        self.measurements.values().map(|v| v.len()).sum()
378    }
379
380    /// Check thresholds
381    pub fn check_thresholds(&self, thresholds: &[PerformanceThreshold]) -> ProbarResult<()> {
382        for threshold in thresholds {
383            if let Some(stats) = self.stats(&threshold.name) {
384                threshold.check(&stats)?;
385            }
386        }
387        Ok(())
388    }
389
390    /// Generate a summary report
391    #[must_use]
392    pub fn summary(&self) -> PerformanceSummary {
393        let mut metrics = HashMap::new();
394        for name in self.metric_names() {
395            if let Some(stats) = self.stats(&name) {
396                metrics.insert(name, stats);
397            }
398        }
399        PerformanceSummary {
400            test_name: self.test_name.clone(),
401            duration_ms: self.duration_ms.unwrap_or(0),
402            metrics,
403        }
404    }
405}
406
/// Summary of a performance profile, suitable for serialization/reporting.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceSummary {
    /// Test name the summary belongs to
    pub test_name: String,
    /// Total duration of the profiled run in milliseconds
    pub duration_ms: u64,
    /// Statistics per metric name
    pub metrics: HashMap<String, MetricStats>,
}

/// Performance profiler for capturing metrics during a test run.
///
/// Measurements recorded while inactive (before `start`/after `stop`)
/// are dropped.
#[derive(Debug)]
pub struct PerformanceProfiler {
    /// Current profile accumulating measurements
    profile: PerformanceProfile,
    /// Profiling start time; timestamps are relative to this instant
    start_time: Instant,
    /// Whether profiling is active (between `start` and `stop`)
    active: bool,
    /// Thresholds to check via `check_thresholds`
    thresholds: Vec<PerformanceThreshold>,
    /// Open named timers started by `start_timer`
    timers: HashMap<String, Instant>,
}

impl Default for PerformanceProfiler {
    /// Equivalent to `PerformanceProfiler::new("")` (unnamed test).
    fn default() -> Self {
        Self::new("")
    }
}

439impl PerformanceProfiler {
440    /// Create a new profiler
441    #[must_use]
442    pub fn new(test_name: &str) -> Self {
443        Self {
444            profile: PerformanceProfile::new(test_name),
445            start_time: Instant::now(),
446            active: false,
447            thresholds: Vec::new(),
448            timers: HashMap::new(),
449        }
450    }
451
452    /// Start profiling
453    pub fn start(&mut self) {
454        self.active = true;
455        self.start_time = Instant::now();
456        self.profile.start_time = Some(self.start_time);
457    }
458
459    /// Stop profiling
460    pub fn stop(&mut self) -> PerformanceProfile {
461        self.active = false;
462        self.profile.duration_ms = Some(self.start_time.elapsed().as_millis() as u64);
463        self.profile.clone()
464    }
465
466    /// Check if profiling is active
467    #[must_use]
468    pub const fn is_active(&self) -> bool {
469        self.active
470    }
471
472    /// Get elapsed time in milliseconds
473    #[must_use]
474    pub fn elapsed_ms(&self) -> u64 {
475        self.start_time.elapsed().as_millis() as u64
476    }
477
478    /// Record a measurement
479    pub fn record(&mut self, measurement: Measurement) {
480        if self.active {
481            let measurement = measurement.with_timestamp(self.elapsed_ms());
482            self.profile.add(measurement);
483        }
484    }
485
486    /// Record a frame time
487    pub fn record_frame_time(&mut self, ms: f64) {
488        self.record(Measurement::frame_time(ms));
489    }
490
491    /// Record a custom timing
492    pub fn record_timing(&mut self, name: &str, ms: f64) {
493        self.record(Measurement::timing(name, ms));
494    }
495
496    /// Record memory usage
497    pub fn record_memory(&mut self, name: &str, bytes: u64) {
498        self.record(Measurement::memory(name, bytes));
499    }
500
501    /// Start a timer
502    pub fn start_timer(&mut self, name: &str) {
503        self.timers.insert(name.to_string(), Instant::now());
504    }
505
506    /// Stop a timer and record the measurement
507    pub fn stop_timer(&mut self, name: &str) -> Option<f64> {
508        self.timers.remove(name).map(|start| {
509            let duration_ms = start.elapsed().as_secs_f64() * 1000.0;
510            self.record_timing(name, duration_ms);
511            duration_ms
512        })
513    }
514
515    /// Add a threshold
516    pub fn add_threshold(&mut self, threshold: PerformanceThreshold) {
517        self.thresholds.push(threshold);
518    }
519
520    /// Check all thresholds
521    pub fn check_thresholds(&self) -> ProbarResult<()> {
522        self.profile.check_thresholds(&self.thresholds)
523    }
524
525    /// Get current profile
526    #[must_use]
527    pub fn profile(&self) -> &PerformanceProfile {
528        &self.profile
529    }
530
531    /// Get statistics for a metric
532    #[must_use]
533    pub fn stats(&self, name: &str) -> Option<MetricStats> {
534        self.profile.stats(name)
535    }
536
537    /// Measure a closure
538    pub fn measure<F, T>(&mut self, name: &str, f: F) -> T
539    where
540        F: FnOnce() -> T,
541    {
542        let start = Instant::now();
543        let result = f();
544        let duration_ms = start.elapsed().as_secs_f64() * 1000.0;
545        self.record_timing(name, duration_ms);
546        result
547    }
548}
549
/// Performance monitor for continuous frame-rate monitoring.
#[derive(Debug)]
pub struct PerformanceMonitor {
    /// Frame times buffer (ms), bounded by `max_buffer_size`
    frame_times: Vec<f64>,
    /// Maximum buffer size before oldest samples are discarded
    max_buffer_size: usize,
    /// Target frame time (ms), e.g. 16.67 for 60 FPS
    target_frame_time: f64,
    /// Warning threshold (fraction above target, e.g. 0.5 = 50%)
    warning_threshold: f64,
    /// Count of frames that took more than twice the target time
    frame_drops: u64,
    /// Instant of the last recorded frame; `None` until the first frame
    last_frame_time: Option<Instant>,
}

impl Default for PerformanceMonitor {
    /// Equivalent to `PerformanceMonitor::new()` (60 FPS target).
    fn default() -> Self {
        Self::new()
    }
}

573impl PerformanceMonitor {
574    /// Create a new monitor targeting 60 FPS
575    #[must_use]
576    pub fn new() -> Self {
577        Self {
578            frame_times: Vec::new(),
579            max_buffer_size: 1000,
580            target_frame_time: 16.67, // 60 FPS
581            warning_threshold: 0.5,   // 50% above target
582            frame_drops: 0,
583            last_frame_time: None,
584        }
585    }
586
587    /// Set target frame rate
588    #[must_use]
589    pub fn with_target_fps(mut self, fps: u32) -> Self {
590        self.target_frame_time = 1000.0 / fps as f64;
591        self
592    }
593
594    /// Set warning threshold
595    #[must_use]
596    pub const fn with_warning_threshold(mut self, threshold: f64) -> Self {
597        self.warning_threshold = threshold;
598        self
599    }
600
601    /// Record a frame
602    pub fn record_frame(&mut self) {
603        let now = Instant::now();
604        if let Some(last) = self.last_frame_time {
605            let frame_time = now.duration_since(last).as_secs_f64() * 1000.0;
606            self.frame_times.push(frame_time);
607
608            // Check for frame drop
609            if frame_time > self.target_frame_time * 2.0 {
610                self.frame_drops += 1;
611            }
612
613            // Keep buffer bounded
614            if self.frame_times.len() > self.max_buffer_size {
615                self.frame_times.remove(0);
616            }
617        }
618        self.last_frame_time = Some(now);
619    }
620
621    /// Record a frame with explicit time
622    pub fn record_frame_time(&mut self, ms: f64) {
623        self.frame_times.push(ms);
624
625        if ms > self.target_frame_time * 2.0 {
626            self.frame_drops += 1;
627        }
628
629        if self.frame_times.len() > self.max_buffer_size {
630            self.frame_times.remove(0);
631        }
632    }
633
634    /// Get current FPS
635    #[must_use]
636    pub fn current_fps(&self) -> f64 {
637        if self.frame_times.is_empty() {
638            return 0.0;
639        }
640        let avg_frame_time: f64 =
641            self.frame_times.iter().sum::<f64>() / self.frame_times.len() as f64;
642        if avg_frame_time > 0.0 {
643            1000.0 / avg_frame_time
644        } else {
645            0.0
646        }
647    }
648
649    /// Get frame time statistics
650    #[must_use]
651    pub fn frame_time_stats(&self) -> MetricStats {
652        MetricStats::from_values(&self.frame_times)
653    }
654
655    /// Get frame drop count
656    #[must_use]
657    pub const fn frame_drops(&self) -> u64 {
658        self.frame_drops
659    }
660
661    /// Get frame count
662    #[must_use]
663    pub fn frame_count(&self) -> usize {
664        self.frame_times.len()
665    }
666
667    /// Check if performance is within target
668    #[must_use]
669    pub fn is_within_target(&self) -> bool {
670        if self.frame_times.is_empty() {
671            return true;
672        }
673        let stats = self.frame_time_stats();
674        stats.mean <= self.target_frame_time * (1.0 + self.warning_threshold)
675    }
676
677    /// Assert performance meets target
678    pub fn assert_performance(&self) -> ProbarResult<()> {
679        if self.frame_times.is_empty() {
680            return Ok(());
681        }
682
683        let stats = self.frame_time_stats();
684        let threshold = self.target_frame_time * (1.0 + self.warning_threshold);
685
686        if stats.mean > threshold {
687            return Err(ProbarError::AssertionError {
688                message: format!(
689                    "Mean frame time {:.2}ms exceeds threshold {:.2}ms (target: {:.2}ms @ {:.0} FPS)",
690                    stats.mean,
691                    threshold,
692                    self.target_frame_time,
693                    1000.0 / self.target_frame_time
694                ),
695            });
696        }
697
698        Ok(())
699    }
700
701    /// Reset the monitor
702    pub fn reset(&mut self) {
703        self.frame_times.clear();
704        self.frame_drops = 0;
705        self.last_frame_time = None;
706    }
707}
708
/// Builder for a `PerformanceProfiler` with pre-registered thresholds.
#[derive(Debug, Default)]
pub struct PerformanceProfilerBuilder {
    // Test name passed through to the built profiler.
    test_name: String,
    // Thresholds registered on the profiler at build time.
    thresholds: Vec<PerformanceThreshold>,
}

716impl PerformanceProfilerBuilder {
717    /// Create a new builder
718    #[must_use]
719    pub fn new(test_name: &str) -> Self {
720        Self {
721            test_name: test_name.to_string(),
722            thresholds: Vec::new(),
723        }
724    }
725
726    /// Add a threshold
727    #[must_use]
728    pub fn threshold(mut self, threshold: PerformanceThreshold) -> Self {
729        self.thresholds.push(threshold);
730        self
731    }
732
733    /// Add a max frame time threshold
734    #[must_use]
735    pub fn max_frame_time(self, max_ms: f64) -> Self {
736        self.threshold(PerformanceThreshold::new("frame_time").with_max(max_ms))
737    }
738
739    /// Add a mean frame time threshold
740    #[must_use]
741    pub fn mean_frame_time(self, max_mean_ms: f64) -> Self {
742        self.threshold(PerformanceThreshold::new("frame_time").with_max_mean(max_mean_ms))
743    }
744
745    /// Build the profiler
746    #[must_use]
747    pub fn build(self) -> PerformanceProfiler {
748        let mut profiler = PerformanceProfiler::new(&self.test_name);
749        for threshold in self.thresholds {
750            profiler.add_threshold(threshold);
751        }
752        profiler
753    }
754}
755
#[cfg(test)]
#[allow(clippy::unwrap_used, clippy::expect_used)]
mod tests {
    use super::*;

    // Unit tests for `Measurement` constructors and builder helpers.
    mod measurement_tests {
        use super::*;

        #[test]
        fn test_new() {
            let m = Measurement::new(MetricType::Custom, "test", 42.0, "ms");
            assert_eq!(m.name, "test");
            assert!((m.value - 42.0).abs() < f64::EPSILON);
            assert_eq!(m.unit, "ms");
        }

        #[test]
        fn test_timing() {
            let m = Measurement::timing("render", 16.5);
            assert_eq!(m.name, "render");
            assert_eq!(m.unit, "ms");
        }

        #[test]
        fn test_memory() {
            let m = Measurement::memory("heap", 1024);
            assert!(matches!(m.metric_type, MetricType::MemoryUsage));
            assert_eq!(m.unit, "bytes");
        }

        #[test]
        fn test_frame_time() {
            let m = Measurement::frame_time(16.67);
            assert!(matches!(m.metric_type, MetricType::FrameTime));
        }

        #[test]
        fn test_with_timestamp() {
            let m = Measurement::timing("test", 10.0).with_timestamp(1000);
            assert_eq!(m.timestamp_ms, 1000);
        }

        #[test]
        fn test_with_tag() {
            let m = Measurement::timing("test", 10.0).with_tag("component", "renderer");
            assert_eq!(m.tags.get("component"), Some(&"renderer".to_string()));
        }
    }

    // Unit tests for `MetricStats::from_values` (edge cases and percentiles).
    mod metric_stats_tests {
        use super::*;

        #[test]
        fn test_empty() {
            let stats = MetricStats::from_values(&[]);
            assert_eq!(stats.count, 0);
            assert!((stats.mean - 0.0).abs() < f64::EPSILON);
        }

        #[test]
        fn test_single_value() {
            let stats = MetricStats::from_values(&[42.0]);
            assert_eq!(stats.count, 1);
            assert!((stats.min - 42.0).abs() < f64::EPSILON);
            assert!((stats.max - 42.0).abs() < f64::EPSILON);
            assert!((stats.mean - 42.0).abs() < f64::EPSILON);
        }

        #[test]
        fn test_multiple_values() {
            let stats = MetricStats::from_values(&[1.0, 2.0, 3.0, 4.0, 5.0]);
            assert_eq!(stats.count, 5);
            assert!((stats.min - 1.0).abs() < f64::EPSILON);
            assert!((stats.max - 5.0).abs() < f64::EPSILON);
            assert!((stats.mean - 3.0).abs() < f64::EPSILON);
            assert!((stats.median - 3.0).abs() < f64::EPSILON);
            assert!((stats.sum - 15.0).abs() < f64::EPSILON);
        }

        #[test]
        fn test_percentiles() {
            let values: Vec<f64> = (1..=100).map(|i| i as f64).collect();
            let stats = MetricStats::from_values(&values);
            assert!((stats.p95 - 95.0).abs() < 1.0);
            assert!((stats.p99 - 99.0).abs() < 1.0);
        }

        #[test]
        fn test_std_dev() {
            let stats = MetricStats::from_values(&[2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0]);
            // Mean = 5, variance should be 4, std_dev should be 2
            assert!((stats.std_dev - 2.0).abs() < 0.1);
        }
    }

    // Unit tests for `PerformanceThreshold` builders and `check`.
    mod performance_threshold_tests {
        use super::*;

        #[test]
        fn test_new() {
            let t = PerformanceThreshold::new("frame_time");
            assert_eq!(t.name, "frame_time");
            assert!(t.max.is_none());
        }

        #[test]
        fn test_with_max() {
            let t = PerformanceThreshold::new("frame_time").with_max(16.67);
            assert_eq!(t.max, Some(16.67));
        }

        #[test]
        fn test_check_passes() {
            let t = PerformanceThreshold::new("test")
                .with_max(100.0)
                .with_max_mean(50.0);

            let stats = MetricStats::from_values(&[10.0, 20.0, 30.0]);
            assert!(t.check(&stats).is_ok());
        }

        #[test]
        fn test_check_max_fails() {
            let t = PerformanceThreshold::new("test").with_max(20.0);
            let stats = MetricStats::from_values(&[10.0, 30.0]);
            assert!(t.check(&stats).is_err());
        }

        #[test]
        fn test_check_mean_fails() {
            let t = PerformanceThreshold::new("test").with_max_mean(15.0);
            let stats = MetricStats::from_values(&[20.0, 20.0, 20.0]);
            assert!(t.check(&stats).is_err());
        }

        #[test]
        fn test_check_p95_fails() {
            let t = PerformanceThreshold::new("test").with_max_p95(50.0);
            let values: Vec<f64> = (1..=100).map(|i| i as f64).collect();
            let stats = MetricStats::from_values(&values);
            assert!(t.check(&stats).is_err());
        }
    }

    // Unit tests for `PerformanceProfile` accumulation and reporting.
    mod performance_profile_tests {
        use super::*;

        #[test]
        fn test_new() {
            let profile = PerformanceProfile::new("test");
            assert_eq!(profile.test_name, "test");
            assert!(profile.measurements.is_empty());
        }

        #[test]
        fn test_add() {
            let mut profile = PerformanceProfile::new("test");
            profile.add(Measurement::timing("render", 16.0));
            profile.add(Measurement::timing("render", 17.0));

            assert_eq!(profile.measurement_count(), 2);
        }

        #[test]
        fn test_stats() {
            let mut profile = PerformanceProfile::new("test");
            profile.add(Measurement::timing("render", 10.0));
            profile.add(Measurement::timing("render", 20.0));
            profile.add(Measurement::timing("render", 30.0));

            let stats = profile.stats("render").unwrap();
            assert_eq!(stats.count, 3);
            assert!((stats.mean - 20.0).abs() < f64::EPSILON);
        }

        #[test]
        fn test_metric_names() {
            let mut profile = PerformanceProfile::new("test");
            profile.add(Measurement::timing("render", 10.0));
            profile.add(Measurement::timing("update", 5.0));

            let names = profile.metric_names();
            assert_eq!(names.len(), 2);
        }

        #[test]
        fn test_check_thresholds() {
            let mut profile = PerformanceProfile::new("test");
            profile.add(Measurement::timing("render", 10.0));

            let thresholds = vec![PerformanceThreshold::new("render").with_max(20.0)];
            assert!(profile.check_thresholds(&thresholds).is_ok());
        }

        #[test]
        fn test_summary() {
            let mut profile = PerformanceProfile::new("test");
            profile.add(Measurement::timing("render", 10.0));
            profile.duration_ms = Some(1000);

            let summary = profile.summary();
            assert_eq!(summary.test_name, "test");
            assert_eq!(summary.duration_ms, 1000);
        }
    }

    // Unit tests for `PerformanceProfiler` lifecycle, timers, and thresholds.
    mod performance_profiler_tests {
        use super::*;

        #[test]
        fn test_new() {
            let profiler = PerformanceProfiler::new("test");
            assert!(!profiler.is_active());
        }

        #[test]
        fn test_start_stop() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.start();
            assert!(profiler.is_active());

            let profile = profiler.stop();
            assert!(!profiler.is_active());
            assert!(profile.duration_ms.is_some());
        }

        #[test]
        fn test_record() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.start();
            profiler.record_frame_time(16.67);

            let profile = profiler.stop();
            assert_eq!(profile.measurement_count(), 1);
        }

        #[test]
        fn test_record_when_inactive() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.record_frame_time(16.67);

            assert_eq!(profiler.profile().measurement_count(), 0);
        }

        #[test]
        fn test_timer() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.start();

            profiler.start_timer("operation");
            std::thread::sleep(std::time::Duration::from_millis(10));
            let duration = profiler.stop_timer("operation");

            assert!(duration.is_some());
            assert!(duration.unwrap() >= 10.0);
        }

        #[test]
        fn test_measure() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.start();

            let result = profiler.measure("calculation", || {
                std::thread::sleep(std::time::Duration::from_millis(5));
                42
            });

            assert_eq!(result, 42);
            assert!(profiler.stats("calculation").is_some());
        }

        #[test]
        fn test_add_threshold() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.add_threshold(PerformanceThreshold::new("frame_time").with_max(20.0));
            profiler.start();
            profiler.record_frame_time(15.0);
            profiler.stop();

            assert!(profiler.check_thresholds().is_ok());
        }
    }

1039    mod performance_monitor_tests {
1040        use super::*;
1041
1042        #[test]
1043        fn test_new() {
1044            let monitor = PerformanceMonitor::new();
1045            assert_eq!(monitor.frame_count(), 0);
1046            assert_eq!(monitor.frame_drops(), 0);
1047        }
1048
1049        #[test]
1050        fn test_with_target_fps() {
1051            let monitor = PerformanceMonitor::new().with_target_fps(30);
1052            assert!((monitor.target_frame_time - 33.33).abs() < 0.1);
1053        }
1054
1055        #[test]
1056        fn test_record_frame_time() {
1057            let mut monitor = PerformanceMonitor::new();
1058            monitor.record_frame_time(16.0);
1059            monitor.record_frame_time(17.0);
1060
1061            assert_eq!(monitor.frame_count(), 2);
1062        }
1063
1064        #[test]
1065        fn test_current_fps() {
1066            let mut monitor = PerformanceMonitor::new();
1067            monitor.record_frame_time(16.67);
1068            monitor.record_frame_time(16.67);
1069
1070            let fps = monitor.current_fps();
1071            assert!((fps - 60.0).abs() < 1.0);
1072        }
1073
1074        #[test]
1075        fn test_frame_drops() {
1076            let mut monitor = PerformanceMonitor::new();
1077            monitor.record_frame_time(16.0); // Normal
1078            monitor.record_frame_time(50.0); // Frame drop (> 2x target)
1079
1080            assert_eq!(monitor.frame_drops(), 1);
1081        }
1082
1083        #[test]
1084        fn test_is_within_target() {
1085            let mut monitor = PerformanceMonitor::new();
1086            monitor.record_frame_time(16.0);
1087            monitor.record_frame_time(17.0);
1088
1089            assert!(monitor.is_within_target());
1090        }
1091
1092        #[test]
1093        fn test_is_not_within_target() {
1094            let mut monitor = PerformanceMonitor::new();
1095            monitor.record_frame_time(30.0);
1096            monitor.record_frame_time(35.0);
1097
1098            assert!(!monitor.is_within_target());
1099        }
1100
1101        #[test]
1102        fn test_assert_performance() {
1103            let mut monitor = PerformanceMonitor::new();
1104            monitor.record_frame_time(16.0);
1105            monitor.record_frame_time(17.0);
1106
1107            assert!(monitor.assert_performance().is_ok());
1108        }
1109
1110        #[test]
1111        fn test_reset() {
1112            let mut monitor = PerformanceMonitor::new();
1113            monitor.record_frame_time(16.0);
1114            monitor.record_frame_time(50.0);
1115
1116            monitor.reset();
1117
1118            assert_eq!(monitor.frame_count(), 0);
1119            assert_eq!(monitor.frame_drops(), 0);
1120        }
1121    }
1122
1123    mod performance_profiler_builder_tests {
1124        use super::*;
1125
1126        #[test]
1127        fn test_builder() {
1128            let profiler = PerformanceProfilerBuilder::new("test")
1129                .max_frame_time(33.33)
1130                .mean_frame_time(16.67)
1131                .build();
1132
1133            assert_eq!(profiler.thresholds.len(), 2);
1134        }
1135    }
1136
1137    mod additional_performance_tests {
1138        use super::*;
1139
1140        #[test]
1141        fn test_measurement_with_tag() {
1142            let m = Measurement::timing("render", 16.0).with_tag("scene", "menu");
1143            assert_eq!(m.tags.get("scene"), Some(&"menu".to_string()));
1144        }
1145
1146        #[test]
1147        fn test_measurement_memory() {
1148            let m = Measurement::memory("heap", 1024 * 1024);
1149            assert_eq!(m.metric_type, MetricType::MemoryUsage);
1150            assert_eq!(m.unit, "bytes");
1151        }
1152
1153        #[test]
1154        fn test_measurement_frame_time() {
1155            let m = Measurement::frame_time(16.67);
1156            assert_eq!(m.metric_type, MetricType::FrameTime);
1157            assert_eq!(m.name, "frame_time");
1158        }
1159
1160        #[test]
1161        fn test_measurement_with_timestamp() {
1162            let m = Measurement::timing("test", 10.0).with_timestamp(100);
1163            assert_eq!(m.timestamp_ms, 100);
1164        }
1165
1166        #[test]
1167        fn test_metric_type_variants() {
1168            let types = [
1169                MetricType::FrameTime,
1170                MetricType::ScriptTime,
1171                MetricType::LayoutTime,
1172                MetricType::PaintTime,
1173                MetricType::NetworkTime,
1174                MetricType::MemoryUsage,
1175                MetricType::GcTime,
1176                MetricType::FirstContentfulPaint,
1177                MetricType::LargestContentfulPaint,
1178                MetricType::TimeToInteractive,
1179                MetricType::TotalBlockingTime,
1180                MetricType::CumulativeLayoutShift,
1181                MetricType::Custom,
1182            ];
1183            for t in &types {
1184                let debug = format!("{:?}", t);
1185                assert!(!debug.is_empty());
1186            }
1187        }
1188
1189        #[test]
1190        fn test_threshold_min_max() {
1191            let t = PerformanceThreshold::new("test")
1192                .with_min(10.0)
1193                .with_max(100.0);
1194            assert_eq!(t.min, Some(10.0));
1195            assert_eq!(t.max, Some(100.0));
1196        }
1197
1198        #[test]
1199        fn test_threshold_check_failure_max() {
1200            let mut profile = PerformanceProfile::new("test");
1201            profile.add(Measurement::timing("render", 100.0));
1202
1203            let thresholds = vec![PerformanceThreshold::new("render").with_max(50.0)];
1204            assert!(profile.check_thresholds(&thresholds).is_err());
1205        }
1206
1207        #[test]
1208        fn test_threshold_check_failure_min() {
1209            let mut profile = PerformanceProfile::new("test");
1210            profile.add(Measurement::timing("render", 5.0));
1211
1212            let thresholds = vec![PerformanceThreshold::new("render").with_min(10.0)];
1213            assert!(profile.check_thresholds(&thresholds).is_err());
1214        }
1215
1216        #[test]
1217        fn test_profile_stats_nonexistent() {
1218            let profile = PerformanceProfile::new("test");
1219            assert!(profile.stats("nonexistent").is_none());
1220        }
1221
1222        #[test]
1223        fn test_profiler_stop_timer_nonexistent() {
1224            let mut profiler = PerformanceProfiler::new("test");
1225            profiler.start();
1226            let duration = profiler.stop_timer("nonexistent");
1227            assert!(duration.is_none());
1228        }
1229
1230        #[test]
1231        fn test_metric_stats_debug() {
1232            let stats = MetricStats {
1233                count: 10,
1234                min: 1.0,
1235                max: 100.0,
1236                mean: 50.0,
1237                median: 45.0,
1238                std_dev: 25.0,
1239                p95: 90.0,
1240                p99: 98.0,
1241                sum: 500.0,
1242            };
1243            let debug = format!("{:?}", stats);
1244            assert!(debug.contains("MetricStats"));
1245        }
1246
1247        #[test]
1248        fn test_performance_summary_debug() {
1249            let summary = PerformanceSummary {
1250                test_name: "test".to_string(),
1251                duration_ms: 1000,
1252                metrics: HashMap::new(),
1253            };
1254            let debug = format!("{:?}", summary);
1255            assert!(debug.contains("PerformanceSummary"));
1256        }
1257
1258        #[test]
1259        fn test_monitor_assert_performance_failure() {
1260            let mut monitor = PerformanceMonitor::new();
1261            monitor.record_frame_time(50.0);
1262            monitor.record_frame_time(60.0);
1263
1264            assert!(monitor.assert_performance().is_err());
1265        }
1266    }
1267
    mod edge_case_coverage_tests {
        use super::*;
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};

        // PerformanceProfiler::default() yields an inactive profiler with an
        // empty test name.
        #[test]
        fn test_profiler_default() {
            let profiler = PerformanceProfiler::default();
            assert!(!profiler.is_active());
            assert_eq!(profiler.profile().test_name, "");
        }

        // PerformanceMonitor::default() starts with no frames and no drops.
        #[test]
        fn test_monitor_default() {
            let monitor = PerformanceMonitor::default();
            assert_eq!(monitor.frame_count(), 0);
            assert_eq!(monitor.frame_drops(), 0);
        }

        // PerformanceProfilerBuilder::default() builds a profiler with an
        // empty test name.
        #[test]
        fn test_profiler_builder_default() {
            let builder = PerformanceProfilerBuilder::default();
            let profiler = builder.build();
            assert_eq!(profiler.profile().test_name, "");
        }

        // PerformanceProfile::default() has no measurements, no timing state,
        // and an empty test name.
        #[test]
        fn test_profile_default() {
            let profile = PerformanceProfile::default();
            assert!(profile.measurements.is_empty());
            assert!(profile.start_time.is_none());
            assert!(profile.duration_ms.is_none());
            assert_eq!(profile.test_name, "");
        }

        // MetricType's Hash impl: equal variants hash equal; distinct variants
        // are expected to hash differently under DefaultHasher (collisions are
        // theoretically possible but deterministic here).
        #[test]
        fn test_metric_type_hash() {
            let metric1 = MetricType::FrameTime;
            let metric2 = MetricType::FrameTime;
            let metric3 = MetricType::ScriptTime;

            let mut hasher1 = DefaultHasher::new();
            let mut hasher2 = DefaultHasher::new();
            let mut hasher3 = DefaultHasher::new();

            metric1.hash(&mut hasher1);
            metric2.hash(&mut hasher2);
            metric3.hash(&mut hasher3);

            assert_eq!(hasher1.finish(), hasher2.finish());
            assert_ne!(hasher1.finish(), hasher3.finish());
        }

        // MetricType is usable as a HashMap key (Eq + Hash).
        #[test]
        fn test_metric_type_as_hashmap_key() {
            let mut map: HashMap<MetricType, i32> = HashMap::new();
            map.insert(MetricType::FrameTime, 1);
            map.insert(MetricType::ScriptTime, 2);
            map.insert(MetricType::Custom, 3);

            assert_eq!(map.get(&MetricType::FrameTime), Some(&1));
            assert_eq!(map.get(&MetricType::ScriptTime), Some(&2));
            assert_eq!(map.get(&MetricType::Custom), Some(&3));
        }

        // MetricType is Copy, so assignment copies rather than moves.
        #[test]
        fn test_metric_type_clone() {
            let metric = MetricType::LayoutTime;
            let cloned = metric;
            assert_eq!(metric, cloned);
        }

        // PerformanceMonitor::with_warning_threshold stores the threshold.
        #[test]
        fn test_monitor_with_warning_threshold() {
            let monitor = PerformanceMonitor::new().with_warning_threshold(0.25);
            assert!((monitor.warning_threshold - 0.25).abs() < f64::EPSILON);
        }

        // Recording more frames than the buffer holds caps frame_count at the
        // maximum buffer size (1000).
        #[test]
        fn test_monitor_buffer_overflow() {
            let mut monitor = PerformanceMonitor::new();
            // Record more frames than max_buffer_size (1000)
            for i in 0..1005 {
                monitor.record_frame_time(16.0 + (i as f64 * 0.001));
            }
            // Buffer should be capped at max_buffer_size
            assert_eq!(monitor.frame_count(), 1000);
        }

        // record_frame() measures wall-clock intervals: the first call only
        // establishes a baseline; each subsequent call records one interval.
        #[test]
        fn test_monitor_record_frame_actual() {
            let mut monitor = PerformanceMonitor::new();
            // First frame - no timing recorded (establishes baseline)
            monitor.record_frame();
            assert_eq!(monitor.frame_count(), 0);

            // sleep() guarantees at least 5 ms, so the recorded interval
            // (and therefore the mean) must be >= 5.0.
            std::thread::sleep(std::time::Duration::from_millis(5));
            monitor.record_frame();
            assert_eq!(monitor.frame_count(), 1);
            assert!(monitor.frame_time_stats().mean >= 5.0);
        }

        // current_fps() with a zero frame time must not panic on division by
        // zero. The implementation may report either 0.0 or infinity here;
        // the assertion deliberately accepts both.
        #[test]
        fn test_monitor_current_fps_zero_time() {
            let mut monitor = PerformanceMonitor::new();
            monitor.record_frame_time(0.0);
            let fps = monitor.current_fps();
            assert!(fps == 0.0 || fps.is_infinite());
        }

        // current_fps() with no recorded frames reports 0.
        #[test]
        fn test_monitor_current_fps_empty() {
            let monitor = PerformanceMonitor::new();
            assert!((monitor.current_fps() - 0.0).abs() < f64::EPSILON);
        }

        // is_within_target() with no recorded frames is vacuously true.
        #[test]
        fn test_monitor_is_within_target_empty() {
            let monitor = PerformanceMonitor::new();
            assert!(monitor.is_within_target());
        }

        // assert_performance() with no recorded frames succeeds.
        #[test]
        fn test_monitor_assert_performance_empty() {
            let monitor = PerformanceMonitor::new();
            assert!(monitor.assert_performance().is_ok());
        }

        // PerformanceThreshold::with_max_p99 stores the p99 ceiling.
        #[test]
        fn test_threshold_with_max_p99() {
            let t = PerformanceThreshold::new("test").with_max_p99(100.0);
            assert_eq!(t.max_p99, Some(100.0));
        }

        // The p99 of 1..=100 exceeds a 50.0 ceiling, so check() must fail.
        #[test]
        fn test_threshold_check_p99_fails() {
            let t = PerformanceThreshold::new("test").with_max_p99(50.0);
            let values: Vec<f64> = (1..=100).map(|i| i as f64).collect();
            let stats = MetricStats::from_values(&values);
            assert!(t.check(&stats).is_err());
        }

        // PerformanceThreshold::with_max_p95 stores the p95 ceiling.
        #[test]
        fn test_threshold_with_max_p95() {
            let t = PerformanceThreshold::new("test").with_max_p95(80.0);
            assert_eq!(t.max_p95, Some(80.0));
        }

        // With min/max/mean/p95/p99 limits all satisfied by the sample
        // (range 10..50, mean 30), check() must pass.
        #[test]
        fn test_threshold_all_checks_pass() {
            let t = PerformanceThreshold::new("test")
                .with_min(0.0)
                .with_max(100.0)
                .with_max_mean(50.0)
                .with_max_p95(90.0)
                .with_max_p99(99.0);

            let stats = MetricStats::from_values(&[10.0, 20.0, 30.0, 40.0, 50.0]);
            assert!(t.check(&stats).is_ok());
        }

        // record_memory() stores the byte count as a measurement value
        // (1024 * 1024 = 1_048_576).
        #[test]
        fn test_profiler_record_memory() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.start();
            profiler.record_memory("heap_usage", 1024 * 1024);
            let profile = profiler.stop();
            assert_eq!(profile.measurement_count(), 1);
            let stats = profile.stats("heap_usage").unwrap();
            assert!((stats.mean - 1_048_576.0).abs() < f64::EPSILON);
        }

        // record_timing() aggregates repeated samples under one name;
        // mean of 5.5 and 6.5 is exactly 6.0.
        #[test]
        fn test_profiler_record_timing() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.start();
            profiler.record_timing("render_pass", 5.5);
            profiler.record_timing("render_pass", 6.5);
            let profile = profiler.stop();
            let stats = profile.stats("render_pass").unwrap();
            assert_eq!(stats.count, 2);
            assert!((stats.mean - 6.0).abs() < f64::EPSILON);
        }

        // elapsed_ms() reflects wall-clock time since start(); sleep()
        // guarantees at least 10 ms.
        #[test]
        fn test_profiler_elapsed_ms() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.start();
            std::thread::sleep(std::time::Duration::from_millis(10));
            let elapsed = profiler.elapsed_ms();
            assert!(elapsed >= 10);
        }

        // summary() on a never-stopped profile reports a zero duration.
        #[test]
        fn test_profile_summary_no_duration() {
            let profile = PerformanceProfile::new("test");
            let summary = profile.summary();
            assert_eq!(summary.duration_ms, 0);
        }

        // check_thresholds() skips thresholds whose metric was never
        // recorded, so an absent metric cannot cause a failure.
        #[test]
        fn test_profile_check_thresholds_missing_metric() {
            let profile = PerformanceProfile::new("test");
            let thresholds = vec![PerformanceThreshold::new("nonexistent").with_max(100.0)];
            // Should pass because metric doesn't exist
            assert!(profile.check_thresholds(&thresholds).is_ok());
        }

        // Percentiles interpolate between neighbors: the median of
        // [10, 20] is 15.
        #[test]
        fn test_metric_stats_percentile_interpolation() {
            let stats = MetricStats::from_values(&[10.0, 20.0]);
            // Median should be interpolated between 10 and 20
            assert!((stats.median - 15.0).abs() < f64::EPSILON);
        }

        // With a single sample, every percentile equals that sample.
        #[test]
        fn test_metric_stats_percentile_single() {
            let stats = MetricStats::from_values(&[42.0]);
            assert!((stats.median - 42.0).abs() < f64::EPSILON);
            assert!((stats.p95 - 42.0).abs() < f64::EPSILON);
            assert!((stats.p99 - 42.0).abs() < f64::EPSILON);
        }

        // Measurement's Clone copies name, value, and tags.
        #[test]
        fn test_measurement_clone() {
            let m = Measurement::timing("test", 10.0).with_tag("key", "value");
            let cloned = m.clone();
            assert_eq!(cloned.name, m.name);
            assert!((cloned.value - m.value).abs() < f64::EPSILON);
            assert_eq!(cloned.tags, m.tags);
        }

        // MetricStats' Clone copies the computed statistics.
        #[test]
        fn test_metric_stats_clone() {
            let stats = MetricStats::from_values(&[1.0, 2.0, 3.0]);
            let cloned = stats.clone();
            assert_eq!(cloned.count, stats.count);
            assert!((cloned.mean - stats.mean).abs() < f64::EPSILON);
        }

        // PerformanceThreshold's Clone copies name and limits.
        #[test]
        fn test_threshold_clone() {
            let t = PerformanceThreshold::new("test")
                .with_min(1.0)
                .with_max(100.0);
            let cloned = t.clone();
            assert_eq!(cloned.name, t.name);
            assert_eq!(cloned.min, t.min);
            assert_eq!(cloned.max, t.max);
        }

        // PerformanceProfile's Clone copies name and measurements.
        #[test]
        fn test_profile_clone() {
            let mut profile = PerformanceProfile::new("test");
            profile.add(Measurement::timing("render", 16.0));
            let cloned = profile.clone();
            assert_eq!(cloned.test_name, profile.test_name);
            assert_eq!(cloned.measurement_count(), profile.measurement_count());
        }

        // PerformanceSummary's Clone copies name and duration.
        #[test]
        fn test_summary_clone() {
            let summary = PerformanceSummary {
                test_name: "test".to_string(),
                duration_ms: 1000,
                metrics: HashMap::new(),
            };
            let cloned = summary.clone();
            assert_eq!(cloned.test_name, summary.test_name);
            assert_eq!(cloned.duration_ms, summary.duration_ms);
        }

        // Debug output of PerformanceProfiler includes the type name.
        #[test]
        fn test_profiler_debug() {
            let profiler = PerformanceProfiler::new("test");
            let debug = format!("{:?}", profiler);
            assert!(debug.contains("PerformanceProfiler"));
        }

        // Debug output of PerformanceMonitor includes the type name.
        #[test]
        fn test_monitor_debug() {
            let monitor = PerformanceMonitor::new();
            let debug = format!("{:?}", monitor);
            assert!(debug.contains("PerformanceMonitor"));
        }

        // Debug output of PerformanceProfilerBuilder includes the type name.
        #[test]
        fn test_builder_debug() {
            let builder = PerformanceProfilerBuilder::new("test");
            let debug = format!("{:?}", builder);
            assert!(debug.contains("PerformanceProfilerBuilder"));
        }

        // Debug output of PerformanceProfile includes the type name.
        #[test]
        fn test_profile_debug() {
            let profile = PerformanceProfile::new("test");
            let debug = format!("{:?}", profile);
            assert!(debug.contains("PerformanceProfile"));
        }

        // Debug output of PerformanceThreshold includes the type name.
        #[test]
        fn test_threshold_debug() {
            let t = PerformanceThreshold::new("test");
            let debug = format!("{:?}", t);
            assert!(debug.contains("PerformanceThreshold"));
        }

        // Debug output of Measurement includes the type name.
        #[test]
        fn test_measurement_debug() {
            let m = Measurement::timing("test", 10.0);
            let debug = format!("{:?}", m);
            assert!(debug.contains("Measurement"));
        }

        // PerformanceProfilerBuilder::threshold adds one custom threshold.
        #[test]
        fn test_builder_threshold() {
            let profiler = PerformanceProfilerBuilder::new("test")
                .threshold(PerformanceThreshold::new("custom").with_max(50.0))
                .build();
            assert_eq!(profiler.thresholds.len(), 1);
        }

        // with_tag() accumulates: each call adds one key/value pair.
        #[test]
        fn test_measurement_multiple_tags() {
            let m = Measurement::timing("test", 10.0)
                .with_tag("scene", "game")
                .with_tag("phase", "render")
                .with_tag("component", "ui");
            assert_eq!(m.tags.len(), 3);
            assert_eq!(m.tags.get("scene"), Some(&"game".to_string()));
            assert_eq!(m.tags.get("phase"), Some(&"render".to_string()));
            assert_eq!(m.tags.get("component"), Some(&"ui".to_string()));
        }

        // Frame-drop detection at the 2x-target boundary: exactly 2x the
        // target frame time is not a drop (strict comparison); anything
        // beyond it is. NOTE(review): relies on 33.34 == 2 * 16.67 holding
        // exactly in f64, which it does because doubling is exact in binary
        // floating point.
        #[test]
        fn test_frame_drop_at_boundary() {
            let mut monitor = PerformanceMonitor::new(); // target_frame_time = 16.67
            // Exactly 2x target should not count as drop.
            monitor.record_frame_time(33.34);
            assert_eq!(monitor.frame_drops(), 0);
            // Just over 2x target should count as drop
            monitor.record_frame_time(33.35);
            assert_eq!(monitor.frame_drops(), 1);
        }

        // Back-to-back record_frame() calls: N calls yield N-1 intervals.
        #[test]
        fn test_record_frame_rapid() {
            let mut monitor = PerformanceMonitor::new();
            monitor.record_frame();
            monitor.record_frame();
            monitor.record_frame();
            // Only 2 intervals recorded (3 frames = 2 intervals)
            assert_eq!(monitor.frame_count(), 2);
        }

        // Profiler stats lookup for an unknown metric yields None.
        #[test]
        fn test_profiler_stats_nonexistent() {
            let profiler = PerformanceProfiler::new("test");
            assert!(profiler.stats("nonexistent").is_none());
        }

        // check_thresholds() with no thresholds configured always passes.
        #[test]
        fn test_profiler_check_thresholds_empty() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.start();
            profiler.record_frame_time(16.0);
            profiler.stop();
            assert!(profiler.check_thresholds().is_ok());
        }

        // A recorded frame time over the configured ceiling makes
        // check_thresholds() fail.
        #[test]
        fn test_profiler_check_thresholds_fails() {
            let mut profiler = PerformanceProfiler::new("test");
            profiler.add_threshold(PerformanceThreshold::new("frame_time").with_max(10.0));
            profiler.start();
            profiler.record_frame_time(20.0);
            profiler.stop();
            assert!(profiler.check_thresholds().is_err());
        }
    }
1693}