1#![allow(clippy::type_complexity)]
2#![allow(missing_docs)]
12#![allow(dead_code)]
13
14use serde::{Deserialize, Serialize};
15
/// One per-step sample of simulation diagnostics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PySnapshot {
    /// Simulation time at which the sample was taken.
    pub time: f64,
    /// Kinetic energy sample.
    pub kinetic_energy: f64,
    /// Potential energy sample.
    pub potential_energy: f64,
    /// Sum of kinetic and potential energy; refreshed by `compute_total`.
    pub total_energy: f64,
    /// Linear-momentum magnitude sample.
    pub linear_momentum: f64,
    /// Angular-momentum magnitude sample.
    pub angular_momentum: f64,
    /// Number of contacts at this step.
    pub contact_count: usize,
    /// Number of awake bodies at this step.
    pub awake_count: usize,
    /// Number of sleeping bodies at this step.
    pub sleeping_count: usize,
    /// Wall-clock duration of the step, in milliseconds.
    pub step_ms: f64,
}
44
45impl Default for PySnapshot {
46 fn default() -> Self {
47 Self {
48 time: 0.0,
49 kinetic_energy: 0.0,
50 potential_energy: 0.0,
51 total_energy: 0.0,
52 linear_momentum: 0.0,
53 angular_momentum: 0.0,
54 contact_count: 0,
55 awake_count: 0,
56 sleeping_count: 0,
57 step_ms: 0.0,
58 }
59 }
60}
61
62impl PySnapshot {
63 pub fn compute_total(&mut self) {
65 self.total_energy = self.kinetic_energy + self.potential_energy;
66 }
67}
68
/// Rolling collection of [`PySnapshot`]s plus a step counter.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyPhysicsAnalytics {
    /// Recorded snapshots, oldest first.
    pub snapshots: Vec<PySnapshot>,
    /// Maximum snapshots retained before the oldest is evicted; 0 disables
    /// the cap (see `snapshot`).
    pub max_snapshots: usize,
    /// Step counter; incremented by both `snapshot` and `step`.
    pub step: u64,
}
85
86impl PyPhysicsAnalytics {
87 pub fn new(max_snapshots: usize) -> Self {
89 Self {
90 snapshots: Vec::new(),
91 max_snapshots,
92 step: 0,
93 }
94 }
95
96 pub fn snapshot(&mut self, snap: PySnapshot) {
98 if self.max_snapshots > 0 && self.snapshots.len() >= self.max_snapshots {
99 self.snapshots.remove(0);
100 }
101 self.snapshots.push(snap);
102 self.step += 1;
103 }
104
105 pub fn step(&mut self) {
107 self.step += 1;
108 }
109
110 pub fn compute_metrics(&self) -> PyAggregateMetrics {
112 if self.snapshots.is_empty() {
113 return PyAggregateMetrics::default();
114 }
115 let n = self.snapshots.len();
116 let mean_ke = self.snapshots.iter().map(|s| s.kinetic_energy).sum::<f64>() / n as f64;
117 let mean_pe = self
118 .snapshots
119 .iter()
120 .map(|s| s.potential_energy)
121 .sum::<f64>()
122 / n as f64;
123 let max_ke = self
124 .snapshots
125 .iter()
126 .map(|s| s.kinetic_energy)
127 .fold(f64::NEG_INFINITY, f64::max);
128 let min_ke = self
129 .snapshots
130 .iter()
131 .map(|s| s.kinetic_energy)
132 .fold(f64::INFINITY, f64::min);
133 let mean_step_ms = self.snapshots.iter().map(|s| s.step_ms).sum::<f64>() / n as f64;
134 let max_step_ms = self
135 .snapshots
136 .iter()
137 .map(|s| s.step_ms)
138 .fold(f64::NEG_INFINITY, f64::max);
139 PyAggregateMetrics {
140 mean_ke,
141 mean_pe,
142 max_ke,
143 min_ke,
144 mean_step_ms,
145 max_step_ms,
146 sample_count: n,
147 }
148 }
149
150 pub fn latest(&self) -> Option<&PySnapshot> {
152 self.snapshots.last()
153 }
154
155 pub fn is_empty(&self) -> bool {
157 self.snapshots.is_empty()
158 }
159}
160
/// Summary statistics produced by [`PyPhysicsAnalytics::compute_metrics`].
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyAggregateMetrics {
    /// Mean kinetic energy across the sampled snapshots.
    pub mean_ke: f64,
    /// Mean potential energy across the sampled snapshots.
    pub mean_pe: f64,
    /// Largest kinetic-energy sample.
    pub max_ke: f64,
    /// Smallest kinetic-energy sample.
    pub min_ke: f64,
    /// Mean per-step wall time, in milliseconds.
    pub mean_step_ms: f64,
    /// Largest per-step wall time, in milliseconds.
    pub max_step_ms: f64,
    /// Number of snapshots the statistics were computed from.
    pub sample_count: usize,
}
179
/// Energy history with relative-jump anomaly detection.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyEnergyTracker {
    /// Samples as `(time, kinetic, potential, total)` tuples, oldest first.
    pub history: Vec<(f64, f64, f64, f64)>,
    /// Maximum history length; 0 disables eviction.
    pub capacity: usize,
    /// Relative change in total energy between consecutive samples above
    /// which an anomaly is recorded.
    pub anomaly_threshold: f64,
    /// Detected jumps as `(history index at detection time, signed delta)`.
    pub anomalies: Vec<(usize, f64)>,
}
196
197impl PyEnergyTracker {
198 pub fn new(capacity: usize, anomaly_threshold: f64) -> Self {
200 Self {
201 history: Vec::new(),
202 capacity,
203 anomaly_threshold,
204 anomalies: Vec::new(),
205 }
206 }
207
208 pub fn record(&mut self, time: f64, ke: f64, pe: f64) {
210 let total = ke + pe;
211 if let Some(&(_t0, _ke0, _pe0, prev_total)) = self.history.last()
212 && prev_total.abs() > 1e-10
213 {
214 let delta = (total - prev_total).abs() / prev_total.abs();
215 if delta > self.anomaly_threshold {
216 self.anomalies
217 .push((self.history.len(), total - prev_total));
218 }
219 }
220 if self.capacity > 0 && self.history.len() >= self.capacity {
221 self.history.remove(0);
222 }
223 self.history.push((time, ke, pe, total));
224 }
225
226 pub fn mean_ke(&self) -> f64 {
228 if self.history.is_empty() {
229 return 0.0;
230 }
231 self.history.iter().map(|&(_, ke, _, _)| ke).sum::<f64>() / self.history.len() as f64
232 }
233
234 pub fn mean_total(&self) -> f64 {
236 if self.history.is_empty() {
237 return 0.0;
238 }
239 self.history
240 .iter()
241 .map(|&(_, _, _, total)| total)
242 .sum::<f64>()
243 / self.history.len() as f64
244 }
245
246 pub fn latest_total(&self) -> f64 {
248 self.history.last().map_or(0.0, |&(_, _, _, t)| t)
249 }
250
251 pub fn has_anomalies(&self) -> bool {
253 !self.anomalies.is_empty()
254 }
255}
256
/// Tracks linear/angular momentum magnitudes against the first sample.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyMomentumTracker {
    /// Samples as `(time, linear magnitude, angular magnitude)` tuples.
    pub history: Vec<(f64, f64, f64)>,
    /// Linear baseline captured from the first recorded sample.
    pub initial_linear: f64,
    /// Angular baseline captured from the first recorded sample.
    pub initial_angular: f64,
    /// Relative tolerance used by the conservation checks.
    pub tolerance: f64,
}
273
274impl PyMomentumTracker {
275 pub fn new(tolerance: f64) -> Self {
277 Self {
278 tolerance,
279 ..Default::default()
280 }
281 }
282
283 pub fn record(&mut self, time: f64, linear_mag: f64, angular_mag: f64) {
285 if self.history.is_empty() {
286 self.initial_linear = linear_mag;
287 self.initial_angular = angular_mag;
288 }
289 self.history.push((time, linear_mag, angular_mag));
290 }
291
292 pub fn linear_conserved(&self) -> bool {
294 if self.history.is_empty() {
295 return true;
296 }
297 self.history.iter().all(|&(_, lm, _)| {
298 (lm - self.initial_linear).abs() <= self.tolerance * (self.initial_linear.abs() + 1.0)
299 })
300 }
301
302 pub fn angular_conserved(&self) -> bool {
304 if self.history.is_empty() {
305 return true;
306 }
307 self.history.iter().all(|&(_, _, am)| {
308 (am - self.initial_angular).abs() <= self.tolerance * (self.initial_angular.abs() + 1.0)
309 })
310 }
311
312 pub fn max_linear_deviation(&self) -> f64 {
314 self.history
315 .iter()
316 .map(|&(_, lm, _)| (lm - self.initial_linear).abs())
317 .fold(0.0_f64, f64::max)
318 }
319}
320
/// Aggregated collision counters, impulse histogram, and depth statistics.
///
/// NOTE(review): the derived `Default` leaves `depth_min`/`depth_max` at
/// 0.0; construct via [`PyCollisionStats::new`] to get the +/- infinity
/// sentinels the depth tracking relies on.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyCollisionStats {
    /// Collisions recorded since construction or the last `reset`.
    pub total_collisions: u64,
    /// Per unordered body pair: `(a, b, count)`.
    pub pair_counts: Vec<(u32, u32, u64)>,
    /// Impulse histogram over `[0, max_impulse]`; the top bin also absorbs
    /// out-of-range impulses.
    pub impulse_histogram: Vec<u64>,
    /// Upper bound of the impulse histogram range.
    pub max_impulse: f64,
    /// Smallest recorded collision depth.
    pub depth_min: f64,
    /// Largest recorded collision depth.
    pub depth_max: f64,
    /// Running mean of recorded collision depths.
    pub depth_mean: f64,
    // Running sum backing `depth_mean`.
    depth_sum: f64,
    // Number of depth samples backing `depth_mean`.
    depth_count: u64,
}
345
346impl PyCollisionStats {
347 pub fn new(n_bins: usize, max_impulse: f64) -> Self {
349 Self {
350 max_impulse,
351 impulse_histogram: vec![0; n_bins],
352 depth_min: f64::INFINITY,
353 depth_max: f64::NEG_INFINITY,
354 ..Default::default()
355 }
356 }
357
358 pub fn record_collision(&mut self, body_a: u32, body_b: u32, impulse: f64, depth: f64) {
360 self.total_collisions += 1;
361 let pair = self
363 .pair_counts
364 .iter_mut()
365 .find(|(a, b, _)| (*a == body_a && *b == body_b) || (*a == body_b && *b == body_a));
366 match pair {
367 Some((_, _, cnt)) => *cnt += 1,
368 None => self.pair_counts.push((body_a, body_b, 1)),
369 }
370 let n = self.impulse_histogram.len();
372 if n > 0 && self.max_impulse > 0.0 {
373 let bin = ((impulse / self.max_impulse) * n as f64) as usize;
374 let bin = bin.min(n - 1);
375 self.impulse_histogram[bin] += 1;
376 }
377 if depth < self.depth_min {
379 self.depth_min = depth;
380 }
381 if depth > self.depth_max {
382 self.depth_max = depth;
383 }
384 self.depth_sum += depth;
385 self.depth_count += 1;
386 self.depth_mean = self.depth_sum / self.depth_count as f64;
387 }
388
389 pub fn reset(&mut self) {
391 self.total_collisions = 0;
392 self.pair_counts.clear();
393 for b in &mut self.impulse_histogram {
394 *b = 0;
395 }
396 self.depth_min = f64::INFINITY;
397 self.depth_max = f64::NEG_INFINITY;
398 self.depth_mean = 0.0;
399 self.depth_sum = 0.0;
400 self.depth_count = 0;
401 }
402
403 pub fn most_collided_pair(&self) -> Option<(u32, u32, u64)> {
405 self.pair_counts.iter().max_by_key(|&&(_, _, c)| c).copied()
406 }
407}
408
/// Wake/sleep event totals and a bounded history of sleeping fractions.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PySleepStats {
    /// Accumulated wake events.
    pub total_wake_events: u64,
    /// Accumulated sleep events.
    pub total_sleep_events: u64,
    /// `(time, sleeping / total)` samples, oldest first.
    pub sleeping_fraction_history: Vec<(f64, f64)>,
    /// Maximum history length; 0 disables eviction.
    pub capacity: usize,
}
425
426impl PySleepStats {
427 pub fn new(capacity: usize) -> Self {
429 Self {
430 capacity,
431 ..Default::default()
432 }
433 }
434
435 pub fn record(&mut self, time: f64, sleeping: usize, total: usize, wakes: u64, sleeps: u64) {
437 self.total_wake_events += wakes;
438 self.total_sleep_events += sleeps;
439 let frac = if total == 0 {
440 0.0
441 } else {
442 sleeping as f64 / total as f64
443 };
444 if self.capacity > 0 && self.sleeping_fraction_history.len() >= self.capacity {
445 self.sleeping_fraction_history.remove(0);
446 }
447 self.sleeping_fraction_history.push((time, frac));
448 }
449
450 pub fn mean_sleeping_fraction(&self) -> f64 {
452 if self.sleeping_fraction_history.is_empty() {
453 return 0.0;
454 }
455 self.sleeping_fraction_history
456 .iter()
457 .map(|&(_, f)| f)
458 .sum::<f64>()
459 / self.sleeping_fraction_history.len() as f64
460 }
461}
462
/// Running constraint-solver statistics (residuals, iterations, warm starts).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyConstraintStats {
    /// Mean residual over all recorded samples.
    pub mean_residual: f64,
    /// Largest residual recorded.
    pub max_residual: f64,
    /// Mean iteration count over all recorded samples.
    pub mean_iterations: f64,
    /// Most recently recorded warm-start fraction (overwritten each sample,
    /// not averaged).
    pub warm_start_rate: f64,
    // Number of samples folded in so far.
    sample_count: u64,
    // Sum backing `mean_residual`.
    residual_sum: f64,
    // Sum backing `mean_iterations`.
    iterations_sum: f64,
}
483
484impl PyConstraintStats {
485 pub fn new() -> Self {
487 Self::default()
488 }
489
490 pub fn record(&mut self, residual: f64, iterations: u32, warm_start_fraction: f64) {
492 self.sample_count += 1;
493 self.residual_sum += residual;
494 self.iterations_sum += iterations as f64;
495 self.mean_residual = self.residual_sum / self.sample_count as f64;
496 self.mean_iterations = self.iterations_sum / self.sample_count as f64;
497 self.warm_start_rate = warm_start_fraction; if residual > self.max_residual {
499 self.max_residual = residual;
500 }
501 }
502
503 pub fn is_converging(&self, threshold: f64) -> bool {
505 self.mean_residual < threshold
506 }
507}
508
/// Per-phase wall-clock timings for one simulation step, in milliseconds.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyStepTimings {
    /// Broad-phase time.
    pub broad_ms: f64,
    /// Narrow-phase time.
    pub narrow_ms: f64,
    /// Solver time.
    pub solver_ms: f64,
    /// Integration time.
    pub integration_ms: f64,
    /// Sum of the four phase timings; refreshed by `compute_total`.
    pub total_ms: f64,
}
527
528impl PyStepTimings {
529 pub fn compute_total(&mut self) {
531 self.total_ms = self.broad_ms + self.narrow_ms + self.solver_ms + self.integration_ms;
532 }
533}
534
/// Bounded history of per-step timings.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyPerformanceTracker {
    /// Recorded timings, oldest first.
    pub history: Vec<PyStepTimings>,
    /// Maximum history length; 0 disables eviction.
    pub capacity: usize,
}
543
544impl PyPerformanceTracker {
545 pub fn new(capacity: usize) -> Self {
547 Self {
548 capacity,
549 history: Vec::new(),
550 }
551 }
552
553 pub fn record(&mut self, mut timings: PyStepTimings) {
555 timings.compute_total();
556 if self.capacity > 0 && self.history.len() >= self.capacity {
557 self.history.remove(0);
558 }
559 self.history.push(timings);
560 }
561
562 pub fn mean_total_ms(&self) -> f64 {
564 if self.history.is_empty() {
565 return 0.0;
566 }
567 self.history.iter().map(|t| t.total_ms).sum::<f64>() / self.history.len() as f64
568 }
569
570 pub fn peak_total_ms(&self) -> f64 {
572 self.history
573 .iter()
574 .map(|t| t.total_ms)
575 .fold(0.0_f64, f64::max)
576 }
577
578 pub fn export_csv(&self) -> String {
580 let mut out = String::from("broad_ms,narrow_ms,solver_ms,integration_ms,total_ms\n");
581 for t in &self.history {
582 out.push_str(&format!(
583 "{:.4},{:.4},{:.4},{:.4},{:.4}\n",
584 t.broad_ms, t.narrow_ms, t.solver_ms, t.integration_ms, t.total_ms
585 ));
586 }
587 out
588 }
589}
590
/// Top-level container bundling every tracker plus flagged anomalies.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PySimulationReport {
    /// Total steps simulated.
    pub total_steps: u64,
    /// Total simulated time.
    pub total_time: f64,
    /// Energy history / anomaly tracker.
    pub energy: PyEnergyTracker,
    /// Momentum-conservation tracker.
    pub momentum: PyMomentumTracker,
    /// Collision counters and histograms.
    pub collision: PyCollisionStats,
    /// Sleep/wake statistics.
    pub sleep: PySleepStats,
    /// Constraint-solver statistics.
    pub constraint: PyConstraintStats,
    /// Step-timing history.
    pub performance: PyPerformanceTracker,
    /// Human-readable anomaly messages collected by the `check_*` methods.
    pub anomaly_list: Vec<String>,
}
617
618impl PySimulationReport {
619 pub fn new() -> Self {
621 Self {
622 energy: PyEnergyTracker::new(1000, 0.5),
623 momentum: PyMomentumTracker::new(0.01),
624 collision: PyCollisionStats::new(32, 1000.0),
625 sleep: PySleepStats::new(1000),
626 constraint: PyConstraintStats::new(),
627 performance: PyPerformanceTracker::new(1000),
628 ..Default::default()
629 }
630 }
631
632 pub fn as_json(&self) -> Result<String, serde_json::Error> {
634 serde_json::to_string_pretty(self)
635 }
636
637 pub fn flag_anomaly(&mut self, msg: impl Into<String>) {
639 self.anomaly_list.push(msg.into());
640 }
641
642 pub fn check_energy(&mut self) {
644 if self.energy.has_anomalies() {
645 self.flag_anomaly(format!(
646 "Energy anomalies detected: {} jumps",
647 self.energy.anomalies.len()
648 ));
649 }
650 }
651
652 pub fn check_momentum(&mut self) {
654 if !self.momentum.linear_conserved() {
655 self.flag_anomaly(format!(
656 "Linear momentum not conserved; max deviation = {:.4}",
657 self.momentum.max_linear_deviation()
658 ));
659 }
660 }
661}
662
/// Simple synthetic benchmark model scaled by scene size.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyBenchmark {
    /// Steps executed by the last `run`.
    pub steps_run: u64,
    /// Modeled total runtime, in milliseconds.
    pub total_ms: f64,
    /// Steps per second derived from `total_ms` by `run`.
    pub throughput: f64,
    /// Number of bodies in the modeled scene.
    pub body_count: usize,
    /// Number of constraints in the modeled scene.
    pub constraint_count: usize,
}
681
682impl PyBenchmark {
683 pub fn new(body_count: usize, constraint_count: usize) -> Self {
685 Self {
686 body_count,
687 constraint_count,
688 ..Default::default()
689 }
690 }
691
692 pub fn run(&mut self, n_steps: u64, step_cost_ms: f64) {
694 self.steps_run = n_steps;
695 let scale = 1.0 + (self.body_count + self.constraint_count) as f64 / 1000.0;
697 self.total_ms = n_steps as f64 * step_cost_ms * scale;
698 if self.total_ms > 0.0 {
699 self.throughput = self.steps_run as f64 / (self.total_ms / 1000.0);
700 }
701 }
702
703 pub fn steps_per_second(&self) -> f64 {
705 self.throughput
706 }
707}
708
/// Buffers per-body state rows and renders them as CSV or JSON.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyDataExporter {
    // Rows as (time, body_id, px, py, pz, vx, vy, vz, ke).
    rows: Vec<(f64, u32, f64, f64, f64, f64, f64, f64, f64)>,
}
719
720impl PyDataExporter {
721 pub fn new() -> Self {
723 Self::default()
724 }
725
726 #[allow(clippy::too_many_arguments)]
728 pub fn record(
729 &mut self,
730 time: f64,
731 body_id: u32,
732 position: [f64; 3],
733 velocity: [f64; 3],
734 ke: f64,
735 ) {
736 self.rows.push((
737 time,
738 body_id,
739 position[0],
740 position[1],
741 position[2],
742 velocity[0],
743 velocity[1],
744 velocity[2],
745 ke,
746 ));
747 }
748
749 pub fn to_csv(&self) -> String {
751 let mut out = String::from("time,body_id,px,py,pz,vx,vy,vz,ke\n");
752 for &(t, id, px, py, pz, vx, vy, vz, ke) in &self.rows {
753 out.push_str(&format!(
754 "{t:.6},{id},{px:.6},{py:.6},{pz:.6},{vx:.6},{vy:.6},{vz:.6},{ke:.6}\n"
755 ));
756 }
757 out
758 }
759
760 pub fn to_json(&self) -> Result<String, serde_json::Error> {
762 serde_json::to_string_pretty(&self.rows)
763 }
764
765 pub fn row_count(&self) -> usize {
767 self.rows.len()
768 }
769
770 pub fn clear(&mut self) {
772 self.rows.clear();
773 }
774}
775
#[cfg(test)]
mod tests {
    use super::*;

    // --- PySnapshot ---

    #[test]
    fn test_snapshot_default() {
        let s = PySnapshot::default();
        assert_eq!(s.time, 0.0);
        assert_eq!(s.kinetic_energy, 0.0);
    }

    #[test]
    fn test_snapshot_compute_total() {
        let mut s = PySnapshot {
            kinetic_energy: 3.0,
            potential_energy: 2.0,
            ..Default::default()
        };
        s.compute_total();
        assert!((s.total_energy - 5.0).abs() < 1e-12);
    }

    // --- PyPhysicsAnalytics ---

    #[test]
    fn test_analytics_snapshot() {
        let mut a = PyPhysicsAnalytics::new(10);
        for i in 0..5 {
            let s = PySnapshot {
                time: i as f64,
                kinetic_energy: i as f64,
                ..Default::default()
            };
            a.snapshot(s);
        }
        assert_eq!(a.snapshots.len(), 5);
    }

    #[test]
    fn test_analytics_evicts_old() {
        let mut a = PyPhysicsAnalytics::new(3);
        for i in 0..5 {
            let s = PySnapshot {
                kinetic_energy: i as f64,
                ..Default::default()
            };
            a.snapshot(s);
        }
        assert_eq!(a.snapshots.len(), 3);
    }

    #[test]
    fn test_analytics_metrics() {
        let mut a = PyPhysicsAnalytics::new(100);
        for i in 1..=4 {
            let s = PySnapshot {
                kinetic_energy: i as f64,
                ..Default::default()
            };
            a.snapshot(s);
        }
        let m = a.compute_metrics();
        assert!((m.mean_ke - 2.5).abs() < 1e-10);
        assert!((m.max_ke - 4.0).abs() < 1e-10);
    }

    #[test]
    fn test_analytics_is_empty() {
        let a = PyPhysicsAnalytics::new(10);
        assert!(a.is_empty());
    }

    // --- PyEnergyTracker ---

    #[test]
    fn test_energy_tracker_record() {
        let mut t = PyEnergyTracker::new(100, 0.5);
        t.record(0.0, 10.0, 5.0);
        t.record(0.1, 11.0, 5.0);
        assert_eq!(t.history.len(), 2);
    }

    #[test]
    fn test_energy_tracker_anomaly_detect() {
        let mut t = PyEnergyTracker::new(100, 0.1);
        t.record(0.0, 10.0, 0.0);
        // 10.0 -> 1000.0 is a ~99x relative jump, far above the threshold.
        t.record(0.1, 1000.0, 0.0);
        assert!(t.has_anomalies());
    }

    #[test]
    fn test_energy_tracker_no_anomaly() {
        let mut t = PyEnergyTracker::new(100, 0.5);
        t.record(0.0, 10.0, 0.0);
        t.record(0.1, 10.1, 0.0);
        assert!(!t.has_anomalies());
    }

    #[test]
    fn test_energy_tracker_mean_ke() {
        let mut t = PyEnergyTracker::new(100, 1.0);
        t.record(0.0, 4.0, 0.0);
        t.record(0.1, 6.0, 0.0);
        assert!((t.mean_ke() - 5.0).abs() < 1e-10);
    }

    #[test]
    fn test_energy_tracker_eviction() {
        let mut t = PyEnergyTracker::new(2, 10.0);
        t.record(0.0, 1.0, 0.0);
        t.record(0.1, 2.0, 0.0);
        t.record(0.2, 3.0, 0.0);
        assert_eq!(t.history.len(), 2);
    }

    // --- PyMomentumTracker ---

    #[test]
    fn test_momentum_tracker_conserved() {
        let mut t = PyMomentumTracker::new(0.01);
        t.record(0.0, 5.0, 2.0);
        t.record(0.1, 5.001, 2.001);
        assert!(t.linear_conserved());
    }

    #[test]
    fn test_momentum_tracker_not_conserved() {
        let mut t = PyMomentumTracker::new(0.001);
        t.record(0.0, 5.0, 2.0);
        // A jump of 1.0 exceeds tolerance * (|5.0| + 1.0) = 0.006.
        t.record(0.1, 6.0, 2.0);
        assert!(!t.linear_conserved());
    }

    #[test]
    fn test_momentum_tracker_deviation() {
        let mut t = PyMomentumTracker::new(0.5);
        t.record(0.0, 10.0, 0.0);
        t.record(0.1, 10.5, 0.0);
        assert!((t.max_linear_deviation() - 0.5).abs() < 1e-10);
    }

    // --- PyCollisionStats ---

    #[test]
    fn test_collision_stats_record() {
        let mut s = PyCollisionStats::new(10, 100.0);
        s.record_collision(0, 1, 50.0, 0.01);
        assert_eq!(s.total_collisions, 1);
    }

    #[test]
    fn test_collision_stats_pair_count() {
        let mut s = PyCollisionStats::new(10, 100.0);
        s.record_collision(0, 1, 10.0, 0.01);
        s.record_collision(0, 1, 20.0, 0.02);
        let pair = s.most_collided_pair().unwrap();
        assert_eq!(pair.2, 2);
    }

    #[test]
    fn test_collision_stats_depth() {
        let mut s = PyCollisionStats::new(10, 100.0);
        s.record_collision(0, 1, 10.0, 0.05);
        s.record_collision(2, 3, 10.0, 0.10);
        assert!((s.depth_mean - 0.075).abs() < 1e-10);
    }

    #[test]
    fn test_collision_stats_reset() {
        let mut s = PyCollisionStats::new(10, 100.0);
        s.record_collision(0, 1, 10.0, 0.01);
        s.reset();
        assert_eq!(s.total_collisions, 0);
    }

    // --- PySleepStats ---

    #[test]
    fn test_sleep_stats_record() {
        let mut s = PySleepStats::new(100);
        s.record(0.0, 3, 10, 0, 3);
        s.record(0.1, 5, 10, 2, 2);
        assert_eq!(s.total_wake_events, 2);
    }

    #[test]
    fn test_sleep_stats_mean_fraction() {
        let mut s = PySleepStats::new(100);
        s.record(0.0, 5, 10, 0, 0);
        s.record(0.1, 3, 10, 0, 0);
        assert!((s.mean_sleeping_fraction() - 0.4).abs() < 1e-10);
    }

    // --- PyConstraintStats ---

    #[test]
    fn test_constraint_stats_record() {
        let mut s = PyConstraintStats::new();
        s.record(0.001, 5, 0.8);
        s.record(0.002, 7, 0.9);
        assert!((s.mean_iterations - 6.0).abs() < 1e-10);
    }

    #[test]
    fn test_constraint_stats_converging() {
        let mut s = PyConstraintStats::new();
        s.record(0.0001, 3, 1.0);
        assert!(s.is_converging(0.01));
    }

    // --- PyPerformanceTracker ---

    #[test]
    fn test_perf_tracker_mean() {
        let mut t = PyPerformanceTracker::new(100);
        t.record(PyStepTimings {
            broad_ms: 1.0,
            narrow_ms: 2.0,
            solver_ms: 3.0,
            integration_ms: 0.5,
            total_ms: 0.0,
        });
        t.record(PyStepTimings {
            broad_ms: 2.0,
            narrow_ms: 1.0,
            solver_ms: 2.0,
            integration_ms: 0.5,
            total_ms: 0.0,
        });
        assert!((t.mean_total_ms() - 6.0).abs() < 1e-10);
    }

    #[test]
    fn test_perf_tracker_csv() {
        let mut t = PyPerformanceTracker::new(10);
        t.record(PyStepTimings {
            broad_ms: 1.0,
            narrow_ms: 1.0,
            solver_ms: 1.0,
            integration_ms: 1.0,
            total_ms: 0.0,
        });
        let csv = t.export_csv();
        assert!(csv.contains("broad_ms"));
    }

    // --- PySimulationReport ---

    #[test]
    fn test_report_as_json() {
        let mut r = PySimulationReport::new();
        r.total_steps = 100;
        let json = r.as_json().unwrap();
        assert!(json.contains("total_steps"));
    }

    #[test]
    fn test_report_anomaly() {
        let mut r = PySimulationReport::new();
        r.energy.record(0.0, 10.0, 0.0);
        r.energy.record(0.1, 1000.0, 0.0);
        r.check_energy();
        assert!(!r.anomaly_list.is_empty());
    }

    // --- PyBenchmark ---

    #[test]
    fn test_benchmark_run() {
        let mut b = PyBenchmark::new(100, 50);
        b.run(1000, 1.0);
        assert_eq!(b.steps_run, 1000);
        assert!(b.throughput > 0.0);
    }

    #[test]
    fn test_benchmark_throughput() {
        let mut b = PyBenchmark::new(0, 0);
        b.run(1000, 1.0);
        assert!((b.steps_per_second() - 1000.0).abs() < 1.0);
    }

    // --- PyDataExporter ---

    #[test]
    fn test_exporter_csv() {
        let mut e = PyDataExporter::new();
        e.record(0.0, 1, [0.0, 1.0, 2.0], [0.1, 0.2, 0.3], 5.0);
        let csv = e.to_csv();
        assert!(csv.contains("time,body_id"));
        assert!(csv.contains("0.000000"));
    }

    #[test]
    fn test_exporter_json() {
        let mut e = PyDataExporter::new();
        e.record(0.0, 1, [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], 0.0);
        let json = e.to_json().unwrap();
        assert!(json.contains("["));
    }

    #[test]
    fn test_exporter_row_count() {
        let mut e = PyDataExporter::new();
        for i in 0..5 {
            e.record(i as f64, i as u32, [0.0; 3], [0.0; 3], 0.0);
        }
        assert_eq!(e.row_count(), 5);
    }

    #[test]
    fn test_exporter_clear() {
        let mut e = PyDataExporter::new();
        e.record(0.0, 0, [0.0; 3], [0.0; 3], 0.0);
        e.clear();
        assert_eq!(e.row_count(), 0);
    }
}