// oxiphysics_python/analytics_api.rs

1#![allow(clippy::type_complexity)]
2// Copyright 2026 COOLJAPAN OU (Team KitaSan)
3// SPDX-License-Identifier: Apache-2.0
4
5//! Physics analytics Python bindings.
6//!
7//! Provides Python-friendly types for measuring and analysing simulation
8//! quality: energy tracking, momentum conservation, collision statistics,
9//! performance profiling, and full simulation reports.
10
11#![allow(missing_docs)]
12#![allow(dead_code)]
13
14use serde::{Deserialize, Serialize};
15
16// ─────────────────────────────────────────────────────────────────────────────
17// Snapshot
18// ─────────────────────────────────────────────────────────────────────────────
19
/// A snapshot of global physics state at one time step.
///
/// Energies are in joules; momenta are stored as vector magnitudes, so
/// direction information is not preserved. `total_energy` is an independent
/// field and only matches `kinetic_energy + potential_energy` after
/// [`PySnapshot::compute_total`] has been called.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PySnapshot {
    /// Simulation time (seconds).
    pub time: f64,
    /// Total kinetic energy (J).
    pub kinetic_energy: f64,
    /// Total potential energy (J).
    pub potential_energy: f64,
    /// Total energy (KE + PE); refreshed by [`PySnapshot::compute_total`].
    pub total_energy: f64,
    /// Linear momentum magnitude `|p|`.
    pub linear_momentum: f64,
    /// Angular momentum magnitude `|L|`.
    pub angular_momentum: f64,
    /// Number of active contacts.
    pub contact_count: usize,
    /// Number of awake bodies.
    pub awake_count: usize,
    /// Number of sleeping bodies.
    pub sleeping_count: usize,
    /// Wall-clock time taken by this step (milliseconds).
    pub step_ms: f64,
}
44
45impl Default for PySnapshot {
46    fn default() -> Self {
47        Self {
48            time: 0.0,
49            kinetic_energy: 0.0,
50            potential_energy: 0.0,
51            total_energy: 0.0,
52            linear_momentum: 0.0,
53            angular_momentum: 0.0,
54            contact_count: 0,
55            awake_count: 0,
56            sleeping_count: 0,
57            step_ms: 0.0,
58        }
59    }
60}
61
62impl PySnapshot {
63    /// Compute the total energy.
64    pub fn compute_total(&mut self) {
65        self.total_energy = self.kinetic_energy + self.potential_energy;
66    }
67}
68
69// ─────────────────────────────────────────────────────────────────────────────
70// PyPhysicsAnalytics
71// ─────────────────────────────────────────────────────────────────────────────
72
/// Master analytics object for a simulation.
///
/// Holds a bounded snapshot history (`max_snapshots`; 0 disables the cap)
/// and a monotonically increasing step counter.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyPhysicsAnalytics {
    /// Recorded snapshots, oldest first; trimmed to `max_snapshots`.
    pub snapshots: Vec<PySnapshot>,
    /// Maximum number of snapshots to retain (0 = unbounded).
    pub max_snapshots: usize,
    /// Current step index; advanced by both `snapshot` and `step`.
    pub step: u64,
}
85
86impl PyPhysicsAnalytics {
87    /// Create analytics with a given history capacity.
88    pub fn new(max_snapshots: usize) -> Self {
89        Self {
90            snapshots: Vec::new(),
91            max_snapshots,
92            step: 0,
93        }
94    }
95
96    /// Record a new snapshot.
97    pub fn snapshot(&mut self, snap: PySnapshot) {
98        if self.max_snapshots > 0 && self.snapshots.len() >= self.max_snapshots {
99            self.snapshots.remove(0);
100        }
101        self.snapshots.push(snap);
102        self.step += 1;
103    }
104
105    /// Advance step counter.
106    pub fn step(&mut self) {
107        self.step += 1;
108    }
109
110    /// Compute aggregate metrics over all snapshots.
111    pub fn compute_metrics(&self) -> PyAggregateMetrics {
112        if self.snapshots.is_empty() {
113            return PyAggregateMetrics::default();
114        }
115        let n = self.snapshots.len();
116        let mean_ke = self.snapshots.iter().map(|s| s.kinetic_energy).sum::<f64>() / n as f64;
117        let mean_pe = self
118            .snapshots
119            .iter()
120            .map(|s| s.potential_energy)
121            .sum::<f64>()
122            / n as f64;
123        let max_ke = self
124            .snapshots
125            .iter()
126            .map(|s| s.kinetic_energy)
127            .fold(f64::NEG_INFINITY, f64::max);
128        let min_ke = self
129            .snapshots
130            .iter()
131            .map(|s| s.kinetic_energy)
132            .fold(f64::INFINITY, f64::min);
133        let mean_step_ms = self.snapshots.iter().map(|s| s.step_ms).sum::<f64>() / n as f64;
134        let max_step_ms = self
135            .snapshots
136            .iter()
137            .map(|s| s.step_ms)
138            .fold(f64::NEG_INFINITY, f64::max);
139        PyAggregateMetrics {
140            mean_ke,
141            mean_pe,
142            max_ke,
143            min_ke,
144            mean_step_ms,
145            max_step_ms,
146            sample_count: n,
147        }
148    }
149
150    /// Most recent snapshot.
151    pub fn latest(&self) -> Option<&PySnapshot> {
152        self.snapshots.last()
153    }
154
155    /// True if no snapshots recorded.
156    pub fn is_empty(&self) -> bool {
157        self.snapshots.is_empty()
158    }
159}
160
/// Aggregate metrics derived from snapshot history.
///
/// Produced by [`PyPhysicsAnalytics::compute_metrics`]; all fields are zero
/// (via `Default`) when derived from an empty history.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyAggregateMetrics {
    /// Mean kinetic energy.
    pub mean_ke: f64,
    /// Mean potential energy.
    pub mean_pe: f64,
    /// Maximum kinetic energy observed.
    pub max_ke: f64,
    /// Minimum kinetic energy observed.
    pub min_ke: f64,
    /// Mean step wall-clock time (ms).
    pub mean_step_ms: f64,
    /// Maximum step wall-clock time (ms).
    pub max_step_ms: f64,
    /// Number of snapshots this aggregate was computed from.
    pub sample_count: usize,
}
179
180// ─────────────────────────────────────────────────────────────────────────────
181// PyEnergyTracker
182// ─────────────────────────────────────────────────────────────────────────────
183
/// Tracks kinetic, potential, and total energy over time and flags sudden
/// total-energy jumps as anomalies.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyEnergyTracker {
    /// Time series of `(time, KE, PE, total)`; bounded by `capacity`.
    pub history: Vec<(f64, f64, f64, f64)>,
    /// Maximum history size (0 = unbounded).
    pub capacity: usize,
    /// Relative total-energy change above which a jump is flagged.
    pub anomaly_threshold: f64,
    /// Detected energy anomalies `(index, delta_energy)`.
    ///
    /// NOTE(review): the index is `history.len()` at detection time; once
    /// eviction begins it is a rolling-buffer position, not a stable global
    /// step index — confirm consumers treat it accordingly.
    pub anomalies: Vec<(usize, f64)>,
}
196
197impl PyEnergyTracker {
198    /// Create a new tracker.
199    pub fn new(capacity: usize, anomaly_threshold: f64) -> Self {
200        Self {
201            history: Vec::new(),
202            capacity,
203            anomaly_threshold,
204            anomalies: Vec::new(),
205        }
206    }
207
208    /// Record a step's energy.
209    pub fn record(&mut self, time: f64, ke: f64, pe: f64) {
210        let total = ke + pe;
211        if let Some(&(_t0, _ke0, _pe0, prev_total)) = self.history.last()
212            && prev_total.abs() > 1e-10
213        {
214            let delta = (total - prev_total).abs() / prev_total.abs();
215            if delta > self.anomaly_threshold {
216                self.anomalies
217                    .push((self.history.len(), total - prev_total));
218            }
219        }
220        if self.capacity > 0 && self.history.len() >= self.capacity {
221            self.history.remove(0);
222        }
223        self.history.push((time, ke, pe, total));
224    }
225
226    /// Mean kinetic energy.
227    pub fn mean_ke(&self) -> f64 {
228        if self.history.is_empty() {
229            return 0.0;
230        }
231        self.history.iter().map(|&(_, ke, _, _)| ke).sum::<f64>() / self.history.len() as f64
232    }
233
234    /// Mean total energy.
235    pub fn mean_total(&self) -> f64 {
236        if self.history.is_empty() {
237            return 0.0;
238        }
239        self.history
240            .iter()
241            .map(|&(_, _, _, total)| total)
242            .sum::<f64>()
243            / self.history.len() as f64
244    }
245
246    /// Latest total energy (or 0).
247    pub fn latest_total(&self) -> f64 {
248        self.history.last().map_or(0.0, |&(_, _, _, t)| t)
249    }
250
251    /// True if any anomalies were detected.
252    pub fn has_anomalies(&self) -> bool {
253        !self.anomalies.is_empty()
254    }
255}
256
257// ─────────────────────────────────────────────────────────────────────────────
258// PyMomentumTracker
259// ─────────────────────────────────────────────────────────────────────────────
260
/// Tracks linear and angular momentum magnitudes and checks conservation
/// against the first recorded sample.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyMomentumTracker {
    /// History of `(time, linear_magnitude, angular_magnitude)`.
    ///
    /// NOTE(review): unbounded, unlike the other trackers' capped
    /// histories — long runs grow without limit.
    pub history: Vec<(f64, f64, f64)>,
    /// Baseline linear momentum, captured from the first sample.
    pub initial_linear: f64,
    /// Baseline angular momentum, captured from the first sample.
    pub initial_angular: f64,
    /// Relative tolerance for the conservation checks.
    pub tolerance: f64,
}
273
274impl PyMomentumTracker {
275    /// Create with default tolerance.
276    pub fn new(tolerance: f64) -> Self {
277        Self {
278            tolerance,
279            ..Default::default()
280        }
281    }
282
283    /// Record a step.
284    pub fn record(&mut self, time: f64, linear_mag: f64, angular_mag: f64) {
285        if self.history.is_empty() {
286            self.initial_linear = linear_mag;
287            self.initial_angular = angular_mag;
288        }
289        self.history.push((time, linear_mag, angular_mag));
290    }
291
292    /// True if linear momentum is conserved within tolerance.
293    pub fn linear_conserved(&self) -> bool {
294        if self.history.is_empty() {
295            return true;
296        }
297        self.history.iter().all(|&(_, lm, _)| {
298            (lm - self.initial_linear).abs() <= self.tolerance * (self.initial_linear.abs() + 1.0)
299        })
300    }
301
302    /// True if angular momentum is conserved within tolerance.
303    pub fn angular_conserved(&self) -> bool {
304        if self.history.is_empty() {
305            return true;
306        }
307        self.history.iter().all(|&(_, _, am)| {
308            (am - self.initial_angular).abs() <= self.tolerance * (self.initial_angular.abs() + 1.0)
309        })
310    }
311
312    /// Maximum linear momentum deviation.
313    pub fn max_linear_deviation(&self) -> f64 {
314        self.history
315            .iter()
316            .map(|&(_, lm, _)| (lm - self.initial_linear).abs())
317            .fold(0.0_f64, f64::max)
318    }
319}
320
321// ─────────────────────────────────────────────────────────────────────────────
322// PyCollisionStats
323// ─────────────────────────────────────────────────────────────────────────────
324
/// Statistics about collisions over the simulation: totals, per-pair
/// counts, an impulse histogram, and penetration-depth aggregates.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyCollisionStats {
    /// Total collision count.
    pub total_collisions: u64,
    /// Per-body-pair collision count `(body_a_id, body_b_id, count)`;
    /// pairs are unordered — (a, b) and (b, a) share one entry.
    pub pair_counts: Vec<(u32, u32, u64)>,
    /// Impulse histogram (`n_bins` buckets over \[0, max_impulse\]);
    /// impulses above the cap land in the last bucket.
    pub impulse_histogram: Vec<u64>,
    /// Upper bound of the impulse histogram range.
    pub max_impulse: f64,
    /// Minimum penetration depth recorded (+inf before any sample when
    /// constructed via `new`; `Default` leaves it at 0).
    pub depth_min: f64,
    /// Maximum penetration depth recorded (-inf before any sample when
    /// constructed via `new`; `Default` leaves it at 0).
    pub depth_max: f64,
    /// Mean penetration depth (0 before any sample).
    pub depth_mean: f64,
    // Running depth sum backing `depth_mean`.
    depth_sum: f64,
    // Number of depth samples backing `depth_mean`.
    depth_count: u64,
}
345
346impl PyCollisionStats {
347    /// Create with given histogram resolution.
348    pub fn new(n_bins: usize, max_impulse: f64) -> Self {
349        Self {
350            max_impulse,
351            impulse_histogram: vec![0; n_bins],
352            depth_min: f64::INFINITY,
353            depth_max: f64::NEG_INFINITY,
354            ..Default::default()
355        }
356    }
357
358    /// Record a collision event.
359    pub fn record_collision(&mut self, body_a: u32, body_b: u32, impulse: f64, depth: f64) {
360        self.total_collisions += 1;
361        // Pair counts
362        let pair = self
363            .pair_counts
364            .iter_mut()
365            .find(|(a, b, _)| (*a == body_a && *b == body_b) || (*a == body_b && *b == body_a));
366        match pair {
367            Some((_, _, cnt)) => *cnt += 1,
368            None => self.pair_counts.push((body_a, body_b, 1)),
369        }
370        // Histogram
371        let n = self.impulse_histogram.len();
372        if n > 0 && self.max_impulse > 0.0 {
373            let bin = ((impulse / self.max_impulse) * n as f64) as usize;
374            let bin = bin.min(n - 1);
375            self.impulse_histogram[bin] += 1;
376        }
377        // Depth stats
378        if depth < self.depth_min {
379            self.depth_min = depth;
380        }
381        if depth > self.depth_max {
382            self.depth_max = depth;
383        }
384        self.depth_sum += depth;
385        self.depth_count += 1;
386        self.depth_mean = self.depth_sum / self.depth_count as f64;
387    }
388
389    /// Reset all stats.
390    pub fn reset(&mut self) {
391        self.total_collisions = 0;
392        self.pair_counts.clear();
393        for b in &mut self.impulse_histogram {
394            *b = 0;
395        }
396        self.depth_min = f64::INFINITY;
397        self.depth_max = f64::NEG_INFINITY;
398        self.depth_mean = 0.0;
399        self.depth_sum = 0.0;
400        self.depth_count = 0;
401    }
402
403    /// Most collided pair (if any).
404    pub fn most_collided_pair(&self) -> Option<(u32, u32, u64)> {
405        self.pair_counts.iter().max_by_key(|&&(_, _, c)| c).copied()
406    }
407}
408
409// ─────────────────────────────────────────────────────────────────────────────
410// PySleepStats
411// ─────────────────────────────────────────────────────────────────────────────
412
/// Statistics about body sleep/wake behaviour.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PySleepStats {
    /// Total wake events observed.
    pub total_wake_events: u64,
    /// Total sleep events observed.
    pub total_sleep_events: u64,
    /// History of `(time, sleeping_fraction)` tuples, oldest first;
    /// bounded by `capacity`.
    pub sleeping_fraction_history: Vec<(f64, f64)>,
    /// Maximum history size (0 = unbounded).
    pub capacity: usize,
}
425
426impl PySleepStats {
427    /// Create with history capacity.
428    pub fn new(capacity: usize) -> Self {
429        Self {
430            capacity,
431            ..Default::default()
432        }
433    }
434
435    /// Record a step with body counts.
436    pub fn record(&mut self, time: f64, sleeping: usize, total: usize, wakes: u64, sleeps: u64) {
437        self.total_wake_events += wakes;
438        self.total_sleep_events += sleeps;
439        let frac = if total == 0 {
440            0.0
441        } else {
442            sleeping as f64 / total as f64
443        };
444        if self.capacity > 0 && self.sleeping_fraction_history.len() >= self.capacity {
445            self.sleeping_fraction_history.remove(0);
446        }
447        self.sleeping_fraction_history.push((time, frac));
448    }
449
450    /// Mean sleeping fraction.
451    pub fn mean_sleeping_fraction(&self) -> f64 {
452        if self.sleeping_fraction_history.is_empty() {
453            return 0.0;
454        }
455        self.sleeping_fraction_history
456            .iter()
457            .map(|&(_, f)| f)
458            .sum::<f64>()
459            / self.sleeping_fraction_history.len() as f64
460    }
461}
462
463// ─────────────────────────────────────────────────────────────────────────────
464// PyConstraintStats
465// ─────────────────────────────────────────────────────────────────────────────
466
/// Solver and constraint quality statistics.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyConstraintStats {
    /// Mean solver residual across all recorded steps.
    pub mean_residual: f64,
    /// Maximum residual observed.
    pub max_residual: f64,
    /// Mean solver iterations used.
    pub mean_iterations: f64,
    /// Warm-start fraction of the most recent step.
    ///
    /// NOTE(review): despite the name this is the latest value, not a
    /// running rate — confirm intended semantics.
    pub warm_start_rate: f64,
    // Number of recorded samples (backs the two means).
    sample_count: u64,
    // Running residual sum (backs `mean_residual`).
    residual_sum: f64,
    // Running iteration sum (backs `mean_iterations`).
    iterations_sum: f64,
}
483
484impl PyConstraintStats {
485    /// Create empty stats.
486    pub fn new() -> Self {
487        Self::default()
488    }
489
490    /// Record a solver step.
491    pub fn record(&mut self, residual: f64, iterations: u32, warm_start_fraction: f64) {
492        self.sample_count += 1;
493        self.residual_sum += residual;
494        self.iterations_sum += iterations as f64;
495        self.mean_residual = self.residual_sum / self.sample_count as f64;
496        self.mean_iterations = self.iterations_sum / self.sample_count as f64;
497        self.warm_start_rate = warm_start_fraction; // latest value
498        if residual > self.max_residual {
499            self.max_residual = residual;
500        }
501    }
502
503    /// True if the solver is converging well.
504    pub fn is_converging(&self, threshold: f64) -> bool {
505        self.mean_residual < threshold
506    }
507}
508
509// ─────────────────────────────────────────────────────────────────────────────
510// PyPerformanceTracker
511// ─────────────────────────────────────────────────────────────────────────────
512
/// Breakdown of time spent per pipeline phase (milliseconds).
///
/// `total_ms` is an independent field; it only reflects the sum of the
/// phases after [`PyStepTimings::compute_total`] has been called.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyStepTimings {
    /// Broadphase collision detection (ms).
    pub broad_ms: f64,
    /// Narrowphase collision detection (ms).
    pub narrow_ms: f64,
    /// Constraint solver (ms).
    pub solver_ms: f64,
    /// Integration (ms).
    pub integration_ms: f64,
    /// Total step time (ms); see [`PyStepTimings::compute_total`].
    pub total_ms: f64,
}
527
528impl PyStepTimings {
529    /// Compute total as sum of phases.
530    pub fn compute_total(&mut self) {
531        self.total_ms = self.broad_ms + self.narrow_ms + self.solver_ms + self.integration_ms;
532    }
533}
534
/// Tracks per-step performance timings.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyPerformanceTracker {
    /// History of per-step timings, oldest first; bounded by `capacity`.
    pub history: Vec<PyStepTimings>,
    /// Maximum history size (0 = unbounded).
    pub capacity: usize,
}
543
544impl PyPerformanceTracker {
545    /// Create with capacity.
546    pub fn new(capacity: usize) -> Self {
547        Self {
548            capacity,
549            history: Vec::new(),
550        }
551    }
552
553    /// Record a step's timings.
554    pub fn record(&mut self, mut timings: PyStepTimings) {
555        timings.compute_total();
556        if self.capacity > 0 && self.history.len() >= self.capacity {
557            self.history.remove(0);
558        }
559        self.history.push(timings);
560    }
561
562    /// Rolling average total step time.
563    pub fn mean_total_ms(&self) -> f64 {
564        if self.history.is_empty() {
565            return 0.0;
566        }
567        self.history.iter().map(|t| t.total_ms).sum::<f64>() / self.history.len() as f64
568    }
569
570    /// Peak total step time.
571    pub fn peak_total_ms(&self) -> f64 {
572        self.history
573            .iter()
574            .map(|t| t.total_ms)
575            .fold(0.0_f64, f64::max)
576    }
577
578    /// Export timings as CSV string.
579    pub fn export_csv(&self) -> String {
580        let mut out = String::from("broad_ms,narrow_ms,solver_ms,integration_ms,total_ms\n");
581        for t in &self.history {
582            out.push_str(&format!(
583                "{:.4},{:.4},{:.4},{:.4},{:.4}\n",
584                t.broad_ms, t.narrow_ms, t.solver_ms, t.integration_ms, t.total_ms
585            ));
586        }
587        out
588    }
589}
590
591// ─────────────────────────────────────────────────────────────────────────────
592// PySimulationReport
593// ─────────────────────────────────────────────────────────────────────────────
594
/// A full simulation quality report aggregating every sub-tracker.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PySimulationReport {
    /// Total steps simulated.
    pub total_steps: u64,
    /// Total simulation time (s).
    pub total_time: f64,
    /// Energy summary.
    pub energy: PyEnergyTracker,
    /// Momentum summary.
    pub momentum: PyMomentumTracker,
    /// Collision stats.
    pub collision: PyCollisionStats,
    /// Sleep stats.
    pub sleep: PySleepStats,
    /// Constraint stats.
    pub constraint: PyConstraintStats,
    /// Performance stats.
    pub performance: PyPerformanceTracker,
    /// Human-readable anomaly messages appended by the `check_*` methods
    /// and `flag_anomaly`.
    pub anomaly_list: Vec<String>,
}
617
618impl PySimulationReport {
619    /// Create a new report.
620    pub fn new() -> Self {
621        Self {
622            energy: PyEnergyTracker::new(1000, 0.5),
623            momentum: PyMomentumTracker::new(0.01),
624            collision: PyCollisionStats::new(32, 1000.0),
625            sleep: PySleepStats::new(1000),
626            constraint: PyConstraintStats::new(),
627            performance: PyPerformanceTracker::new(1000),
628            ..Default::default()
629        }
630    }
631
632    /// Serialise report to JSON string.
633    pub fn as_json(&self) -> Result<String, serde_json::Error> {
634        serde_json::to_string_pretty(self)
635    }
636
637    /// Append an anomaly message.
638    pub fn flag_anomaly(&mut self, msg: impl Into<String>) {
639        self.anomaly_list.push(msg.into());
640    }
641
642    /// Run a check and flag if energy anomalies exist.
643    pub fn check_energy(&mut self) {
644        if self.energy.has_anomalies() {
645            self.flag_anomaly(format!(
646                "Energy anomalies detected: {} jumps",
647                self.energy.anomalies.len()
648            ));
649        }
650    }
651
652    /// Run a check and flag if momentum is not conserved.
653    pub fn check_momentum(&mut self) {
654        if !self.momentum.linear_conserved() {
655            self.flag_anomaly(format!(
656                "Linear momentum not conserved; max deviation = {:.4}",
657                self.momentum.max_linear_deviation()
658            ));
659        }
660    }
661}
662
663// ─────────────────────────────────────────────────────────────────────────────
664// PyBenchmark
665// ─────────────────────────────────────────────────────────────────────────────
666
/// Benchmarks a physics simulation over N steps (mock cost model).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyBenchmark {
    /// Number of steps run in the last `run` call.
    pub steps_run: u64,
    /// Total wall-clock time for all steps (ms, simulated).
    pub total_ms: f64,
    /// Throughput in steps per second, derived from the last run.
    pub throughput: f64,
    /// Body count used in the benchmark.
    pub body_count: usize,
    /// Constraint count used.
    pub constraint_count: usize,
}
681
682impl PyBenchmark {
683    /// Create a benchmark for the given body and constraint counts.
684    pub fn new(body_count: usize, constraint_count: usize) -> Self {
685        Self {
686            body_count,
687            constraint_count,
688            ..Default::default()
689        }
690    }
691
692    /// Run `n_steps` of a mock simulation with artificial cost.
693    pub fn run(&mut self, n_steps: u64, step_cost_ms: f64) {
694        self.steps_run = n_steps;
695        // Mock: cost scales linearly with body+constraint count
696        let scale = 1.0 + (self.body_count + self.constraint_count) as f64 / 1000.0;
697        self.total_ms = n_steps as f64 * step_cost_ms * scale;
698        if self.total_ms > 0.0 {
699            self.throughput = self.steps_run as f64 / (self.total_ms / 1000.0);
700        }
701    }
702
703    /// Throughput in steps/s.
704    pub fn steps_per_second(&self) -> f64 {
705        self.throughput
706    }
707}
708
709// ─────────────────────────────────────────────────────────────────────────────
710// PyDataExporter
711// ─────────────────────────────────────────────────────────────────────────────
712
/// Exports per-body simulation state rows to CSV or JSON.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyDataExporter {
    /// Recorded rows: `(time, body_id, px, py, pz, vx, vy, vz, ke)`.
    rows: Vec<(f64, u32, f64, f64, f64, f64, f64, f64, f64)>,
}
719
720impl PyDataExporter {
721    /// Create an empty exporter.
722    pub fn new() -> Self {
723        Self::default()
724    }
725
726    /// Record a body state.
727    #[allow(clippy::too_many_arguments)]
728    pub fn record(
729        &mut self,
730        time: f64,
731        body_id: u32,
732        position: [f64; 3],
733        velocity: [f64; 3],
734        ke: f64,
735    ) {
736        self.rows.push((
737            time,
738            body_id,
739            position[0],
740            position[1],
741            position[2],
742            velocity[0],
743            velocity[1],
744            velocity[2],
745            ke,
746        ));
747    }
748
749    /// Export all rows to CSV.
750    pub fn to_csv(&self) -> String {
751        let mut out = String::from("time,body_id,px,py,pz,vx,vy,vz,ke\n");
752        for &(t, id, px, py, pz, vx, vy, vz, ke) in &self.rows {
753            out.push_str(&format!(
754                "{t:.6},{id},{px:.6},{py:.6},{pz:.6},{vx:.6},{vy:.6},{vz:.6},{ke:.6}\n"
755            ));
756        }
757        out
758    }
759
760    /// Export all rows to JSON.
761    pub fn to_json(&self) -> Result<String, serde_json::Error> {
762        serde_json::to_string_pretty(&self.rows)
763    }
764
765    /// Number of recorded rows.
766    pub fn row_count(&self) -> usize {
767        self.rows.len()
768    }
769
770    /// Clear all rows.
771    pub fn clear(&mut self) {
772        self.rows.clear();
773    }
774}
775
776// ─────────────────────────────────────────────────────────────────────────────
777// Tests
778// ─────────────────────────────────────────────────────────────────────────────
779
780#[cfg(test)]
781mod tests {
782    use super::*;
783
784    // --- PySnapshot ---
785
786    #[test]
787    fn test_snapshot_default() {
788        let s = PySnapshot::default();
789        assert_eq!(s.time, 0.0);
790        assert_eq!(s.kinetic_energy, 0.0);
791    }
792
793    #[test]
794    fn test_snapshot_compute_total() {
795        let mut s = PySnapshot {
796            kinetic_energy: 3.0,
797            potential_energy: 2.0,
798            ..Default::default()
799        };
800        s.compute_total();
801        assert!((s.total_energy - 5.0).abs() < 1e-12);
802    }
803
804    // --- PyPhysicsAnalytics ---
805
806    #[test]
807    fn test_analytics_snapshot() {
808        let mut a = PyPhysicsAnalytics::new(10);
809        for i in 0..5 {
810            let s = PySnapshot {
811                time: i as f64,
812                kinetic_energy: i as f64,
813                ..Default::default()
814            };
815            a.snapshot(s);
816        }
817        assert_eq!(a.snapshots.len(), 5);
818    }
819
820    #[test]
821    fn test_analytics_evicts_old() {
822        let mut a = PyPhysicsAnalytics::new(3);
823        for i in 0..5 {
824            let s = PySnapshot {
825                kinetic_energy: i as f64,
826                ..Default::default()
827            };
828            a.snapshot(s);
829        }
830        assert_eq!(a.snapshots.len(), 3);
831    }
832
833    #[test]
834    fn test_analytics_metrics() {
835        let mut a = PyPhysicsAnalytics::new(100);
836        for i in 1..=4 {
837            let s = PySnapshot {
838                kinetic_energy: i as f64,
839                ..Default::default()
840            };
841            a.snapshot(s);
842        }
843        let m = a.compute_metrics();
844        assert!((m.mean_ke - 2.5).abs() < 1e-10);
845        assert!((m.max_ke - 4.0).abs() < 1e-10);
846    }
847
848    #[test]
849    fn test_analytics_is_empty() {
850        let a = PyPhysicsAnalytics::new(10);
851        assert!(a.is_empty());
852    }
853
854    // --- PyEnergyTracker ---
855
856    #[test]
857    fn test_energy_tracker_record() {
858        let mut t = PyEnergyTracker::new(100, 0.5);
859        t.record(0.0, 10.0, 5.0);
860        t.record(0.1, 11.0, 5.0);
861        assert_eq!(t.history.len(), 2);
862    }
863
864    #[test]
865    fn test_energy_tracker_anomaly_detect() {
866        let mut t = PyEnergyTracker::new(100, 0.1);
867        t.record(0.0, 10.0, 0.0);
868        t.record(0.1, 1000.0, 0.0); // huge jump
869        assert!(t.has_anomalies());
870    }
871
872    #[test]
873    fn test_energy_tracker_no_anomaly() {
874        let mut t = PyEnergyTracker::new(100, 0.5);
875        t.record(0.0, 10.0, 0.0);
876        t.record(0.1, 10.1, 0.0);
877        assert!(!t.has_anomalies());
878    }
879
880    #[test]
881    fn test_energy_tracker_mean_ke() {
882        let mut t = PyEnergyTracker::new(100, 1.0);
883        t.record(0.0, 4.0, 0.0);
884        t.record(0.1, 6.0, 0.0);
885        assert!((t.mean_ke() - 5.0).abs() < 1e-10);
886    }
887
888    #[test]
889    fn test_energy_tracker_eviction() {
890        let mut t = PyEnergyTracker::new(2, 10.0);
891        t.record(0.0, 1.0, 0.0);
892        t.record(0.1, 2.0, 0.0);
893        t.record(0.2, 3.0, 0.0);
894        assert_eq!(t.history.len(), 2);
895    }
896
897    // --- PyMomentumTracker ---
898
899    #[test]
900    fn test_momentum_tracker_conserved() {
901        let mut t = PyMomentumTracker::new(0.01);
902        t.record(0.0, 5.0, 2.0);
903        t.record(0.1, 5.001, 2.001);
904        assert!(t.linear_conserved());
905    }
906
907    #[test]
908    fn test_momentum_tracker_not_conserved() {
909        let mut t = PyMomentumTracker::new(0.001);
910        t.record(0.0, 5.0, 2.0);
911        t.record(0.1, 6.0, 2.0); // large jump
912        assert!(!t.linear_conserved());
913    }
914
915    #[test]
916    fn test_momentum_tracker_deviation() {
917        let mut t = PyMomentumTracker::new(0.5);
918        t.record(0.0, 10.0, 0.0);
919        t.record(0.1, 10.5, 0.0);
920        assert!((t.max_linear_deviation() - 0.5).abs() < 1e-10);
921    }
922
923    // --- PyCollisionStats ---
924
925    #[test]
926    fn test_collision_stats_record() {
927        let mut s = PyCollisionStats::new(10, 100.0);
928        s.record_collision(0, 1, 50.0, 0.01);
929        assert_eq!(s.total_collisions, 1);
930    }
931
932    #[test]
933    fn test_collision_stats_pair_count() {
934        let mut s = PyCollisionStats::new(10, 100.0);
935        s.record_collision(0, 1, 10.0, 0.01);
936        s.record_collision(0, 1, 20.0, 0.02);
937        let pair = s.most_collided_pair().unwrap();
938        assert_eq!(pair.2, 2);
939    }
940
941    #[test]
942    fn test_collision_stats_depth() {
943        let mut s = PyCollisionStats::new(10, 100.0);
944        s.record_collision(0, 1, 10.0, 0.05);
945        s.record_collision(2, 3, 10.0, 0.10);
946        assert!((s.depth_mean - 0.075).abs() < 1e-10);
947    }
948
949    #[test]
950    fn test_collision_stats_reset() {
951        let mut s = PyCollisionStats::new(10, 100.0);
952        s.record_collision(0, 1, 10.0, 0.01);
953        s.reset();
954        assert_eq!(s.total_collisions, 0);
955    }
956
957    // --- PySleepStats ---
958
959    #[test]
960    fn test_sleep_stats_record() {
961        let mut s = PySleepStats::new(100);
962        s.record(0.0, 3, 10, 0, 3);
963        s.record(0.1, 5, 10, 2, 2);
964        assert_eq!(s.total_wake_events, 2);
965    }
966
967    #[test]
968    fn test_sleep_stats_mean_fraction() {
969        let mut s = PySleepStats::new(100);
970        s.record(0.0, 5, 10, 0, 0);
971        s.record(0.1, 3, 10, 0, 0);
972        // (0.5 + 0.3) / 2 = 0.4
973        assert!((s.mean_sleeping_fraction() - 0.4).abs() < 1e-10);
974    }
975
976    // --- PyConstraintStats ---
977
978    #[test]
979    fn test_constraint_stats_record() {
980        let mut s = PyConstraintStats::new();
981        s.record(0.001, 5, 0.8);
982        s.record(0.002, 7, 0.9);
983        assert!((s.mean_iterations - 6.0).abs() < 1e-10);
984    }
985
986    #[test]
987    fn test_constraint_stats_converging() {
988        let mut s = PyConstraintStats::new();
989        s.record(0.0001, 3, 1.0);
990        assert!(s.is_converging(0.01));
991    }
992
993    // --- PyPerformanceTracker ---
994
995    #[test]
996    fn test_perf_tracker_mean() {
997        let mut t = PyPerformanceTracker::new(100);
998        t.record(PyStepTimings {
999            broad_ms: 1.0,
1000            narrow_ms: 2.0,
1001            solver_ms: 3.0,
1002            integration_ms: 0.5,
1003            total_ms: 0.0,
1004        });
1005        t.record(PyStepTimings {
1006            broad_ms: 2.0,
1007            narrow_ms: 1.0,
1008            solver_ms: 2.0,
1009            integration_ms: 0.5,
1010            total_ms: 0.0,
1011        });
1012        // totals: 6.5 and 5.5 -> mean 6.0
1013        assert!((t.mean_total_ms() - 6.0).abs() < 1e-10);
1014    }
1015
1016    #[test]
1017    fn test_perf_tracker_csv() {
1018        let mut t = PyPerformanceTracker::new(10);
1019        t.record(PyStepTimings {
1020            broad_ms: 1.0,
1021            narrow_ms: 1.0,
1022            solver_ms: 1.0,
1023            integration_ms: 1.0,
1024            total_ms: 0.0,
1025        });
1026        let csv = t.export_csv();
1027        assert!(csv.contains("broad_ms"));
1028    }
1029
1030    // --- PySimulationReport ---
1031
1032    #[test]
1033    fn test_report_as_json() {
1034        let mut r = PySimulationReport::new();
1035        r.total_steps = 100;
1036        let json = r.as_json().unwrap();
1037        assert!(json.contains("total_steps"));
1038    }
1039
1040    #[test]
1041    fn test_report_anomaly() {
1042        let mut r = PySimulationReport::new();
1043        r.energy.record(0.0, 10.0, 0.0);
1044        r.energy.record(0.1, 1000.0, 0.0); // triggers anomaly
1045        r.check_energy();
1046        assert!(!r.anomaly_list.is_empty());
1047    }
1048
1049    // --- PyBenchmark ---
1050
1051    #[test]
1052    fn test_benchmark_run() {
1053        let mut b = PyBenchmark::new(100, 50);
1054        b.run(1000, 1.0);
1055        assert_eq!(b.steps_run, 1000);
1056        assert!(b.throughput > 0.0);
1057    }
1058
1059    #[test]
1060    fn test_benchmark_throughput() {
1061        let mut b = PyBenchmark::new(0, 0);
1062        b.run(1000, 1.0); // 1000 steps × 1ms/step = 1000ms total = 1s
1063        // throughput ~ 1000 steps/s
1064        assert!((b.steps_per_second() - 1000.0).abs() < 1.0);
1065    }
1066
1067    // --- PyDataExporter ---
1068
1069    #[test]
1070    fn test_exporter_csv() {
1071        let mut e = PyDataExporter::new();
1072        e.record(0.0, 1, [0.0, 1.0, 2.0], [0.1, 0.2, 0.3], 5.0);
1073        let csv = e.to_csv();
1074        assert!(csv.contains("time,body_id"));
1075        assert!(csv.contains("0.000000"));
1076    }
1077
1078    #[test]
1079    fn test_exporter_json() {
1080        let mut e = PyDataExporter::new();
1081        e.record(0.0, 1, [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], 0.0);
1082        let json = e.to_json().unwrap();
1083        assert!(json.contains("["));
1084    }
1085
1086    #[test]
1087    fn test_exporter_row_count() {
1088        let mut e = PyDataExporter::new();
1089        for i in 0..5 {
1090            e.record(i as f64, i as u32, [0.0; 3], [0.0; 3], 0.0);
1091        }
1092        assert_eq!(e.row_count(), 5);
1093    }
1094
1095    #[test]
1096    fn test_exporter_clear() {
1097        let mut e = PyDataExporter::new();
1098        e.record(0.0, 0, [0.0; 3], [0.0; 3], 0.0);
1099        e.clear();
1100        assert_eq!(e.row_count(), 0);
1101    }
1102}