#![allow(clippy::type_complexity)]
#![allow(missing_docs)]
#![allow(dead_code)]
use serde::{Deserialize, Serialize};
/// A single point-in-time sample of simulation state used for analytics.
///
/// Energy, momentum, and body-count fields are raw observations supplied by
/// the caller; `total_energy` is a cached sum maintained by
/// [`PySnapshot::compute_total`].
///
/// The previous hand-written `Default` impl set every field to zero, which is
/// exactly what `#[derive(Default)]` produces — so the derive replaces it.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PySnapshot {
    /// Simulation time of the sample.
    pub time: f64,
    pub kinetic_energy: f64,
    pub potential_energy: f64,
    /// Cached `kinetic_energy + potential_energy`; refresh via `compute_total`.
    pub total_energy: f64,
    /// Magnitude of total linear momentum.
    pub linear_momentum: f64,
    /// Magnitude of total angular momentum.
    pub angular_momentum: f64,
    pub contact_count: usize,
    pub awake_count: usize,
    pub sleeping_count: usize,
    /// Wall-clock cost of the step that produced this sample, in milliseconds.
    pub step_ms: f64,
}

impl PySnapshot {
    /// Recomputes the cached `total_energy` from its two components.
    pub fn compute_total(&mut self) {
        self.total_energy = self.kinetic_energy + self.potential_energy;
    }
}
/// Rolling buffer of [`PySnapshot`] samples with simple aggregate statistics.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyPhysicsAnalytics {
    /// Retained samples, oldest first.
    pub snapshots: Vec<PySnapshot>,
    /// Maximum number of retained samples; `0` disables eviction.
    pub max_snapshots: usize,
    /// Monotonic step counter, advanced by `snapshot` and `step`.
    pub step: u64,
}

impl PyPhysicsAnalytics {
    /// Creates an empty analytics buffer holding at most `max_snapshots` samples.
    pub fn new(max_snapshots: usize) -> Self {
        Self {
            snapshots: Vec::new(),
            max_snapshots,
            step: 0,
        }
    }

    /// Appends a sample, evicting the oldest one when the buffer is full,
    /// and advances the step counter.
    pub fn snapshot(&mut self, snap: PySnapshot) {
        let at_capacity = self.max_snapshots > 0 && self.snapshots.len() >= self.max_snapshots;
        if at_capacity {
            // Oldest sample sits at the front; eviction preserves insertion order.
            self.snapshots.remove(0);
        }
        self.snapshots.push(snap);
        self.step += 1;
    }

    /// Advances the step counter without recording a sample.
    pub fn step(&mut self) {
        self.step += 1;
    }

    /// Computes aggregate statistics over the retained samples.
    ///
    /// Returns an all-zero [`PyAggregateMetrics`] when no samples exist.
    pub fn compute_metrics(&self) -> PyAggregateMetrics {
        if self.snapshots.is_empty() {
            return PyAggregateMetrics::default();
        }
        let count = self.snapshots.len();
        // Shared mean helper; summation order matches iteration order.
        let mean_of = |field: fn(&PySnapshot) -> f64| -> f64 {
            self.snapshots.iter().map(field).sum::<f64>() / count as f64
        };
        let mean_ke = mean_of(|s| s.kinetic_energy);
        let mean_pe = mean_of(|s| s.potential_energy);
        let mean_step_ms = mean_of(|s| s.step_ms);
        // Single pass for the extremes.
        let mut max_ke = f64::NEG_INFINITY;
        let mut min_ke = f64::INFINITY;
        let mut max_step_ms = f64::NEG_INFINITY;
        for s in &self.snapshots {
            max_ke = max_ke.max(s.kinetic_energy);
            min_ke = min_ke.min(s.kinetic_energy);
            max_step_ms = max_step_ms.max(s.step_ms);
        }
        PyAggregateMetrics {
            mean_ke,
            mean_pe,
            max_ke,
            min_ke,
            mean_step_ms,
            max_step_ms,
            sample_count: count,
        }
    }

    /// Returns the most recently recorded sample, if any.
    pub fn latest(&self) -> Option<&PySnapshot> {
        self.snapshots.last()
    }

    /// True when no samples have been recorded.
    pub fn is_empty(&self) -> bool {
        self.snapshots.is_empty()
    }
}
/// Aggregate statistics over a window of [`PySnapshot`] samples, produced by
/// `PyPhysicsAnalytics::compute_metrics`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyAggregateMetrics {
    // Mean kinetic / potential energy over the window.
    pub mean_ke: f64,
    pub mean_pe: f64,
    // Extremes of kinetic energy over the window.
    pub max_ke: f64,
    pub min_ke: f64,
    // Mean and peak per-step wall-clock cost, in milliseconds.
    pub mean_step_ms: f64,
    pub max_step_ms: f64,
    /// Number of snapshots the statistics were computed from.
    pub sample_count: usize,
}
/// Tracks `(time, ke, pe, total)` energy samples and flags sudden relative
/// jumps in total energy as anomalies.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyEnergyTracker {
    /// Retained samples as `(time, ke, pe, total)`, oldest first.
    pub history: Vec<(f64, f64, f64, f64)>,
    /// Maximum retained samples; `0` disables eviction.
    pub capacity: usize,
    /// Relative change in total energy above which a jump is flagged.
    pub anomaly_threshold: f64,
    /// Flagged jumps as `(sample index at detection, signed energy delta)`.
    pub anomalies: Vec<(usize, f64)>,
}

impl PyEnergyTracker {
    /// Creates a tracker with the given history capacity and jump threshold.
    pub fn new(capacity: usize, anomaly_threshold: f64) -> Self {
        Self {
            capacity,
            anomaly_threshold,
            history: Vec::new(),
            anomalies: Vec::new(),
        }
    }

    /// Records one sample, comparing its total energy against the previous
    /// sample and flagging an anomaly when the relative jump exceeds the
    /// threshold.
    pub fn record(&mut self, time: f64, ke: f64, pe: f64) {
        let total = ke + pe;
        if let Some(&(_, _, _, prev_total)) = self.history.last() {
            // Guard against dividing by a (near-)zero previous total.
            if prev_total.abs() > 1e-10 {
                let relative_jump = (total - prev_total).abs() / prev_total.abs();
                if relative_jump > self.anomaly_threshold {
                    // NOTE(review): the stored index is the history length at
                    // detection time; once capacity eviction kicks in it no
                    // longer maps onto `history` positions — confirm intent.
                    self.anomalies.push((self.history.len(), total - prev_total));
                }
            }
        }
        if self.capacity > 0 && self.history.len() >= self.capacity {
            self.history.remove(0);
        }
        self.history.push((time, ke, pe, total));
    }

    /// Mean kinetic energy over the retained history (0.0 when empty).
    pub fn mean_ke(&self) -> f64 {
        match self.history.len() {
            0 => 0.0,
            n => self.history.iter().map(|&(_, ke, _, _)| ke).sum::<f64>() / n as f64,
        }
    }

    /// Mean total energy over the retained history (0.0 when empty).
    pub fn mean_total(&self) -> f64 {
        match self.history.len() {
            0 => 0.0,
            n => {
                self.history
                    .iter()
                    .map(|&(_, _, _, total)| total)
                    .sum::<f64>()
                    / n as f64
            }
        }
    }

    /// Total energy of the newest sample, or 0.0 when none exist.
    pub fn latest_total(&self) -> f64 {
        self.history.last().map_or(0.0, |&(_, _, _, t)| t)
    }

    /// True when at least one energy jump has been flagged.
    pub fn has_anomalies(&self) -> bool {
        !self.anomalies.is_empty()
    }
}
/// Tracks linear/angular momentum magnitudes against their first recorded
/// values to judge conservation within a relative tolerance.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyMomentumTracker {
    /// Samples as `(time, linear magnitude, angular magnitude)`.
    pub history: Vec<(f64, f64, f64)>,
    /// Baseline magnitudes captured from the very first sample.
    pub initial_linear: f64,
    pub initial_angular: f64,
    /// Relative tolerance used by the conservation checks.
    pub tolerance: f64,
}

impl PyMomentumTracker {
    /// Creates an empty tracker with the given tolerance.
    pub fn new(tolerance: f64) -> Self {
        PyMomentumTracker {
            tolerance,
            ..Default::default()
        }
    }

    /// Records one sample; the first sample also sets the baselines.
    pub fn record(&mut self, time: f64, linear_mag: f64, angular_mag: f64) {
        if self.history.is_empty() {
            self.initial_linear = linear_mag;
            self.initial_angular = angular_mag;
        }
        self.history.push((time, linear_mag, angular_mag));
    }

    // Shared tolerance test: |value - baseline| <= tol * (|baseline| + 1).
    fn within_tolerance(&self, value: f64, baseline: f64) -> bool {
        (value - baseline).abs() <= self.tolerance * (baseline.abs() + 1.0)
    }

    /// True when every linear sample stays within tolerance of the baseline
    /// (vacuously true when no samples exist).
    pub fn linear_conserved(&self) -> bool {
        self.history
            .iter()
            .all(|&(_, lm, _)| self.within_tolerance(lm, self.initial_linear))
    }

    /// True when every angular sample stays within tolerance of the baseline
    /// (vacuously true when no samples exist).
    pub fn angular_conserved(&self) -> bool {
        self.history
            .iter()
            .all(|&(_, _, am)| self.within_tolerance(am, self.initial_angular))
    }

    /// Largest absolute deviation of linear magnitude from the baseline
    /// (0.0 when no samples exist).
    pub fn max_linear_deviation(&self) -> f64 {
        self.history
            .iter()
            .map(|&(_, lm, _)| (lm - self.initial_linear).abs())
            .fold(0.0_f64, f64::max)
    }
}
/// Accumulates collision statistics: a total count, per-pair counts, an
/// impulse histogram, and penetration-depth extremes and mean.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyCollisionStats {
    pub total_collisions: u64,
    /// Unordered body-id pairs with their collision counts.
    pub pair_counts: Vec<(u32, u32, u64)>,
    /// Histogram of impulses binned uniformly over `[0, max_impulse]`.
    pub impulse_histogram: Vec<u64>,
    /// Upper edge of the impulse histogram range.
    pub max_impulse: f64,
    /// Depth extremes start at +/-infinity so any real depth replaces them.
    /// NOTE(review): serde_json renders non-finite floats as `null` — confirm
    /// downstream consumers tolerate that before any depth is recorded.
    pub depth_min: f64,
    pub depth_max: f64,
    pub depth_mean: f64,
    // Incremental accumulators backing `depth_mean`.
    depth_sum: f64,
    depth_count: u64,
}

impl Default for PyCollisionStats {
    /// Matches the state produced by [`PyCollisionStats::reset`].
    ///
    /// The previously derived `Default` left `depth_min`/`depth_max` at 0.0,
    /// which made min/max tracking wrong on any default-constructed value
    /// (a positive depth could never lower `depth_min` below 0.0).
    fn default() -> Self {
        Self {
            total_collisions: 0,
            pair_counts: Vec::new(),
            impulse_histogram: Vec::new(),
            max_impulse: 0.0,
            depth_min: f64::INFINITY,
            depth_max: f64::NEG_INFINITY,
            depth_mean: 0.0,
            depth_sum: 0.0,
            depth_count: 0,
        }
    }
}

impl PyCollisionStats {
    /// Creates stats with `n_bins` impulse bins covering `[0, max_impulse]`.
    pub fn new(n_bins: usize, max_impulse: f64) -> Self {
        Self {
            max_impulse,
            impulse_histogram: vec![0; n_bins],
            ..Default::default()
        }
    }

    /// Records one collision between `body_a` and `body_b`.
    ///
    /// The pair count is symmetric ((a, b) and (b, a) share a bucket), the
    /// impulse is clamped into the top histogram bin when it exceeds
    /// `max_impulse`, and depth statistics are updated incrementally.
    pub fn record_collision(&mut self, body_a: u32, body_b: u32, impulse: f64, depth: f64) {
        self.total_collisions += 1;
        // Unordered pair lookup: (a, b) matches (b, a).
        let pair = self
            .pair_counts
            .iter_mut()
            .find(|(a, b, _)| (*a == body_a && *b == body_b) || (*a == body_b && *b == body_a));
        match pair {
            Some((_, _, cnt)) => *cnt += 1,
            None => self.pair_counts.push((body_a, body_b, 1)),
        }
        let n = self.impulse_histogram.len();
        if n > 0 && self.max_impulse > 0.0 {
            // Rust's saturating float->int cast sends negatives (and NaN) to
            // bin 0; `min` clamps overshoot into the last bin.
            let bin = ((impulse / self.max_impulse) * n as f64) as usize;
            self.impulse_histogram[bin.min(n - 1)] += 1;
        }
        if depth < self.depth_min {
            self.depth_min = depth;
        }
        if depth > self.depth_max {
            self.depth_max = depth;
        }
        self.depth_sum += depth;
        self.depth_count += 1;
        self.depth_mean = self.depth_sum / self.depth_count as f64;
    }

    /// Clears all statistics while keeping the histogram bin count.
    pub fn reset(&mut self) {
        self.total_collisions = 0;
        self.pair_counts.clear();
        for b in &mut self.impulse_histogram {
            *b = 0;
        }
        self.depth_min = f64::INFINITY;
        self.depth_max = f64::NEG_INFINITY;
        self.depth_mean = 0.0;
        self.depth_sum = 0.0;
        self.depth_count = 0;
    }

    /// The pair with the highest collision count, if any collisions occurred.
    pub fn most_collided_pair(&self) -> Option<(u32, u32, u64)> {
        self.pair_counts.iter().max_by_key(|&&(_, _, c)| c).copied()
    }
}
/// Tracks sleep/wake event totals and a rolling history of the fraction of
/// sleeping bodies over time.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PySleepStats {
    pub total_wake_events: u64,
    pub total_sleep_events: u64,
    /// `(time, sleeping fraction)` samples, oldest first.
    pub sleeping_fraction_history: Vec<(f64, f64)>,
    /// Maximum retained samples; `0` disables eviction.
    pub capacity: usize,
}

impl PySleepStats {
    /// Creates empty stats with the given history capacity.
    pub fn new(capacity: usize) -> Self {
        PySleepStats {
            capacity,
            ..Default::default()
        }
    }

    /// Accumulates wake/sleep event counts and appends the sleeping fraction
    /// for this instant, evicting the oldest sample when at capacity.
    pub fn record(&mut self, time: f64, sleeping: usize, total: usize, wakes: u64, sleeps: u64) {
        self.total_wake_events += wakes;
        self.total_sleep_events += sleeps;
        // An empty world counts as a fraction of zero rather than NaN.
        let frac = match total {
            0 => 0.0,
            _ => sleeping as f64 / total as f64,
        };
        if self.capacity > 0 && self.sleeping_fraction_history.len() >= self.capacity {
            self.sleeping_fraction_history.remove(0);
        }
        self.sleeping_fraction_history.push((time, frac));
    }

    /// Mean sleeping fraction over the retained history (0.0 when empty).
    pub fn mean_sleeping_fraction(&self) -> f64 {
        match self.sleeping_fraction_history.len() {
            0 => 0.0,
            n => {
                self.sleeping_fraction_history
                    .iter()
                    .map(|&(_, f)| f)
                    .sum::<f64>()
                    / n as f64
            }
        }
    }
}
/// Running means and extremes of constraint-solver diagnostics.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyConstraintStats {
    pub mean_residual: f64,
    pub max_residual: f64,
    pub mean_iterations: f64,
    /// Warm-start fraction from the most recent sample (not averaged).
    pub warm_start_rate: f64,
    // Accumulators backing the running means.
    sample_count: u64,
    residual_sum: f64,
    iterations_sum: f64,
}

impl PyConstraintStats {
    /// Creates zeroed statistics.
    pub fn new() -> Self {
        Self::default()
    }

    /// Folds one solver sample into the running means; `warm_start_rate`
    /// keeps only the latest value.
    pub fn record(&mut self, residual: f64, iterations: u32, warm_start_fraction: f64) {
        self.sample_count += 1;
        self.residual_sum += residual;
        self.iterations_sum += f64::from(iterations);
        let samples = self.sample_count as f64;
        self.mean_residual = self.residual_sum / samples;
        self.mean_iterations = self.iterations_sum / samples;
        self.warm_start_rate = warm_start_fraction;
        if residual > self.max_residual {
            self.max_residual = residual;
        }
    }

    /// True when the mean residual is below `threshold`.
    pub fn is_converging(&self, threshold: f64) -> bool {
        self.mean_residual < threshold
    }
}
/// Wall-clock timings (milliseconds) for the phases of one physics step.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyStepTimings {
    pub broad_ms: f64,
    pub narrow_ms: f64,
    pub solver_ms: f64,
    pub integration_ms: f64,
    /// Cached sum of the four phases; see [`PyStepTimings::compute_total`].
    pub total_ms: f64,
}

impl PyStepTimings {
    /// Refreshes `total_ms` as the sum of the four phase timings.
    pub fn compute_total(&mut self) {
        let PyStepTimings {
            broad_ms,
            narrow_ms,
            solver_ms,
            integration_ms,
            ..
        } = *self;
        self.total_ms = broad_ms + narrow_ms + solver_ms + integration_ms;
    }
}
/// Rolling window of per-step timing breakdowns.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyPerformanceTracker {
    /// Retained timing samples, oldest first.
    pub history: Vec<PyStepTimings>,
    /// Maximum retained samples; `0` disables eviction.
    pub capacity: usize,
}

impl PyPerformanceTracker {
    /// Creates an empty tracker holding at most `capacity` samples.
    pub fn new(capacity: usize) -> Self {
        PyPerformanceTracker {
            history: Vec::new(),
            capacity,
        }
    }

    /// Stores a timing sample after refreshing its total, evicting the
    /// oldest entry when at capacity.
    pub fn record(&mut self, mut timings: PyStepTimings) {
        timings.compute_total();
        if self.capacity > 0 && self.history.len() >= self.capacity {
            self.history.remove(0);
        }
        self.history.push(timings);
    }

    /// Mean total step time over the window (0.0 when empty).
    pub fn mean_total_ms(&self) -> f64 {
        match self.history.len() {
            0 => 0.0,
            n => self.history.iter().map(|t| t.total_ms).sum::<f64>() / n as f64,
        }
    }

    /// Largest total step time in the window (0.0 when empty).
    pub fn peak_total_ms(&self) -> f64 {
        self.history
            .iter()
            .map(|t| t.total_ms)
            .fold(0.0_f64, f64::max)
    }

    /// Renders the window as CSV with a header row, 4 decimal places per cell.
    pub fn export_csv(&self) -> String {
        let mut csv = String::from("broad_ms,narrow_ms,solver_ms,integration_ms,total_ms\n");
        for t in &self.history {
            let row = format!(
                "{:.4},{:.4},{:.4},{:.4},{:.4}\n",
                t.broad_ms, t.narrow_ms, t.solver_ms, t.integration_ms, t.total_ms
            );
            csv.push_str(&row);
        }
        csv
    }
}
/// Bundles every tracker plus a free-form anomaly list into one report.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PySimulationReport {
    pub total_steps: u64,
    pub total_time: f64,
    pub energy: PyEnergyTracker,
    pub momentum: PyMomentumTracker,
    pub collision: PyCollisionStats,
    pub sleep: PySleepStats,
    pub constraint: PyConstraintStats,
    pub performance: PyPerformanceTracker,
    /// Human-readable descriptions of detected anomalies.
    pub anomaly_list: Vec<String>,
}

impl PySimulationReport {
    /// Creates a report with each tracker configured to its standard
    /// capacity/threshold defaults.
    pub fn new() -> Self {
        Self {
            energy: PyEnergyTracker::new(1000, 0.5),
            momentum: PyMomentumTracker::new(0.01),
            collision: PyCollisionStats::new(32, 1000.0),
            sleep: PySleepStats::new(1000),
            constraint: PyConstraintStats::new(),
            performance: PyPerformanceTracker::new(1000),
            ..Default::default()
        }
    }

    /// Serializes the whole report as pretty-printed JSON.
    pub fn as_json(&self) -> Result<String, serde_json::Error> {
        serde_json::to_string_pretty(self)
    }

    /// Appends a free-form anomaly message to the report.
    pub fn flag_anomaly(&mut self, msg: impl Into<String>) {
        self.anomaly_list.push(msg.into());
    }

    /// Flags an anomaly when the energy tracker detected any jumps.
    pub fn check_energy(&mut self) {
        if self.energy.has_anomalies() {
            self.flag_anomaly(format!(
                "Energy anomalies detected: {} jumps",
                self.energy.anomalies.len()
            ));
        }
    }

    /// Flags an anomaly when linear momentum drifted outside tolerance.
    pub fn check_momentum(&mut self) {
        if !self.momentum.linear_conserved() {
            self.flag_anomaly(format!(
                "Linear momentum not conserved; max deviation = {:.4}",
                self.momentum.max_linear_deviation()
            ));
        }
    }

    /// Flags an anomaly when angular momentum drifted outside tolerance.
    ///
    /// Complements `check_momentum`, which previously surfaced only the
    /// linear component even though the tracker records both.
    pub fn check_angular_momentum(&mut self) {
        if !self.momentum.angular_conserved() {
            self.flag_anomaly("Angular momentum not conserved");
        }
    }
}
/// Synthetic throughput benchmark parameterized by scene complexity.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyBenchmark {
    pub steps_run: u64,
    pub total_ms: f64,
    /// Steps per second achieved by the last `run` call.
    pub throughput: f64,
    pub body_count: usize,
    pub constraint_count: usize,
}

impl PyBenchmark {
    /// Creates a benchmark for a scene of the given size.
    pub fn new(body_count: usize, constraint_count: usize) -> Self {
        PyBenchmark {
            body_count,
            constraint_count,
            ..Default::default()
        }
    }

    /// Models running `n_steps` steps at `step_cost_ms` each, scaled up
    /// linearly with scene complexity, then derives throughput.
    pub fn run(&mut self, n_steps: u64, step_cost_ms: f64) {
        self.steps_run = n_steps;
        // One extra 0.1% cost per body/constraint beyond an empty scene.
        let complexity_scale = 1.0 + (self.body_count + self.constraint_count) as f64 / 1000.0;
        self.total_ms = n_steps as f64 * step_cost_ms * complexity_scale;
        // Guard against division by zero for a zero-cost run.
        if self.total_ms > 0.0 {
            self.throughput = self.steps_run as f64 / (self.total_ms / 1000.0);
        }
    }

    /// Throughput of the last run, in steps per second.
    pub fn steps_per_second(&self) -> f64 {
        self.throughput
    }
}
/// Collects per-body trajectory rows and exports them as CSV or JSON.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PyDataExporter {
    // One row per sample: (time, body_id, px, py, pz, vx, vy, vz, ke).
    rows: Vec<(f64, u32, f64, f64, f64, f64, f64, f64, f64)>,
}

impl PyDataExporter {
    /// Creates an empty exporter.
    pub fn new() -> Self {
        Self::default()
    }

    /// Appends one sample row for `body_id` at time `time`.
    // The stale `#[allow(clippy::too_many_arguments)]` was removed: the
    // method takes six arguments, below clippy's default threshold of seven.
    pub fn record(
        &mut self,
        time: f64,
        body_id: u32,
        position: [f64; 3],
        velocity: [f64; 3],
        ke: f64,
    ) {
        self.rows.push((
            time,
            body_id,
            position[0],
            position[1],
            position[2],
            velocity[0],
            velocity[1],
            velocity[2],
            ke,
        ));
    }

    /// Renders all rows as CSV with a header line, 6 decimal places per float.
    pub fn to_csv(&self) -> String {
        // Rough pre-size (header + ~9 cells per row) to avoid repeated growth.
        let mut out = String::with_capacity(64 + self.rows.len() * 96);
        out.push_str("time,body_id,px,py,pz,vx,vy,vz,ke\n");
        for &(t, id, px, py, pz, vx, vy, vz, ke) in &self.rows {
            out.push_str(&format!(
                "{t:.6},{id},{px:.6},{py:.6},{pz:.6},{vx:.6},{vy:.6},{vz:.6},{ke:.6}\n"
            ));
        }
        out
    }

    /// Serializes the rows as pretty-printed JSON.
    pub fn to_json(&self) -> Result<String, serde_json::Error> {
        serde_json::to_string_pretty(&self.rows)
    }

    /// Number of recorded rows.
    pub fn row_count(&self) -> usize {
        self.rows.len()
    }

    /// Discards all recorded rows.
    pub fn clear(&mut self) {
        self.rows.clear();
    }
}
// Unit tests for the analytics, tracking, and export types above.
#[cfg(test)]
mod tests {
    use super::*;

    // --- PySnapshot ---

    #[test]
    fn test_snapshot_default() {
        let s = PySnapshot::default();
        assert_eq!(s.time, 0.0);
        assert_eq!(s.kinetic_energy, 0.0);
    }

    #[test]
    fn test_snapshot_compute_total() {
        let mut s = PySnapshot {
            kinetic_energy: 3.0,
            potential_energy: 2.0,
            ..Default::default()
        };
        s.compute_total();
        assert!((s.total_energy - 5.0).abs() < 1e-12);
    }

    // --- PyPhysicsAnalytics ---

    #[test]
    fn test_analytics_snapshot() {
        let mut a = PyPhysicsAnalytics::new(10);
        for i in 0..5 {
            let s = PySnapshot {
                time: i as f64,
                kinetic_energy: i as f64,
                ..Default::default()
            };
            a.snapshot(s);
        }
        assert_eq!(a.snapshots.len(), 5);
    }

    #[test]
    fn test_analytics_evicts_old() {
        // Capacity 3 with 5 inserts: the two oldest snapshots are evicted.
        let mut a = PyPhysicsAnalytics::new(3);
        for i in 0..5 {
            let s = PySnapshot {
                kinetic_energy: i as f64,
                ..Default::default()
            };
            a.snapshot(s);
        }
        assert_eq!(a.snapshots.len(), 3);
    }

    #[test]
    fn test_analytics_metrics() {
        // KE samples 1..=4: mean 2.5, max 4.0.
        let mut a = PyPhysicsAnalytics::new(100);
        for i in 1..=4 {
            let s = PySnapshot {
                kinetic_energy: i as f64,
                ..Default::default()
            };
            a.snapshot(s);
        }
        let m = a.compute_metrics();
        assert!((m.mean_ke - 2.5).abs() < 1e-10);
        assert!((m.max_ke - 4.0).abs() < 1e-10);
    }

    #[test]
    fn test_analytics_is_empty() {
        let a = PyPhysicsAnalytics::new(10);
        assert!(a.is_empty());
    }

    // --- PyEnergyTracker ---

    #[test]
    fn test_energy_tracker_record() {
        let mut t = PyEnergyTracker::new(100, 0.5);
        t.record(0.0, 10.0, 5.0);
        t.record(0.1, 11.0, 5.0);
        assert_eq!(t.history.len(), 2);
    }

    #[test]
    fn test_energy_tracker_anomaly_detect() {
        // A 100x jump in total energy far exceeds the 10% threshold.
        let mut t = PyEnergyTracker::new(100, 0.1);
        t.record(0.0, 10.0, 0.0);
        t.record(0.1, 1000.0, 0.0);
        assert!(t.has_anomalies());
    }

    #[test]
    fn test_energy_tracker_no_anomaly() {
        let mut t = PyEnergyTracker::new(100, 0.5);
        t.record(0.0, 10.0, 0.0);
        t.record(0.1, 10.1, 0.0);
        assert!(!t.has_anomalies());
    }

    #[test]
    fn test_energy_tracker_mean_ke() {
        let mut t = PyEnergyTracker::new(100, 1.0);
        t.record(0.0, 4.0, 0.0);
        t.record(0.1, 6.0, 0.0);
        assert!((t.mean_ke() - 5.0).abs() < 1e-10);
    }

    #[test]
    fn test_energy_tracker_eviction() {
        let mut t = PyEnergyTracker::new(2, 10.0);
        t.record(0.0, 1.0, 0.0);
        t.record(0.1, 2.0, 0.0);
        t.record(0.2, 3.0, 0.0);
        assert_eq!(t.history.len(), 2);
    }

    // --- PyMomentumTracker ---

    #[test]
    fn test_momentum_tracker_conserved() {
        let mut t = PyMomentumTracker::new(0.01);
        t.record(0.0, 5.0, 2.0);
        t.record(0.1, 5.001, 2.001);
        assert!(t.linear_conserved());
    }

    #[test]
    fn test_momentum_tracker_not_conserved() {
        let mut t = PyMomentumTracker::new(0.001);
        t.record(0.0, 5.0, 2.0);
        t.record(0.1, 6.0, 2.0);
        assert!(!t.linear_conserved());
    }

    #[test]
    fn test_momentum_tracker_deviation() {
        let mut t = PyMomentumTracker::new(0.5);
        t.record(0.0, 10.0, 0.0);
        t.record(0.1, 10.5, 0.0);
        assert!((t.max_linear_deviation() - 0.5).abs() < 1e-10);
    }

    // --- PyCollisionStats ---

    #[test]
    fn test_collision_stats_record() {
        let mut s = PyCollisionStats::new(10, 100.0);
        s.record_collision(0, 1, 50.0, 0.01);
        assert_eq!(s.total_collisions, 1);
    }

    #[test]
    fn test_collision_stats_pair_count() {
        // Two collisions of the same pair share one bucket.
        let mut s = PyCollisionStats::new(10, 100.0);
        s.record_collision(0, 1, 10.0, 0.01);
        s.record_collision(0, 1, 20.0, 0.02);
        let pair = s.most_collided_pair().unwrap();
        assert_eq!(pair.2, 2);
    }

    #[test]
    fn test_collision_stats_depth() {
        let mut s = PyCollisionStats::new(10, 100.0);
        s.record_collision(0, 1, 10.0, 0.05);
        s.record_collision(2, 3, 10.0, 0.10);
        assert!((s.depth_mean - 0.075).abs() < 1e-10);
    }

    #[test]
    fn test_collision_stats_reset() {
        let mut s = PyCollisionStats::new(10, 100.0);
        s.record_collision(0, 1, 10.0, 0.01);
        s.reset();
        assert_eq!(s.total_collisions, 0);
    }

    // --- PySleepStats ---

    #[test]
    fn test_sleep_stats_record() {
        let mut s = PySleepStats::new(100);
        s.record(0.0, 3, 10, 0, 3);
        s.record(0.1, 5, 10, 2, 2);
        assert_eq!(s.total_wake_events, 2);
    }

    #[test]
    fn test_sleep_stats_mean_fraction() {
        // Fractions 0.5 and 0.3 average to 0.4.
        let mut s = PySleepStats::new(100);
        s.record(0.0, 5, 10, 0, 0);
        s.record(0.1, 3, 10, 0, 0);
        assert!((s.mean_sleeping_fraction() - 0.4).abs() < 1e-10);
    }

    // --- PyConstraintStats ---

    #[test]
    fn test_constraint_stats_record() {
        let mut s = PyConstraintStats::new();
        s.record(0.001, 5, 0.8);
        s.record(0.002, 7, 0.9);
        assert!((s.mean_iterations - 6.0).abs() < 1e-10);
    }

    #[test]
    fn test_constraint_stats_converging() {
        let mut s = PyConstraintStats::new();
        s.record(0.0001, 3, 1.0);
        assert!(s.is_converging(0.01));
    }

    // --- PyPerformanceTracker ---

    #[test]
    fn test_perf_tracker_mean() {
        // Totals are recomputed on record: 6.5 and 5.5 average to 6.0.
        let mut t = PyPerformanceTracker::new(100);
        t.record(PyStepTimings {
            broad_ms: 1.0,
            narrow_ms: 2.0,
            solver_ms: 3.0,
            integration_ms: 0.5,
            total_ms: 0.0,
        });
        t.record(PyStepTimings {
            broad_ms: 2.0,
            narrow_ms: 1.0,
            solver_ms: 2.0,
            integration_ms: 0.5,
            total_ms: 0.0,
        });
        assert!((t.mean_total_ms() - 6.0).abs() < 1e-10);
    }

    #[test]
    fn test_perf_tracker_csv() {
        let mut t = PyPerformanceTracker::new(10);
        t.record(PyStepTimings {
            broad_ms: 1.0,
            narrow_ms: 1.0,
            solver_ms: 1.0,
            integration_ms: 1.0,
            total_ms: 0.0,
        });
        let csv = t.export_csv();
        assert!(csv.contains("broad_ms"));
    }

    // --- PySimulationReport ---

    #[test]
    fn test_report_as_json() {
        let mut r = PySimulationReport::new();
        r.total_steps = 100;
        let json = r.as_json().unwrap();
        assert!(json.contains("total_steps"));
    }

    #[test]
    fn test_report_anomaly() {
        let mut r = PySimulationReport::new();
        r.energy.record(0.0, 10.0, 0.0);
        r.energy.record(0.1, 1000.0, 0.0);
        r.check_energy();
        assert!(!r.anomaly_list.is_empty());
    }

    // --- PyBenchmark ---

    #[test]
    fn test_benchmark_run() {
        let mut b = PyBenchmark::new(100, 50);
        b.run(1000, 1.0);
        assert_eq!(b.steps_run, 1000);
        assert!(b.throughput > 0.0);
    }

    #[test]
    fn test_benchmark_throughput() {
        // Empty scene: scale factor is 1.0, so 1000 steps at 1 ms each
        // yields ~1000 steps/second.
        let mut b = PyBenchmark::new(0, 0);
        b.run(1000, 1.0);
        assert!((b.steps_per_second() - 1000.0).abs() < 1.0);
    }

    // --- PyDataExporter ---

    #[test]
    fn test_exporter_csv() {
        let mut e = PyDataExporter::new();
        e.record(0.0, 1, [0.0, 1.0, 2.0], [0.1, 0.2, 0.3], 5.0);
        let csv = e.to_csv();
        assert!(csv.contains("time,body_id"));
        assert!(csv.contains("0.000000"));
    }

    #[test]
    fn test_exporter_json() {
        let mut e = PyDataExporter::new();
        e.record(0.0, 1, [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], 0.0);
        let json = e.to_json().unwrap();
        assert!(json.contains("["));
    }

    #[test]
    fn test_exporter_row_count() {
        let mut e = PyDataExporter::new();
        for i in 0..5 {
            e.record(i as f64, i as u32, [0.0; 3], [0.0; 3], 0.0);
        }
        assert_eq!(e.row_count(), 5);
    }

    #[test]
    fn test_exporter_clear() {
        let mut e = PyDataExporter::new();
        e.record(0.0, 0, [0.0; 3], [0.0; 3], 0.0);
        e.clear();
        assert_eq!(e.row_count(), 0);
    }
}