/// Number of histogram bins used to quantize incoming normalized values.
const BINS: usize = 16;

/// Sliding-window Shannon-entropy complexity estimator.
///
/// Each pushed sample is quantized into one of [`BINS`] bins; the estimator
/// maintains a histogram over the last `W` samples and reports the entropy
/// of that histogram. `W` is the window length in samples.
pub struct ComplexityEstimator<const W: usize> {
    // Ring buffer of the bin index assigned to each of the last `W` samples
    // (used to decrement the histogram when a sample leaves the window).
    bin_history: [u8; W],
    // Occupancy count per bin for the samples currently inside the window.
    histogram: [u16; BINS],
    // Next write position in `bin_history`; wraps modulo `W`.
    head: usize,
    // Number of samples observed so far, saturating at `W` once full.
    count: usize,
    // Width of one histogram bin: `max_val / BINS`.
    bin_width: f32,
    // Upper bound of the expected input range; values at or above it map to the top bin.
    max_val: f32,
}
impl<const W: usize> ComplexityEstimator<W> {
    /// Builds an estimator for inputs expected in `[0, max_val]`.
    ///
    /// A non-positive (or NaN) `max_val` falls back to `1.0` so the bin
    /// width stays strictly positive. (NaN fails the `> 0.0` test.)
    pub fn new(max_val: f32) -> Self {
        let bound = if max_val > 0.0 { max_val } else { 1.0 };
        Self {
            bin_history: [0; W],
            histogram: [0; BINS],
            head: 0,
            count: 0,
            bin_width: bound / BINS as f32,
            max_val: bound,
        }
    }

    /// Feeds one sample into the window and returns the updated
    /// entropy-based complexity estimate.
    pub fn push(&mut self, norm: f32) -> ComplexityResult {
        let incoming = self.quantize(norm);
        let window_full = self.count >= W;

        // Once the window is full, evict the sample this write overwrites.
        if window_full {
            let outgoing = self.bin_history[self.head] as usize;
            if outgoing < BINS && self.histogram[outgoing] > 0 {
                self.histogram[outgoing] -= 1;
            }
        }

        self.bin_history[self.head] = incoming as u8;
        if incoming < BINS {
            self.histogram[incoming] = self.histogram[incoming].saturating_add(1);
        }
        self.head = (self.head + 1) % W;
        if !window_full {
            self.count += 1;
        }

        let entropy = self.shannon_entropy();
        // A uniform histogram maximizes entropy at log2(BINS).
        let ceiling = log2_f32(BINS as f32);
        let score = if ceiling > 0.0 { entropy / ceiling } else { 0.0 };
        ComplexityResult {
            entropy,
            normalized_complexity: score,
            regime: ComplexityRegime::from_score(score),
        }
    }

    /// Maps a sample to its bin index in `0..BINS`.
    ///
    /// Values at/above `max_val` clamp to the top bin; non-positive and
    /// NaN values land in bin 0.
    #[inline]
    fn quantize(&self, norm: f32) -> usize {
        if norm >= self.max_val {
            BINS - 1
        } else if norm > 0.0 {
            ((norm / self.bin_width) as usize).min(BINS - 1)
        } else {
            // Covers norm <= 0.0 and NaN (NaN fails both guards above and
            // would quantize to 0 via the cast anyway).
            0
        }
    }

    /// Shannon entropy (bits) of the window's bin histogram.
    fn shannon_entropy(&self) -> f32 {
        if self.count == 0 {
            return 0.0;
        }
        let total = self.count as f32;
        self.histogram
            .iter()
            .filter(|&&c| c > 0)
            .map(|&c| {
                let p = c as f32 / total;
                -p * log2_f32(p)
            })
            .sum()
    }

    /// Empties the window; the configured range (`max_val`, `bin_width`) is kept.
    pub fn reset(&mut self) {
        self.head = 0;
        self.count = 0;
        self.bin_history = [0; W];
        self.histogram = [0; BINS];
    }
}
/// Result of one `ComplexityEstimator::push` call.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct ComplexityResult {
    /// Shannon entropy of the window's bin histogram, in bits.
    pub entropy: f32,
    /// Entropy divided by its maximum `log2(BINS)`; approximately in `[0, 1]`.
    pub normalized_complexity: f32,
    /// Coarse regime classification derived from `normalized_complexity`.
    pub regime: ComplexityRegime,
}
/// Coarse classification of a normalized complexity score
/// (thresholds 0.3 and 0.7; see `from_score`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum ComplexityRegime {
    /// Score below 0.3: samples concentrated in few bins.
    LowComplexity,
    /// Score in `[0.3, 0.7)`: partially spread histogram.
    TransitionalComplexity,
    /// Score of 0.7 or above: samples spread across many bins.
    HighComplexity,
}
impl ComplexityRegime {
    /// Buckets a normalized complexity score into a regime.
    ///
    /// Match guards mirror direct `<` comparisons, so NaN (which fails
    /// every guard) classifies as `HighComplexity` — same as the
    /// original `if`/`else` chain.
    pub fn from_score(score: f32) -> Self {
        match score {
            s if s < 0.3 => ComplexityRegime::LowComplexity,
            s if s < 0.7 => ComplexityRegime::TransitionalComplexity,
            _ => ComplexityRegime::HighComplexity,
        }
    }
}
/// Fast base-2 logarithm approximation (no libm required).
///
/// Decomposes `x = m * 2^e` with `m` in `[1, 2)` via IEEE-754 bit
/// manipulation, then approximates `log2(m)` with a cubic in `t = m - 1`.
/// Non-positive input (and NaN, which fails the `<= 0.0` test only if
/// positive-NaN semantics held — NaN comparisons are false, so NaN falls
/// through to the bit path; callers only pass positive probabilities)
/// returns the finite sentinel `-30.0` so entropy sums never see inf/NaN.
///
/// Fix: the previous coefficients (`-0.72135`, `0.48090`) summed with
/// `LOG2_E` to ~1.202, so the cubic returned ~1.2 instead of 1.0 as the
/// mantissa approached 2 — an error of up to ~0.2 and a discontinuity at
/// every power of two. The coefficients below are constrained so that
/// f(0) = 0, f(1) = 1, f'(0) = 1/ln 2, and f'(1) = 1/(2 ln 2), which keeps
/// the result continuous across octave boundaries (max abs error ~6e-3).
#[inline]
fn log2_f32(x: f32) -> f32 {
    if x <= 0.0 {
        return -30.0;
    }
    let bits = x.to_bits();
    // Unbiased exponent: floor(log2(x)) for normal floats.
    let exponent = ((bits >> 23) & 0xFF) as i32 - 127;
    // Force the exponent field to 0 (bias 127) so m lies in [1, 2).
    let mantissa_bits = (bits & 0x007F_FFFF) | 0x3F80_0000;
    let m = f32::from_bits(mantissa_bits);
    let t = m - 1.0;
    // Endpoint-constrained cubic fit of log2(1 + t) on [0, 1).
    let log2_m = t * (core::f32::consts::LOG2_E + t * (-0.606_737_5 + t * 0.164_042_5));
    log2_m + exponent as f32
}
/// Maps a triple of consecutive samples to its ordinal pattern index in `0..6`.
///
/// Ties resolve via `<=` (equal neighbors count as non-descending).
/// Index 0 is fully ascending, 5 is fully descending. NaN inputs fail all
/// comparisons and fall into the descending-side patterns, matching the
/// original nested `if` chain.
#[inline]
pub fn ordinal_pattern_3(a: f32, b: f32, c: f32) -> usize {
    // Dispatch on the three pairwise comparisons; the match is exhaustive
    // over all eight boolean combinations.
    match (a <= b, b <= c, a <= c) {
        (true, true, _) => 0,       // a <= b <= c
        (true, false, true) => 1,   // a <= c <  b
        (true, false, false) => 2,  // c <  a <= b
        (false, _, true) => 3,      // b <  a <= c
        (false, true, false) => 4,  // b <= c <  a
        (false, false, false) => 5, // c <  b <  a
    }
}
/// Sliding-window permutation-entropy estimator of order 3.
///
/// Stores the last `W` raw samples and evaluates the distribution of
/// ordinal patterns over consecutive triples.
pub struct PermutationEntropyEstimator<const W: usize> {
    // Ring buffer of the last `W` raw samples.
    buf: [f32; W],
    // Next write position in `buf`; wraps modulo `W`.
    head: usize,
    // Number of samples observed so far, saturating at `W`.
    count: usize,
}
impl<const W: usize> PermutationEntropyEstimator<W> {
    /// Creates an empty estimator (usable in `const` contexts).
    pub const fn new() -> Self {
        Self { buf: [0.0; W], head: 0, count: 0 }
    }

    /// Appends one sample to the window and returns the refreshed estimate.
    pub fn push(&mut self, norm: f32) -> PermEntropyResult {
        self.buf[self.head] = norm;
        self.head = (self.head + 1) % W;
        if self.count < W {
            self.count += 1;
        }
        self.compute()
    }

    /// Computes normalized permutation entropy over the current window
    /// without consuming new data.
    pub fn compute(&self) -> PermEntropyResult {
        let len = self.count.min(W);
        // At least one ordinal triple is required.
        if len < 3 {
            return PermEntropyResult {
                normalized_pe: 0.0,
                n_patterns: 0,
                regime: PermEntropyRegime::Insufficient,
            };
        }

        // Once the ring buffer has wrapped, the oldest sample sits at
        // `head`; before that, samples start at index 0.
        let oldest = if self.count < W { 0 } else { self.head };

        let mut counts = [0u32; 6];
        let mut total = 0u32;
        // Slide a width-3 window over the samples in chronological order.
        for step in 0..len - 2 {
            let a = self.buf[(oldest + step) % W];
            let b = self.buf[(oldest + step + 1) % W];
            let c = self.buf[(oldest + step + 2) % W];
            counts[ordinal_pattern_3(a, b, c)] += 1;
            total += 1;
        }

        // Defensive guard; unreachable given len >= 3 above, but kept so a
        // zero denominator can never slip through.
        if total == 0 {
            return PermEntropyResult {
                normalized_pe: 0.0,
                n_patterns: 0,
                regime: PermEntropyRegime::Insufficient,
            };
        }

        let denom = total as f32;
        let mut h = 0.0_f32;
        for &hits in counts.iter() {
            if hits > 0 {
                let p = hits as f32 / denom;
                h -= p * log2_f32(p);
            }
        }

        // 3! = 6 equiprobable patterns bound the entropy at log2(6).
        let ceiling = log2_f32(6.0);
        let npe = if ceiling > 0.0 { h / ceiling } else { 0.0 };
        PermEntropyResult {
            normalized_pe: npe,
            n_patterns: total,
            regime: PermEntropyRegime::from_score(npe),
        }
    }

    /// Clears all window state.
    pub fn reset(&mut self) {
        self.head = 0;
        self.count = 0;
        self.buf = [0.0; W];
    }
}
// `Default` mirrors `new()`: an empty window.
impl<const W: usize> Default for PermutationEntropyEstimator<W> {
    fn default() -> Self { Self::new() }
}
/// Result of a permutation-entropy computation.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct PermEntropyResult {
    /// Permutation entropy divided by its maximum `log2(6)`; approximately in `[0, 1]`.
    pub normalized_pe: f32,
    /// Number of ordinal triples evaluated (window length minus 2, or 0).
    pub n_patterns: u32,
    /// Regime classification; `Insufficient` when fewer than 3 samples are buffered.
    pub regime: PermEntropyRegime,
}
/// Classification of a normalized permutation entropy value
/// (thresholds 0.70 and 0.92; see `from_score`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PermEntropyRegime {
    /// Fewer than 3 samples buffered — no ordinal triple to evaluate.
    Insufficient,
    /// NPE below 0.70: strongly ordered structure dominates.
    HiddenDeterminism,
    /// NPE in `[0.70, 0.92)`: mix of order and randomness.
    PartiallyOrdered,
    /// NPE of 0.92 or above: close to uniform pattern usage (noise-like).
    StochasticNoise,
}
impl PermEntropyRegime {
pub fn from_score(npe: f32) -> Self {
if npe < 0.70 { PermEntropyRegime::HiddenDeterminism }
else if npe < 0.92 { PermEntropyRegime::PartiallyOrdered }
else { PermEntropyRegime::StochasticNoise }
}
pub fn label(&self) -> &'static str {
match self {
PermEntropyRegime::Insufficient => "PE:insufficient",
PermEntropyRegime::HiddenDeterminism => "PE:hidden_det",
PermEntropyRegime::PartiallyOrdered => "PE:partial_ord",
PermEntropyRegime::StochasticNoise => "PE:stochastic",
}
}
}
#[cfg(test)]
mod tests {
    use super::*;

    // The log2 approximation must be near-exact at powers of two, where the
    // mantissa path contributes nothing.
    #[test]
    fn log2_known_values() {
        assert!((log2_f32(1.0)).abs() < 0.01, "log2(1)={}", log2_f32(1.0));
        assert!((log2_f32(2.0) - 1.0).abs() < 0.05, "log2(2)={}", log2_f32(2.0));
        assert!((log2_f32(4.0) - 2.0).abs() < 0.1, "log2(4)={}", log2_f32(4.0));
        assert!((log2_f32(0.5) - (-1.0)).abs() < 0.05, "log2(0.5)={}", log2_f32(0.5));
    }

    // A constant signal occupies a single bin: entropy ~0.
    #[test]
    fn constant_signal_low_complexity() {
        let mut est = ComplexityEstimator::<20>::new(1.0);
        let mut last = ComplexityResult { entropy: 0.0, normalized_complexity: 0.0, regime: ComplexityRegime::LowComplexity };
        for _ in 0..20 {
            last = est.push(0.05); }
        assert!(last.normalized_complexity < 0.1,
        "constant signal must be low complexity: {}", last.normalized_complexity);
        assert_eq!(last.regime, ComplexityRegime::LowComplexity);
    }

    // A ramp over [0, 1) spreads samples evenly across bins: entropy near max.
    #[test]
    fn spread_signal_high_complexity() {
        let mut est = ComplexityEstimator::<32>::new(1.0);
        let mut last = ComplexityResult { entropy: 0.0, normalized_complexity: 0.0, regime: ComplexityRegime::LowComplexity };
        for i in 0..32 {
            let norm = (i as f32 / 32.0) * 0.99;
            last = est.push(norm);
        }
        assert!(last.normalized_complexity > 0.5,
        "spread signal must be high complexity: {}", last.normalized_complexity);
    }

    // Entropy should increase when a flat signal starts ramping.
    #[test]
    fn complexity_rises_during_regime_change() {
        let mut est = ComplexityEstimator::<10>::new(1.0);
        for _ in 0..10 {
            est.push(0.05);
        }
        let baseline = est.push(0.05);
        for i in 0..10 {
            est.push(0.05 + i as f32 * 0.08);
        }
        let after = est.push(0.5);
        assert!(after.normalized_complexity > baseline.normalized_complexity,
        "complexity must rise during regime change: {} -> {}",
        baseline.normalized_complexity, after.normalized_complexity);
    }

    // After reset, a single observation yields (near-)zero entropy.
    #[test]
    fn reset_clears() {
        let mut est = ComplexityEstimator::<10>::new(1.0);
        for _ in 0..10 { est.push(0.5); }
        est.reset();
        let r = est.push(0.5);
        assert!(r.entropy < 0.1, "after reset, single observation should have near-zero entropy");
    }

    #[test]
    fn ordinal_pattern_rising() {
        assert_eq!(ordinal_pattern_3(1.0, 2.0, 3.0), 0, "rising: 012");
    }

    #[test]
    fn ordinal_pattern_falling() {
        assert_eq!(ordinal_pattern_3(3.0, 2.0, 1.0), 5, "falling: 210");
    }

    // Fixed: the old version duplicated (1.0, 3.0, 2.0), held 7 entries, and
    // only asserted indices 0 and 6, leaving patterns 1-4 unchecked. Each of
    // the six distinct permutations is now asserted against its index.
    #[test]
    fn ordinal_pattern_all_six() {
        let cases: [(f32, f32, f32); 6] = [
            (1.0, 2.0, 3.0), // pattern 0: a <= b <= c
            (1.0, 3.0, 2.0), // pattern 1: a <= c <  b
            (2.0, 3.0, 1.0), // pattern 2: c <  a <= b
            (2.0, 1.0, 3.0), // pattern 3: b <  a <= c
            (3.0, 1.0, 2.0), // pattern 4: b <= c <  a
            (3.0, 2.0, 1.0), // pattern 5: c <  b <  a
        ];
        for (expected, &(a, b, c)) in cases.iter().enumerate() {
            assert_eq!(ordinal_pattern_3(a, b, c), expected,
            "pattern for ({}, {}, {})", a, b, c);
        }
    }

    // A period-3 signal uses few ordinal patterns: low normalized PE.
    #[test]
    fn pe_strict_periodic_is_low() {
        let mut pe = PermutationEntropyEstimator::<12>::new();
        for _ in 0..4 {
            pe.push(0.1);
            pe.push(0.2);
            pe.push(0.3);
        }
        let r = pe.compute();
        assert!(r.normalized_pe < 0.70,
        "period-3 signal must be in HiddenDeterminism: NPE={}", r.normalized_pe);
        assert_eq!(r.regime, PermEntropyRegime::HiddenDeterminism);
    }

    // An irregular sequence should score clearly above the deterministic band.
    #[test]
    fn pe_shuffled_tends_high() {
        let mut pe = PermutationEntropyEstimator::<24>::new();
        let vals = [0.1f32, 0.3, 0.2, 0.5, 0.1, 0.4, 0.3, 0.1, 0.5, 0.2,
        0.4, 0.1, 0.2, 0.5, 0.3, 0.2, 0.4, 0.1, 0.3, 0.5,
        0.2, 0.3, 0.1, 0.4];
        for &v in &vals { pe.push(v); }
        let r = pe.compute();
        assert!(r.normalized_pe > 0.5,
        "shuffled sequence must be moderately complex: {}", r.normalized_pe);
    }

    // Fewer than 3 samples cannot form a triple.
    #[test]
    fn pe_insufficient_for_short_window() {
        let mut pe = PermutationEntropyEstimator::<10>::new();
        pe.push(0.1);
        pe.push(0.2);
        let r = pe.compute();
        assert_eq!(r.regime, PermEntropyRegime::Insufficient);
    }

    #[test]
    fn pe_reset_clears_state() {
        let mut pe = PermutationEntropyEstimator::<10>::new();
        for i in 0..10 { pe.push(i as f32 * 0.1); }
        pe.reset();
        let r = pe.compute();
        assert_eq!(r.regime, PermEntropyRegime::Insufficient);
    }
}