use crate::trade::Tick;
use smallvec::SmallVec;
use super::drawdown::compute_max_drawdown_and_runup;
use super::ith::{bear_ith, bull_ith};
use super::normalize::{
normalize_cv, normalize_drawdown, normalize_epochs, normalize_excess, normalize_runup,
};
use crate::normalization_lut::soft_clamp_hurst_lut;
/// Toggles for the more expensive intra-bar estimators.
///
/// Both flags only enable the computation; each feature additionally requires
/// a minimum number of trades in the bar (>= 64 for Hurst, >= 60 for
/// permutation entropy) before it is actually produced.
#[derive(Debug, Clone)]
pub struct IntraBarConfig {
    // Enable the Hurst exponent estimate (effective only when n >= 64).
    pub compute_hurst: bool,
    // Enable permutation entropy (effective only when n >= 60).
    pub compute_permutation_entropy: bool,
}
impl Default for IntraBarConfig {
    /// Default configuration computes every optional feature.
    fn default() -> Self {
        Self {
            compute_hurst: true,
            compute_permutation_entropy: true,
        }
    }
}
/// ln(3!) = ln(6): the maximum Shannon entropy over the 6 ordinal patterns of
/// embedding dimension m = 3, used to normalize permutation entropy into [0, 1].
/// (Previous value 1.791_759_469_228_327 was mistyped in the last three digits;
/// ln 6 = 1.791759469228055…)
const MAX_ENTROPY_M3: f64 = 1.791_759_469_228_055;
/// Feature set computed from the individual trades of a single bar.
///
/// Every field is an `Option` so features whose preconditions fail (too few
/// trades, invalid first price, or disabled via `IntraBarConfig`) stay unset
/// rather than carrying sentinel values.
#[derive(Debug, Clone, Default)]
pub struct IntraBarFeatures {
    // ITH epoch counts normalized by trade count (bull / bear passes).
    pub intra_bull_epoch_density: Option<f64>,
    pub intra_bear_epoch_density: Option<f64>,
    // Sum of ITH excess gains, squashed through normalize_excess.
    pub intra_bull_excess_gain: Option<f64>,
    pub intra_bear_excess_gain: Option<f64>,
    // Coefficient of variation of ITH epoch intervals, normalized.
    pub intra_bull_cv: Option<f64>,
    pub intra_bear_cv: Option<f64>,
    // Max drawdown / run-up of the first-price-normalized series, normalized.
    pub intra_max_drawdown: Option<f64>,
    pub intra_max_runup: Option<f64>,
    // Number of ticks in the bar.
    pub intra_trade_count: Option<u32>,
    // Order-flow imbalance (buy_vol - sell_vol) / total_vol, in [-1, 1].
    pub intra_ofi: Option<f64>,
    // Last minus first tick timestamp (units of Tick::timestamp; the `_us`
    // suffix suggests microseconds — matches the 1e-6 scaling used below).
    pub intra_duration_us: Option<i64>,
    // Trades per second; falls back to the raw count for zero duration.
    pub intra_intensity: Option<f64>,
    // VWAP's position within the bar's high-low range, in [0, 1].
    pub intra_vwap_position: Option<f64>,
    // (buy_count - sell_count) / total_count, in [-1, 1].
    pub intra_count_imbalance: Option<f64>,
    // Bar return divided by normalized signed-volume imbalance.
    pub intra_kyle_lambda: Option<f64>,
    // (sigma - mu) / (sigma + mu) of inter-trade time intervals.
    pub intra_burstiness: Option<f64>,
    // Skewness / excess kurtosis of per-tick volumes (need n >= 3).
    pub intra_volume_skew: Option<f64>,
    pub intra_volume_kurt: Option<f64>,
    // Kaufman efficiency ratio |net move| / path length, in [0, 1].
    pub intra_kaufman_er: Option<f64>,
    // Garman-Klass-style OHLC volatility estimate.
    pub intra_garman_klass_vol: Option<f64>,
    // Hurst exponent estimate (requires n >= 64 and config flag).
    pub intra_hurst: Option<f64>,
    // Normalized permutation entropy (requires n >= 60 and config flag).
    pub intra_permutation_entropy: Option<f64>,
}
#[cold]
#[inline(never)]
fn intra_bar_zero_trades() -> IntraBarFeatures {
IntraBarFeatures {
intra_trade_count: Some(0),
..Default::default()
}
}
#[cold]
#[inline(never)]
fn intra_bar_single_trade() -> IntraBarFeatures {
IntraBarFeatures {
intra_trade_count: Some(1),
intra_duration_us: Some(0),
intra_intensity: Some(0.0),
intra_ofi: Some(0.0),
..Default::default()
}
}
#[cold]
#[inline(never)]
fn intra_bar_invalid_price(n: usize) -> IntraBarFeatures {
IntraBarFeatures {
intra_trade_count: Some(n as u32),
..Default::default()
}
}
/// Convenience entry point that allocates fresh scratch buffers per call.
/// Hot callers should prefer `compute_intra_bar_features_with_scratch` and
/// reuse the buffers across bars.
#[inline]
pub fn compute_intra_bar_features(trades: &[Tick]) -> IntraBarFeatures {
    let mut prices_buf = SmallVec::<[f64; 64]>::new();
    let mut volumes_buf = SmallVec::<[f64; 64]>::new();
    compute_intra_bar_features_with_scratch(trades, &mut prices_buf, &mut volumes_buf)
}
/// Scratch-buffer variant using the default `IntraBarConfig` (all optional
/// features enabled).
#[inline]
pub fn compute_intra_bar_features_with_scratch(
    trades: &[Tick],
    scratch_prices: &mut SmallVec<[f64; 64]>,
    scratch_volumes: &mut SmallVec<[f64; 64]>,
) -> IntraBarFeatures {
    let config = IntraBarConfig::default();
    compute_intra_bar_features_with_config(trades, scratch_prices, scratch_volumes, &config)
}
/// Computes the full intra-bar feature set for one bar of trades.
///
/// `scratch_prices` receives the raw trade prices; `scratch_volumes` is
/// *repurposed* as the buffer of prices normalized by the first price (actual
/// volumes are re-derived inside `compute_statistical_features`). Both buffers
/// are cleared and refilled on every call, so they can be recycled across bars.
#[inline]
pub fn compute_intra_bar_features_with_config(
    trades: &[Tick],
    scratch_prices: &mut SmallVec<[f64; 64]>,
    scratch_volumes: &mut SmallVec<[f64; 64]>,
    config: &IntraBarConfig,
) -> IntraBarFeatures {
    let n = trades.len();
    // Degenerate bars take dedicated cold paths.
    if n == 0 {
        return intra_bar_zero_trades();
    }
    if n == 1 {
        return intra_bar_single_trade();
    }
    scratch_prices.clear();
    scratch_prices.reserve(n);
    scratch_prices.extend(trades.iter().map(|t| t.price.to_f64()));
    let first_price = scratch_prices[0];
    if !first_price.is_finite() || first_price <= 0.0 {
        return intra_bar_invalid_price(n);
    }
    // Scale-free series: every price divided by the first one.
    let inv_first_price = 1.0 / first_price;
    scratch_volumes.clear();
    scratch_volumes.reserve(n);
    scratch_volumes.extend(scratch_prices.iter().map(|&p| p * inv_first_price));
    let normalized = scratch_volumes;
    let (max_dd, max_ru) = compute_max_drawdown_and_runup(normalized);
    let bull_result = bull_ith(normalized, max_dd);
    let bear_result = bear_ith(normalized, max_ru);
    let bull_excess_sum: f64 = bull_result.excess_gains.iter().sum();
    let bear_excess_sum: f64 = bear_result.excess_gains.iter().sum();
    let stats = compute_statistical_features(trades, scratch_prices);
    // The pricier estimators are gated both by config flags and by minimum
    // sample counts (64 for Hurst, 60 for permutation entropy).
    let hurst =
        (config.compute_hurst && n >= 64).then(|| compute_hurst_rescaled_range(normalized));
    let pe = (config.compute_permutation_entropy && n >= 60)
        .then(|| compute_permutation_entropy(scratch_prices, 3));
    IntraBarFeatures {
        intra_bull_epoch_density: Some(normalize_epochs(bull_result.num_of_epochs, n)),
        intra_bear_epoch_density: Some(normalize_epochs(bear_result.num_of_epochs, n)),
        intra_bull_excess_gain: Some(normalize_excess(bull_excess_sum)),
        intra_bear_excess_gain: Some(normalize_excess(bear_excess_sum)),
        intra_bull_cv: Some(normalize_cv(bull_result.intervals_cv)),
        intra_bear_cv: Some(normalize_cv(bear_result.intervals_cv)),
        intra_max_drawdown: Some(normalize_drawdown(bull_result.max_drawdown)),
        intra_max_runup: Some(normalize_runup(bear_result.max_runup)),
        intra_trade_count: Some(n as u32),
        intra_ofi: Some(stats.ofi),
        intra_duration_us: Some(stats.duration_us),
        intra_intensity: Some(stats.intensity),
        intra_vwap_position: Some(stats.vwap_position),
        intra_count_imbalance: Some(stats.count_imbalance),
        intra_kyle_lambda: stats.kyle_lambda,
        intra_burstiness: stats.burstiness,
        intra_volume_skew: stats.volume_skew,
        intra_volume_kurt: stats.volume_kurt,
        intra_kaufman_er: stats.kaufman_er,
        intra_garman_klass_vol: Some(stats.garman_klass_vol),
        intra_hurst: hurst,
        intra_permutation_entropy: pe,
    }
}
/// Intermediate bundle of order-flow / microstructure statistics produced by
/// `compute_statistical_features` and unpacked into `IntraBarFeatures`.
struct StatisticalFeatures {
    // Volume-based order-flow imbalance in [-1, 1].
    ofi: f64,
    // Last minus first tick timestamp (Tick::timestamp units).
    duration_us: i64,
    // Trades per second (raw count when duration is zero).
    intensity: f64,
    // VWAP position within the high-low range, [0, 1].
    vwap_position: f64,
    // Count-based imbalance in [-1, 1].
    count_imbalance: f64,
    // None when the volume imbalance is ~zero (undefined impact).
    kyle_lambda: Option<f64>,
    // None for n < 3 or degenerate interval statistics.
    burstiness: Option<f64>,
    // Volume moments; None for n < 3 or near-zero variance.
    volume_skew: Option<f64>,
    volume_kurt: Option<f64>,
    // None for n < 2.
    kaufman_er: Option<f64>,
    // 0.0 when the bar's range is degenerate.
    garman_klass_vol: f64,
}
/// Computes order-flow / microstructure statistics for one bar.
///
/// `prices` must be the pre-extracted trade prices, aligned 1:1 with `trades`;
/// callers in this module guarantee `trades.len() >= 2` and `prices.len() == n`.
///
/// Fix vs. previous revision: the Garman-Klass guard now requires `low > 0.0`
/// (it previously only checked `high > 0.0`, so `low == 0` produced
/// `ln(high/0) = +inf` and an infinite volatility; non-positive lows now yield
/// the explicit 0.0 fallback).
fn compute_statistical_features(trades: &[Tick], prices: &[f64]) -> StatisticalFeatures {
    let n = trades.len();
    debug_assert_eq!(n, prices.len(), "trades and prices must be aligned");
    let mut cached_volumes = SmallVec::<[f64; 128]>::with_capacity(n);
    let mut buy_vol = 0.0_f64;
    let mut sell_vol = 0.0_f64;
    let mut buy_count = 0_u32;
    let mut sell_count = 0_u32;
    let mut total_turnover = 0.0_f64;
    let mut sum_vol = 0.0_f64;
    let mut high = f64::NEG_INFINITY;
    let mut low = f64::INFINITY;
    // Single pass: turnover, side-split volume/counts, high-low range; volumes
    // are cached for the central-moment pass below.
    for (trade, &price) in trades.iter().zip(prices.iter()) {
        let vol = trade.volume.to_f64();
        cached_volumes.push(vol);
        total_turnover += price * vol;
        sum_vol += vol;
        if trade.is_buyer_maker {
            // Buyer was the maker => the aggressor was a seller.
            sell_vol += vol;
            sell_count += trade.individual_trade_count() as u32;
        } else {
            buy_vol += vol;
            buy_count += trade.individual_trade_count() as u32;
        }
        high = high.max(price);
        low = low.min(price);
    }
    let mean_vol = if n > 0 { sum_vol / n as f64 } else { 0.0 };
    // Raw central moments (2nd..4th) of the volume distribution.
    let mut m2_vol = 0.0_f64;
    let mut m3_vol = 0.0_f64;
    let mut m4_vol = 0.0_f64;
    for &vol in &cached_volumes {
        let d = vol - mean_vol;
        let d2 = d * d;
        m2_vol += d2;
        m3_vol += d2 * d;
        m4_vol += d2 * d2;
    }
    let total_vol = buy_vol + sell_vol;
    let total_count = (buy_count + sell_count) as f64;
    // Order-flow imbalance in [-1, 1]; 0 when the bar carried no volume.
    let ofi = if total_vol > f64::EPSILON {
        (buy_vol - sell_vol) / total_vol
    } else {
        0.0
    };
    let first_ts = trades.first().map(|t| t.timestamp).unwrap_or(0);
    let last_ts = trades.last().map(|t| t.timestamp).unwrap_or(0);
    let duration_us = last_ts - first_ts;
    let duration_sec = duration_us as f64 * 1e-6;
    // Trades per second; an "instant" bar falls back to the raw count.
    let intensity = if duration_sec > f64::EPSILON {
        n as f64 / duration_sec
    } else {
        n as f64
    };
    let vwap = if total_vol > f64::EPSILON {
        total_turnover / total_vol
    } else {
        prices.first().copied().unwrap_or(0.0)
    };
    let range = high - low;
    // Where VWAP sits inside the bar's range: 0 = at the low, 1 = at the high.
    let vwap_position = if range > f64::EPSILON {
        ((vwap - low) / range).clamp(0.0, 1.0)
    } else {
        0.5
    };
    let count_imbalance = if total_count > f64::EPSILON {
        (buy_count as f64 - sell_count as f64) / total_count
    } else {
        0.0
    };
    // Kyle's lambda: bar return per unit of normalized volume imbalance.
    // None when the imbalance is ~zero (impact undefined).
    let kyle_lambda = if n >= 2 && total_vol > f64::EPSILON {
        let first_price = prices[0];
        let last_price = prices[n - 1];
        let price_return = if first_price.abs() > f64::EPSILON {
            (last_price - first_price) / first_price
        } else {
            0.0
        };
        let normalized_imbalance = (buy_vol - sell_vol) / total_vol;
        if normalized_imbalance.abs() > f64::EPSILON {
            Some(price_return / normalized_imbalance)
        } else {
            None
        }
    } else {
        None
    };
    // Burstiness coefficient (sigma - mu) / (sigma + mu) of inter-trade times.
    // None when the intervals are degenerate (zero spread).
    let burstiness = if n >= 3 {
        let mut intervals = SmallVec::<[f64; 64]>::new();
        for pair in trades.windows(2) {
            intervals.push((pair[1].timestamp - pair[0].timestamp) as f64);
        }
        if intervals.len() >= 2 {
            let inv_len = 1.0 / intervals.len() as f64;
            let mean_tau: f64 = intervals.iter().sum::<f64>() * inv_len;
            let variance: f64 = intervals
                .iter()
                .map(|&x| {
                    let d = x - mean_tau;
                    d * d
                })
                .sum::<f64>()
                * inv_len;
            let std_tau = variance.sqrt();
            if std_tau <= f64::EPSILON {
                None
            } else if (std_tau + mean_tau).abs() > f64::EPSILON {
                Some((std_tau - mean_tau) / (std_tau + mean_tau))
            } else {
                None
            }
        } else {
            None
        }
    } else {
        None
    };
    // Population skewness and excess kurtosis of volumes.
    let (volume_skew, volume_kurt) = if n >= 3 {
        let inv_n = 1.0 / n as f64;
        let m2_norm = m2_vol * inv_n;
        let m3_norm = m3_vol * inv_n;
        let m4_norm = m4_vol * inv_n;
        let std_v = m2_norm.sqrt();
        if std_v > f64::EPSILON {
            let std_v2 = std_v * std_v;
            (
                Some(m3_norm / (std_v2 * std_v)),
                Some(m4_norm / (std_v2 * std_v2) - 3.0),
            )
        } else {
            (None, None)
        }
    } else {
        (None, None)
    };
    // Kaufman efficiency ratio: |net move| / total path length, in [0, 1].
    let kaufman_er = if n >= 2 {
        let net_move = (prices[n - 1] - prices[0]).abs();
        let path_length: f64 = prices.windows(2).map(|w| (w[1] - w[0]).abs()).sum();
        if path_length > f64::EPSILON {
            Some((net_move / path_length).clamp(0.0, 1.0))
        } else {
            // Zero path length (constant prices): treat as perfectly efficient.
            Some(1.0)
        }
    } else {
        None
    };
    // Garman-Klass-style OHLC volatility.
    // NOTE(review): the canonical Garman-Klass close-open coefficient is
    // (2 ln 2 - 1) ~= 0.3863; 0.6137 = 1 - 0.3863 — confirm this is intentional.
    const GK_SCALE: f64 = 0.6137;
    let open = prices[0];
    let close = prices[n - 1];
    let garman_klass_vol = if high > low && low > 0.0 && open > 0.0 {
        let hl_ratio = (high / low).ln();
        let co_ratio = (close / open).ln();
        let gk_var = 0.5 * hl_ratio * hl_ratio - GK_SCALE * co_ratio * co_ratio;
        // Clamp negative variance estimates to zero before the sqrt.
        gk_var.max(0.0).sqrt()
    } else {
        0.0
    };
    StatisticalFeatures {
        ofi,
        duration_us,
        intensity,
        vwap_position,
        count_imbalance,
        kyle_lambda,
        burstiness,
        volume_skew,
        volume_kurt,
        kaufman_er,
        garman_klass_vol,
    }
}
/// Estimates a Hurst-like exponent from the log-log scaling of segment
/// fluctuations of the cumulative deviation profile.
///
/// NOTE(review): despite the "rescaled range" name, each segment is linearly
/// detrended via least squares (residual SS = SSyy - SSxy^2/SSxx below), which
/// is a DFA-style estimator rather than classic R/S — confirm the intended
/// method.
///
/// Returns 0.5 (random-walk neutral) when fewer than 64 samples are supplied
/// or when fewer than 2 scales yield usable fluctuations; the fitted slope is
/// soft-clamped through `soft_clamp_hurst_lut`.
fn compute_hurst_rescaled_range(prices: &[f64]) -> f64 {
    let n = prices.len();
    if n < 64 {
        return 0.5;
    }
    // Cumulative deviation profile: y_i = sum_{j <= i} (p_j - mean).
    let mean: f64 = prices.iter().sum::<f64>() / n as f64;
    let mut y = SmallVec::<[f64; 256]>::new();
    let mut cumsum = 0.0;
    for &p in prices {
        cumsum += p - mean;
        y.push(cumsum);
    }
    // Scales swept geometrically (factor 1.5) from max(n/4, 8) up to n/2,
    // stopping once fewer than 2 whole segments fit.
    let min_scale = (n / 4).max(8);
    let max_scale = n / 2;
    let mut log_scales = SmallVec::<[f64; 12]>::new();
    let mut log_fluctuations = SmallVec::<[f64; 12]>::new();
    let mut scale = min_scale;
    while scale <= max_scale {
        let num_segments = n / scale;
        if num_segments < 2 {
            break;
        }
        // Closed forms for the regressor x = 0..scale-1:
        // mean(x) = (scale-1)/2 and sum((x - mean)^2) = s(s^2 - 1)/12.
        let x_mean = (scale - 1) as f64 / 2.0;
        let scale_f64 = scale as f64;
        let inv_scale = 1.0 / scale_f64;
        let xx_sum = scale_f64 * (scale_f64 * scale_f64 - 1.0) / 12.0;
        let mut total_fluctuation = 0.0;
        let mut segment_count = 0;
        for seg in 0..num_segments {
            let start = seg * scale;
            let end = start + scale;
            if end > n {
                break;
            }
            // Accumulate the regression sums over this segment.
            let mut xy_sum = 0.0;
            let mut y_sum = 0.0;
            let mut sum_y_sq = 0.0;
            for (i, &yi) in y[start..end].iter().enumerate() {
                let delta_x = i as f64 - x_mean;
                xy_sum += delta_x * yi;
                y_sum += yi;
                sum_y_sq += yi * yi;
            }
            // Centered sum of squares of y within the segment.
            let yy_sum = sum_y_sq - y_sum * y_sum * inv_scale;
            // RMS of the residual after removing the least-squares linear
            // trend; the max(0.0) guards tiny negative values from rounding.
            let rms = if xx_sum > f64::EPSILON {
                let rms_sq = yy_sum - xy_sum * xy_sum / xx_sum;
                (rms_sq.max(0.0) * inv_scale).sqrt()
            } else {
                (yy_sum.max(0.0) * inv_scale).sqrt()
            };
            total_fluctuation += rms;
            segment_count += 1;
        }
        if segment_count > 0 {
            let avg_fluctuation = total_fluctuation / segment_count as f64;
            // Skip near-zero fluctuations: their log would dominate the fit.
            if avg_fluctuation > f64::EPSILON {
                log_scales.push((scale as f64).ln());
                log_fluctuations.push(avg_fluctuation.ln());
            }
        }
        scale = (scale as f64 * 1.5).ceil() as usize;
    }
    if log_scales.len() < 2 {
        return 0.5;
    }
    // Least-squares slope of ln(fluctuation) vs ln(scale) = Hurst estimate.
    let n_points = log_scales.len() as f64;
    let inv_n_points = 1.0 / n_points;
    let x_mean: f64 = log_scales.iter().sum::<f64>() * inv_n_points;
    let y_mean: f64 = log_fluctuations.iter().sum::<f64>() * inv_n_points;
    let mut xy_sum = 0.0;
    let mut xx_sum = 0.0;
    for (&x, &y) in log_scales.iter().zip(log_fluctuations.iter()) {
        let dx = x - x_mean;
        xy_sum += dx * (y - y_mean);
        xx_sum += dx * dx;
    }
    let hurst = if xx_sum.abs() > f64::EPSILON {
        xy_sum / xx_sum
    } else {
        0.5
    };
    soft_clamp_hurst_lut(hurst)
}
/// Normalized permutation entropy of `prices` at embedding dimension
/// `embed_dim`, in [0, 1]. Returns the neutral 0.5 when the series is too
/// short (fewer than m! + m - 1 samples) or the dimension is degenerate.
fn compute_permutation_entropy(prices: &[f64], embed_dim: usize) -> f64 {
    let len = prices.len();
    let required = factorial(embed_dim) + embed_dim - 1;
    if len < required || embed_dim < 2 {
        return 0.5;
    }
    let max_patterns = factorial(embed_dim);
    if max_patterns > 24 {
        // The fixed table below only covers m <= 4 (4! = 24 patterns).
        return fallback_permutation_entropy(prices, embed_dim);
    }
    let mut pattern_counts = [0usize; 24];
    let num_patterns = len - embed_dim + 1;
    if embed_dim == 3 {
        // Fast path: classify each length-3 window by direct comparisons
        // instead of sorting. The mapping matches the m=3 Lehmer code of
        // `ordinal_indices_to_pattern_index`.
        for w in prices.windows(3) {
            let (p0, p1, p2) = (w[0], w[1], w[2]);
            let idx = match (p0 <= p1, p1 <= p2, p0 <= p2) {
                (true, true, _) => 0,
                (true, false, true) => 1,
                (true, false, false) => 4,
                (false, _, true) => 2,
                (false, true, false) => 3,
                (false, false, false) => 5,
            };
            pattern_counts[idx] += 1;
        }
    } else {
        let mut indices = SmallVec::<[usize; 4]>::new();
        for window in prices.windows(embed_dim) {
            if window.windows(2).all(|pair| pair[0] <= pair[1]) {
                // Non-decreasing windows are the identity pattern (index 0).
                pattern_counts[0] += 1;
            } else {
                indices.clear();
                indices.extend(0..embed_dim);
                indices.sort_by(|&a, &b| {
                    window[a]
                        .partial_cmp(&window[b])
                        .unwrap_or(std::cmp::Ordering::Equal)
                });
                pattern_counts[ordinal_indices_to_pattern_index(&indices)] += 1;
            }
        }
    }
    // Shannon entropy of the empirical pattern distribution.
    let inv_num_patterns = 1.0 / num_patterns as f64;
    let entropy: f64 = pattern_counts[..max_patterns]
        .iter()
        .filter(|&&count| count > 0)
        .map(|&count| {
            let p = count as f64 * inv_num_patterns;
            -p * p.ln()
        })
        .sum();
    // Normalize by the maximum achievable entropy ln(m!).
    let max_entropy = if embed_dim == 3 {
        MAX_ENTROPY_M3
    } else {
        (max_patterns as f64).ln()
    };
    if max_entropy > f64::EPSILON {
        (entropy / max_entropy).clamp(0.0, 1.0)
    } else {
        0.5
    }
}
/// Maps a sorted-order index permutation (the argsort of a window) to a unique
/// pattern index in `0..len!` via a Lehmer-code-style encoding: for each
/// position, count how many later entries are smaller and weight by the mixed
/// radix factor ((len-1-pos)! collapsed to the constants 6/2/1 below).
///
/// Accepts any `&[usize]` slice; existing `&SmallVec<[usize; 4]>` call sites
/// keep working via deref coercion. Lengths other than 2..=4 return 0.
#[inline]
fn ordinal_indices_to_pattern_index(indices: &[usize]) -> usize {
    match indices.len() {
        2 => usize::from(indices[0] > indices[1]),
        3 => {
            let lesser_0 =
                (indices[1] < indices[0]) as usize + (indices[2] < indices[0]) as usize;
            let lesser_1 = (indices[2] < indices[1]) as usize;
            lesser_0 * 2 + lesser_1
        }
        4 => {
            let lesser_0 = (indices[1] < indices[0]) as usize
                + (indices[2] < indices[0]) as usize
                + (indices[3] < indices[0]) as usize;
            let lesser_1 =
                (indices[2] < indices[1]) as usize + (indices[3] < indices[1]) as usize;
            let lesser_2 = (indices[3] < indices[2]) as usize;
            lesser_0 * 6 + lesser_1 * 2 + lesser_2
        }
        // Unsupported lengths collapse to pattern 0 (callers only use 2..=4).
        _ => 0,
    }
}
/// Permutation entropy for embedding dimensions whose pattern count exceeds
/// the fixed 24-slot table in `compute_permutation_entropy` (i.e. m >= 5);
/// counts ordinal patterns in a `HashMap`. Returns a value in [0, 1], or the
/// neutral 0.5 for degenerate inputs.
///
/// Fixes vs. previous revision:
/// - guard added for `m < 2 || n < m` (the old `n - m + 1` underflowed when
///   the series was shorter than the window);
/// - patterns are keyed by the index permutation itself instead of a
///   digit-concatenated string, which was ambiguous for m > 10 (e.g. the
///   fragments "1","10" and "11","0" concatenate identically).
fn fallback_permutation_entropy(prices: &[f64], m: usize) -> f64 {
    let n = prices.len();
    if m < 2 || n < m {
        return 0.5;
    }
    let num_patterns = n - m + 1;
    let mut pattern_counts: std::collections::HashMap<Vec<usize>, usize> =
        std::collections::HashMap::new();
    for window in prices.windows(m) {
        // Argsort of the window yields its ordinal pattern.
        let mut indices: Vec<usize> = (0..m).collect();
        indices.sort_by(|&a, &b| {
            window[a]
                .partial_cmp(&window[b])
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        *pattern_counts.entry(indices).or_insert(0usize) += 1;
    }
    // Shannon entropy of the empirical pattern distribution.
    let inv_num_patterns = 1.0 / num_patterns as f64;
    let mut entropy = 0.0;
    for &count in pattern_counts.values() {
        if count > 0 {
            let p = count as f64 * inv_num_patterns;
            entropy -= p * p.ln();
        }
    }
    // Normalize by the maximum achievable entropy ln(m!).
    let max_entropy = if m == 3 {
        MAX_ENTROPY_M3
    } else {
        (factorial(m) as f64).ln()
    };
    if max_entropy > f64::EPSILON {
        (entropy / max_entropy).clamp(0.0, 1.0)
    } else {
        0.5
    }
}
/// n! for small n (callers only pass embedding dimensions); factorial(0) == 1.
fn factorial(n: usize) -> usize {
    (2..=n).fold(1, |acc, k| acc * k)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::fixed_point::FixedPoint;
fn create_test_trade(
price: f64,
volume: f64,
timestamp: i64,
is_buyer_maker: bool,
) -> Tick {
Tick {
ref_id: timestamp,
price: FixedPoint((price * 1e8) as i64),
volume: FixedPoint((volume * 1e8) as i64),
first_sub_id: timestamp,
last_sub_id: timestamp,
timestamp,
is_buyer_maker,
is_best_match: None,
best_bid: None,
best_ask: None,
}
}
#[test]
fn test_compute_intra_bar_features_empty() {
let features = compute_intra_bar_features(&[]);
assert_eq!(features.intra_trade_count, Some(0));
assert!(features.intra_bull_epoch_density.is_none());
}
#[test]
fn test_compute_intra_bar_features_single_trade() {
let trades = vec![create_test_trade(100.0, 1.0, 1000000, false)];
let features = compute_intra_bar_features(&trades);
assert_eq!(features.intra_trade_count, Some(1));
assert!(features.intra_bull_epoch_density.is_none());
}
#[test]
fn test_compute_intra_bar_features_uptrend() {
let trades: Vec<Tick> = (0..10)
.map(|i| create_test_trade(100.0 + i as f64 * 0.5, 1.0, i * 1000000, false))
.collect();
let features = compute_intra_bar_features(&trades);
assert_eq!(features.intra_trade_count, Some(10));
assert!(features.intra_bull_epoch_density.is_some());
assert!(features.intra_bear_epoch_density.is_some());
if let Some(dd) = features.intra_max_drawdown {
assert!(dd < 0.1, "Uptrend should have low drawdown: {}", dd);
}
}
#[test]
fn test_compute_intra_bar_features_downtrend() {
let trades: Vec<Tick> = (0..10)
.map(|i| create_test_trade(100.0 - i as f64 * 0.5, 1.0, i * 1000000, true))
.collect();
let features = compute_intra_bar_features(&trades);
assert_eq!(features.intra_trade_count, Some(10));
if let Some(ru) = features.intra_max_runup {
assert!(ru < 0.1, "Downtrend should have low runup: {}", ru);
}
}
#[test]
fn test_ofi_calculation() {
let buy_trades: Vec<Tick> = (0..5)
.map(|i| create_test_trade(100.0, 1.0, i * 1000000, false))
.collect();
let features = compute_intra_bar_features(&buy_trades);
assert!(
features.intra_ofi.unwrap() > 0.9,
"All buys should have OFI near 1.0"
);
let sell_trades: Vec<Tick> = (0..5)
.map(|i| create_test_trade(100.0, 1.0, i * 1000000, true))
.collect();
let features = compute_intra_bar_features(&sell_trades);
assert!(
features.intra_ofi.unwrap() < -0.9,
"All sells should have OFI near -1.0"
);
}
#[test]
fn test_ith_features_bounded() {
let trades: Vec<Tick> = (0..50)
.map(|i| {
let price = 100.0 + ((i as f64 * 0.7).sin() * 2.0);
create_test_trade(price, 1.0, i * 1000000, i % 2 == 0)
})
.collect();
let features = compute_intra_bar_features(&trades);
if let Some(v) = features.intra_bull_epoch_density {
assert!(
v >= 0.0 && v <= 1.0,
"bull_epoch_density out of bounds: {}",
v
);
}
if let Some(v) = features.intra_bear_epoch_density {
assert!(
v >= 0.0 && v <= 1.0,
"bear_epoch_density out of bounds: {}",
v
);
}
if let Some(v) = features.intra_bull_excess_gain {
assert!(
v >= 0.0 && v <= 1.0,
"bull_excess_gain out of bounds: {}",
v
);
}
if let Some(v) = features.intra_bear_excess_gain {
assert!(
v >= 0.0 && v <= 1.0,
"bear_excess_gain out of bounds: {}",
v
);
}
if let Some(v) = features.intra_bull_cv {
assert!(v >= 0.0 && v <= 1.0, "bull_cv out of bounds: {}", v);
}
if let Some(v) = features.intra_bear_cv {
assert!(v >= 0.0 && v <= 1.0, "bear_cv out of bounds: {}", v);
}
if let Some(v) = features.intra_max_drawdown {
assert!(v >= 0.0 && v <= 1.0, "max_drawdown out of bounds: {}", v);
}
if let Some(v) = features.intra_max_runup {
assert!(v >= 0.0 && v <= 1.0, "max_runup out of bounds: {}", v);
}
}
#[test]
fn test_kaufman_er_bounds() {
let efficient_trades: Vec<Tick> = (0..10)
.map(|i| create_test_trade(100.0 + i as f64, 1.0, i * 1000000, false))
.collect();
let features = compute_intra_bar_features(&efficient_trades);
if let Some(er) = features.intra_kaufman_er {
assert!(
(er - 1.0).abs() < 0.01,
"Straight line should have ER near 1.0: {}",
er
);
}
}
#[test]
fn test_complexity_features_require_data() {
let small_trades: Vec<Tick> = (0..30)
.map(|i| create_test_trade(100.0, 1.0, i * 1000000, false))
.collect();
let features = compute_intra_bar_features(&small_trades);
assert!(features.intra_hurst.is_none());
assert!(features.intra_permutation_entropy.is_none());
let large_trades: Vec<Tick> = (0..70)
.map(|i| {
let price = 100.0 + ((i as f64 * 0.1).sin() * 2.0);
create_test_trade(price, 1.0, i * 1000000, false)
})
.collect();
let features = compute_intra_bar_features(&large_trades);
assert!(features.intra_hurst.is_some());
assert!(features.intra_permutation_entropy.is_some());
if let Some(h) = features.intra_hurst {
assert!(h >= 0.0 && h <= 1.0, "Hurst out of bounds: {}", h);
}
if let Some(pe) = features.intra_permutation_entropy {
assert!(
pe >= 0.0 && pe <= 1.0,
"Permutation entropy out of bounds: {}",
pe
);
}
}
#[test]
fn test_hurst_rescaled_range_all_identical_prices() {
let prices: Vec<f64> = vec![100.0; 70];
let h = compute_hurst_rescaled_range(&prices);
assert!(h.is_finite(), "Hurst should be finite for identical prices");
assert!(
(h - 0.5).abs() < 0.15,
"Hurst should be near 0.5 for flat prices: {}",
h
);
}
#[test]
fn test_hurst_rescaled_range_monotonic_ascending() {
let prices: Vec<f64> = (0..70).map(|i| 100.0 + i as f64 * 0.01).collect();
let h = compute_hurst_rescaled_range(&prices);
assert!(h >= 0.0 && h <= 1.0, "Hurst out of bounds: {}", h);
assert!(h > 0.5, "Trending series should have H > 0.5: {}", h);
}
#[test]
fn test_hurst_rescaled_range_mean_reverting() {
let prices: Vec<f64> = (0..70)
.map(|i| if i % 2 == 0 { 100.0 } else { 100.5 })
.collect();
let h = compute_hurst_rescaled_range(&prices);
assert!(h >= 0.0 && h <= 1.0, "Hurst out of bounds: {}", h);
assert!(
h < 0.55,
"Mean-reverting series should have H <= 0.5: {}",
h
);
}
#[test]
fn test_hurst_rescaled_range_exactly_64_trades() {
let prices: Vec<f64> = (0..64).map(|i| 100.0 + (i as f64 * 0.3).sin()).collect();
let h = compute_hurst_rescaled_range(&prices);
assert!(h >= 0.0 && h <= 1.0, "Hurst out of bounds at n=64: {}", h);
}
#[test]
fn test_hurst_rescaled_range_below_threshold() {
let prices: Vec<f64> = (0..63).map(|i| 100.0 + i as f64 * 0.01).collect();
let h = compute_hurst_rescaled_range(&prices);
assert!(
(h - 0.5).abs() < f64::EPSILON,
"Below threshold should return 0.5: {}",
h
);
}
#[test]
fn test_pe_monotonic_ascending() {
let prices: Vec<f64> = (0..60).map(|i| 100.0 + i as f64 * 0.01).collect();
let pe = compute_permutation_entropy(&prices, 3);
assert!(
(pe - 0.0).abs() < 0.01,
"Ascending series should have PE near 0: {}",
pe
);
}
#[test]
fn test_pe_monotonic_descending() {
let prices: Vec<f64> = (0..60).map(|i| 200.0 - i as f64 * 0.01).collect();
let pe = compute_permutation_entropy(&prices, 3);
assert!(
(pe - 0.0).abs() < 0.01,
"Descending series should have PE near 0: {}",
pe
);
}
#[test]
fn test_pe_all_identical_prices() {
let prices: Vec<f64> = vec![100.0; 60];
let pe = compute_permutation_entropy(&prices, 3);
assert!(
(pe - 0.0).abs() < 0.01,
"Identical prices should have PE near 0: {}",
pe
);
}
#[test]
fn test_pe_alternating_high_entropy() {
let prices: Vec<f64> = (0..70)
.map(|i| match i % 6 {
0 => 100.0,
1 => 102.0,
2 => 101.0,
3 => 103.0,
4 => 99.0,
5 => 101.5,
_ => unreachable!(),
})
.collect();
let pe = compute_permutation_entropy(&prices, 3);
assert!(pe > 0.5, "Diverse patterns should have high PE: {}", pe);
assert!(pe <= 1.0, "PE must be <= 1.0: {}", pe);
}
#[test]
fn test_pe_below_threshold() {
let prices: Vec<f64> = (0..7).map(|i| 100.0 + i as f64).collect();
let pe = compute_permutation_entropy(&prices, 3);
assert!(
(pe - 0.5).abs() < f64::EPSILON,
"Below threshold should return 0.5: {}",
pe
);
}
#[test]
fn test_pe_exactly_at_threshold() {
let prices: Vec<f64> = (0..8).map(|i| 100.0 + (i as f64 * 0.7).sin()).collect();
let pe = compute_permutation_entropy(&prices, 3);
assert!(
pe >= 0.0 && pe <= 1.0,
"PE at threshold should be valid: {}",
pe
);
}
#[test]
fn test_pe_decision_tree_all_six_patterns() {
let prices = vec![
1.0, 2.0, 3.0, 1.0, 3.0, 2.0, 2.0, 1.0, 3.0, 2.0, 3.0, 1.0, 2.0, 1.0, 3.0, ];
let pe = compute_permutation_entropy(&prices, 3);
assert!(
pe > 0.5,
"Sequence with diverse patterns should have high PE: {}",
pe
);
let desc_prices: Vec<f64> = (0..20).map(|i| 100.0 - i as f64).collect();
let pe_desc = compute_permutation_entropy(&desc_prices, 3);
assert!(
pe_desc < 0.1,
"Pure descending should have PE near 0: {}",
pe_desc
);
let asc_prices: Vec<f64> = (0..20).map(|i| 100.0 + i as f64).collect();
let pe_asc = compute_permutation_entropy(&asc_prices, 3);
assert!(
pe_asc < 0.1,
"Pure ascending should have PE near 0: {}",
pe_asc
);
}
#[test]
fn test_lehmer_code_bijection_m3() {
use smallvec::SmallVec;
let permutations: [[usize; 3]; 6] = [
[0, 1, 2],
[0, 2, 1],
[1, 0, 2],
[1, 2, 0],
[2, 0, 1],
[2, 1, 0],
];
let mut seen = std::collections::HashSet::new();
for perm in &permutations {
let sv: SmallVec<[usize; 4]> = SmallVec::from_slice(perm);
let idx = ordinal_indices_to_pattern_index(&sv);
assert!(idx < 6, "m=3 index must be in [0,5]: {:?} → {}", perm, idx);
assert!(
seen.insert(idx),
"Collision! {:?} → {} already used",
perm,
idx
);
}
assert_eq!(seen.len(), 6, "Must map to exactly 6 unique indices");
}
#[test]
fn test_lehmer_code_bijection_m4() {
use smallvec::SmallVec;
let mut seen = std::collections::HashSet::new();
let mut perm = [0usize, 1, 2, 3];
loop {
let sv: SmallVec<[usize; 4]> = SmallVec::from_slice(&perm);
let idx = ordinal_indices_to_pattern_index(&sv);
assert!(
idx < 24,
"m=4 index must be in [0,23]: {:?} → {}",
perm,
idx
);
assert!(
seen.insert(idx),
"Collision! {:?} → {} already used",
perm,
idx
);
if !next_permutation(&mut perm) {
break;
}
}
assert_eq!(seen.len(), 24, "Must map to exactly 24 unique indices");
}
fn next_permutation(arr: &mut [usize]) -> bool {
let n = arr.len();
if n < 2 {
return false;
}
let mut i = n - 1;
while i > 0 && arr[i - 1] >= arr[i] {
i -= 1;
}
if i == 0 {
return false;
}
let mut j = n - 1;
while arr[j] <= arr[i - 1] {
j -= 1;
}
arr.swap(i - 1, j);
arr[i..].reverse();
true
}
#[test]
fn test_lehmer_code_bijection_m2() {
use smallvec::SmallVec;
let asc: SmallVec<[usize; 4]> = SmallVec::from_slice(&[0, 1]);
let desc: SmallVec<[usize; 4]> = SmallVec::from_slice(&[1, 0]);
let idx_asc = ordinal_indices_to_pattern_index(&asc);
let idx_desc = ordinal_indices_to_pattern_index(&desc);
assert_eq!(idx_asc, 0, "ascending [0,1] → 0");
assert_eq!(idx_desc, 1, "descending [1,0] → 1");
assert_ne!(idx_asc, idx_desc);
}
#[test]
fn test_lehmer_code_m3_specific_values() {
use smallvec::SmallVec;
let p012: SmallVec<[usize; 4]> = SmallVec::from_slice(&[0, 1, 2]);
assert_eq!(ordinal_indices_to_pattern_index(&p012), 0);
let p210: SmallVec<[usize; 4]> = SmallVec::from_slice(&[2, 1, 0]);
assert_eq!(ordinal_indices_to_pattern_index(&p210), 5);
let p102: SmallVec<[usize; 4]> = SmallVec::from_slice(&[1, 0, 2]);
assert_eq!(ordinal_indices_to_pattern_index(&p102), 2);
}
#[test]
fn test_intra_bar_nan_first_price() {
let trades = vec![
Tick {
ref_id: 1,
price: FixedPoint(0), volume: FixedPoint(100_000_000),
first_sub_id: 1,
last_sub_id: 1,
timestamp: 1_000_000,
is_buyer_maker: false,
is_best_match: None,
best_bid: None,
best_ask: None,
},
create_test_trade(100.0, 1.0, 2_000_000, false),
];
let features = compute_intra_bar_features(&trades);
assert_eq!(features.intra_trade_count, Some(2));
assert!(features.intra_bull_epoch_density.is_none());
assert!(features.intra_hurst.is_none());
}
#[test]
fn test_intra_bar_all_identical_prices() {
let trades: Vec<Tick> = (0..100)
.map(|i| create_test_trade(100.0, 1.0, i * 1_000_000, i % 2 == 0))
.collect();
let features = compute_intra_bar_features(&trades);
assert_eq!(features.intra_trade_count, Some(100));
if let Some(er) = features.intra_kaufman_er {
assert!(er.is_finite(), "Kaufman ER should be finite: {}", er);
}
if let Some(gk) = features.intra_garman_klass_vol {
assert!(gk.is_finite(), "Garman-Klass should be finite: {}", gk);
}
if let Some(h) = features.intra_hurst {
assert!(
h.is_finite(),
"Hurst should be finite for flat prices: {}",
h
);
}
}
#[test]
fn test_intra_bar_all_buys_count_imbalance() {
let trades: Vec<Tick> = (0..20)
.map(|i| create_test_trade(100.0 + i as f64 * 0.1, 1.0, i * 1_000_000, false))
.collect();
let features = compute_intra_bar_features(&trades);
if let Some(ci) = features.intra_count_imbalance {
assert!(
(ci - 1.0).abs() < 0.01,
"All buys should have count_imbalance near 1.0: {}",
ci
);
}
}
#[test]
fn test_intra_bar_all_sells_count_imbalance() {
let trades: Vec<Tick> = (0..20)
.map(|i| create_test_trade(100.0 - i as f64 * 0.1, 1.0, i * 1_000_000, true))
.collect();
let features = compute_intra_bar_features(&trades);
if let Some(ci) = features.intra_count_imbalance {
assert!(
(ci - (-1.0)).abs() < 0.01,
"All sells should have count_imbalance near -1.0: {}",
ci
);
}
}
#[test]
fn test_intra_bar_instant_bar_same_timestamp() {
let trades: Vec<Tick> = (0..10)
.map(|i| create_test_trade(100.0 + i as f64 * 0.1, 1.0, 1_000_000, i % 2 == 0))
.collect();
let features = compute_intra_bar_features(&trades);
assert_eq!(features.intra_trade_count, Some(10));
if let Some(b) = features.intra_burstiness {
assert!(
b.is_finite(),
"Burstiness should be finite for instant bar: {}",
b
);
}
if let Some(intensity) = features.intra_intensity {
assert!(
intensity.is_finite(),
"Intensity should be finite: {}",
intensity
);
}
}
#[test]
fn test_intra_bar_large_trade_count() {
let trades: Vec<Tick> = (0..500)
.map(|i| {
let price = 100.0 + (i as f64 * 0.1).sin() * 2.0;
create_test_trade(
price,
0.5 + (i as f64 * 0.03).cos(),
i * 1_000_000,
i % 3 == 0,
)
})
.collect();
let features = compute_intra_bar_features(&trades);
assert_eq!(features.intra_trade_count, Some(500));
if let Some(h) = features.intra_hurst {
assert!(h >= 0.0 && h <= 1.0, "Hurst out of bounds at n=500: {}", h);
}
if let Some(pe) = features.intra_permutation_entropy {
assert!(pe >= 0.0 && pe <= 1.0, "PE out of bounds at n=500: {}", pe);
}
if let Some(ofi) = features.intra_ofi {
assert!(
ofi >= -1.0 && ofi <= 1.0,
"OFI out of bounds at n=500: {}",
ofi
);
}
}
#[test]
fn test_intrabar_exactly_2_trades_ith() {
let trades = vec![
create_test_trade(100.0, 1.0, 1_000_000, false),
create_test_trade(100.5, 1.5, 2_000_000, true),
];
let features = compute_intra_bar_features(&trades);
assert_eq!(features.intra_trade_count, Some(2));
assert!(
features.intra_bull_epoch_density.is_some(),
"Bull epochs for n=2"
);
assert!(
features.intra_bear_epoch_density.is_some(),
"Bear epochs for n=2"
);
assert!(
features.intra_max_drawdown.is_some(),
"Max drawdown for n=2"
);
assert!(features.intra_max_runup.is_some(), "Max runup for n=2");
assert!(features.intra_hurst.is_none(), "Hurst requires n >= 64");
assert!(
features.intra_permutation_entropy.is_none(),
"PE requires n >= 60"
);
if let Some(er) = features.intra_kaufman_er {
assert!(
(er - 1.0).abs() < 0.01,
"Straight line ER should be 1.0: {}",
er
);
}
}
#[test]
fn test_intrabar_pe_boundary_59_vs_60() {
let trades_59: Vec<Tick> = (0..59)
.map(|i| {
let price = 100.0 + (i as f64 * 0.3).sin() * 2.0;
create_test_trade(price, 1.0, i * 1_000_000, i % 2 == 0)
})
.collect();
let f59 = compute_intra_bar_features(&trades_59);
assert!(
f59.intra_permutation_entropy.is_none(),
"n=59 should not compute PE"
);
let trades_60: Vec<Tick> = (0..60)
.map(|i| {
let price = 100.0 + (i as f64 * 0.3).sin() * 2.0;
create_test_trade(price, 1.0, i * 1_000_000, i % 2 == 0)
})
.collect();
let f60 = compute_intra_bar_features(&trades_60);
assert!(
f60.intra_permutation_entropy.is_some(),
"n=60 should compute PE"
);
let pe60 = f60.intra_permutation_entropy.unwrap();
assert!(
pe60.is_finite() && pe60 >= 0.0 && pe60 <= 1.0,
"PE(60) out of bounds: {}",
pe60
);
}
#[test]
fn test_intrabar_hurst_boundary_63_vs_64() {
let trades_63: Vec<Tick> = (0..63)
.map(|i| {
let price = 100.0 + (i as f64 * 0.2).sin() * 2.0;
create_test_trade(price, 1.0, i * 1_000_000, i % 2 == 0)
})
.collect();
let f63 = compute_intra_bar_features(&trades_63);
assert!(f63.intra_hurst.is_none(), "n=63 should not compute Hurst");
let trades_64: Vec<Tick> = (0..64)
.map(|i| {
let price = 100.0 + (i as f64 * 0.2).sin() * 2.0;
create_test_trade(price, 1.0, i * 1_000_000, i % 2 == 0)
})
.collect();
let f64_features = compute_intra_bar_features(&trades_64);
assert!(
f64_features.intra_hurst.is_some(),
"n=64 should compute Hurst"
);
let h64 = f64_features.intra_hurst.unwrap();
assert!(
h64.is_finite() && h64 >= 0.0 && h64 <= 1.0,
"Hurst(64) out of bounds: {}",
h64
);
}
#[test]
fn test_intrabar_constant_price_full_features() {
    // 100 identical-price trades, 1 ms apart, strictly alternating sides.
    // n=100 clears every sample-count threshold, so all features that are
    // defined for a flat tape should be populated.
    let trades: Vec<Tick> = (0..100)
        .map(|i| create_test_trade(42000.0, 1.0, i * 1_000_000, i % 2 == 0))
        .collect();
    let f = compute_intra_bar_features(&trades);
    assert_eq!(f.intra_trade_count, Some(100));
    // Perfectly balanced flow: order-flow imbalance should sit near zero.
    if let Some(ofi) = f.intra_ofi {
        assert!(ofi.abs() < 0.1, "Equal buy/sell → OFI near 0: {}", ofi);
    }
    // No price movement: Garman-Klass volatility should be (near) zero.
    if let Some(gk) = f.intra_garman_klass_vol {
        assert!(
            gk.is_finite() && gk < 0.001,
            "Constant price → GK near 0: {}",
            gk
        );
    }
    // Hurst is degenerate on a flat series but must still be finite/bounded.
    if let Some(h) = f.intra_hurst {
        assert!(
            h.is_finite() && (0.0..=1.0).contains(&h),
            "Hurst must be finite: {}",
            h
        );
    }
    // A constant series has a single ordinal pattern, so PE collapses to ~0.
    if let Some(pe) = f.intra_permutation_entropy {
        assert!(pe.is_finite() && pe >= 0.0, "PE must be finite: {}", pe);
        assert!(pe < 0.05, "Constant prices → PE near 0: {}", pe);
    }
    // Kaufman ER is 0/0-prone on a flat tape; require finiteness only.
    if let Some(er) = f.intra_kaufman_er {
        assert!(
            er.is_finite(),
            "Kaufman ER finite for constant price: {}",
            er
        );
    }
}
#[test]
fn test_intrabar_all_buy_with_hurst_pe() {
    // 70 monotonically rising prices; is_buyer_maker=false on every trade
    // (i.e. a one-sided, buy-dominated tape). n=70 clears both the Hurst
    // (64) and permutation-entropy (60) sample minimums.
    let trades: Vec<Tick> = (0..70)
        .map(|i| create_test_trade(100.0 + i as f64 * 0.1, 1.0, i * 1_000_000, false))
        .collect();
    let f = compute_intra_bar_features(&trades);
    // One-sided flow should saturate order-flow imbalance at +1.
    if let Some(ofi) = f.intra_ofi {
        assert!((ofi - 1.0).abs() < 0.01, "All buys → OFI=1.0: {}", ofi);
    }
    assert!(f.intra_hurst.is_some(), "n=70 should compute Hurst");
    if let Some(h) = f.intra_hurst {
        assert!(
            h.is_finite() && (0.0..=1.0).contains(&h),
            "Hurst bounded: {}",
            h
        );
    }
    assert!(
        f.intra_permutation_entropy.is_some(),
        "n=70 should compute PE"
    );
    if let Some(pe) = f.intra_permutation_entropy {
        assert!(
            pe.is_finite() && (0.0..=1.0).contains(&pe),
            "PE bounded: {}",
            pe
        );
        // A strictly increasing series has one ordinal pattern → PE ~ 0.
        assert!(pe < 0.1, "Monotonic ascending → low PE: {}", pe);
    }
}
#[test]
fn test_intrabar_all_sell_with_hurst_pe() {
    // Mirror of the all-buy test: 70 monotonically falling prices with
    // is_buyer_maker=true throughout (a one-sided, sell-dominated tape).
    let trades: Vec<Tick> = (0..70)
        .map(|i| create_test_trade(100.0 - i as f64 * 0.1, 1.0, i * 1_000_000, true))
        .collect();
    let f = compute_intra_bar_features(&trades);
    // One-sided sell flow should saturate order-flow imbalance at -1.
    if let Some(ofi) = f.intra_ofi {
        assert!((ofi + 1.0).abs() < 0.01, "All sells → OFI=-1.0: {}", ofi);
    }
    // n=70 clears both complexity-feature sample minimums.
    assert!(f.intra_hurst.is_some(), "n=70 should compute Hurst");
    assert!(
        f.intra_permutation_entropy.is_some(),
        "n=70 should compute PE"
    );
    // A strictly decreasing series has one ordinal pattern → PE ~ 0.
    if let Some(pe) = f.intra_permutation_entropy {
        assert!(pe < 0.1, "Monotonic descending → low PE: {}", pe);
    }
}
#[test]
fn test_intra_bar_zero_volume_trades() {
    // 20 rising-price trades whose volume is all zero: exercises the
    // division-by-total-volume edge paths (OFI, VWAP, Kyle lambda).
    let trades: Vec<Tick> = (0..20)
        .map(|i| create_test_trade(100.0 + i as f64 * 0.1, 0.0, i * 1_000_000, i % 2 == 0))
        .collect();
    let f = compute_intra_bar_features(&trades);
    assert_eq!(f.intra_trade_count, Some(20));
    // With zero total volume, OFI must degrade gracefully to exactly 0.
    if let Some(ofi) = f.intra_ofi {
        assert!(
            ofi.is_finite(),
            "OFI must be finite with zero volume: {}",
            ofi
        );
        assert!(
            ofi.abs() < f64::EPSILON,
            "OFI should be 0.0 with zero volume: {}",
            ofi
        );
    }
    // VWAP position is price-range based; it must never go non-finite.
    if let Some(vp) = f.intra_vwap_position {
        assert!(vp.is_finite(), "VWAP position must be finite: {}", vp);
    }
    // Kyle lambda divides by signed volume, so it must be withheld entirely.
    assert!(
        f.intra_kyle_lambda.is_none(),
        "Kyle Lambda undefined with zero volume"
    );
    // Timing features depend only on timestamps, not volume.
    if let Some(d) = f.intra_duration_us {
        assert!(d > 0, "Duration should be positive: {}", d);
    }
    if let Some(intensity) = f.intra_intensity {
        assert!(
            intensity.is_finite() && intensity > 0.0,
            "Intensity finite: {}",
            intensity
        );
    }
}
}
#[cfg(test)]
mod proptest_intrabar_bounds {
    use super::*;
    use crate::fixed_point::FixedPoint;
    use crate::trade::Tick;
    use proptest::prelude::*;

    /// Builds a minimal `Tick` for property tests; price and volume are
    /// stored as 1e8-scaled fixed point, and all id fields reuse `timestamp`.
    fn make_trade(price: f64, volume: f64, timestamp: i64, is_buyer_maker: bool) -> Tick {
        Tick {
            ref_id: timestamp,
            price: FixedPoint((price * 1e8) as i64),
            volume: FixedPoint((volume * 1e8) as i64),
            first_sub_id: timestamp,
            last_sub_id: timestamp,
            timestamp,
            is_buyer_maker,
            is_best_match: None,
            best_bid: None,
            best_ask: None,
        }
    }

    /// Strategy yielding `min_n..=max_n` pseudo-random trades spaced 1 ms
    /// apart, driven by a 64-bit LCG seeded by proptest. Prices span
    /// [95, 105), volumes [0.1, 5.1).
    fn trade_sequence(min_n: usize, max_n: usize) -> impl Strategy<Value = Vec<Tick>> {
        (min_n..=max_n, 0_u64..10000).prop_map(|(n, seed)| {
            let mut rng = seed;
            let base_price = 100.0;
            (0..n)
                .map(|i| {
                    rng = rng.wrapping_mul(6364136223846793005).wrapping_add(1);
                    // Take the top 31 bits and scale into [0, 1). The previous
                    // divisor (u32::MAX) capped r below 0.5, so prices never
                    // rose above base_price and half the range went untested.
                    let r = ((rng >> 33) as f64) / ((1u64 << 31) as f64);
                    let price = base_price + (r - 0.5) * 10.0;
                    let volume = 0.1 + r * 5.0;
                    let ts = (i as i64) * 1_000_000;
                    // Sample the side from a high bit: for an LCG with a
                    // power-of-two modulus the lowest bit has period 2, so
                    // `rng % 2` made buy/sell strictly alternate and never
                    // exercised one-sided flow.
                    make_trade(price, volume, ts, (rng >> 63) == 0)
                })
                .collect()
        })
    }

    proptest! {
        /// All ITH-derived features are normalized into [0, 1] whenever present.
        #[test]
        fn ith_features_always_bounded(trades in trade_sequence(2, 100)) {
            let features = compute_intra_bar_features(&trades);
            if let Some(v) = features.intra_bull_epoch_density {
                prop_assert!(v >= 0.0 && v <= 1.0, "bull_epoch_density={v}");
            }
            if let Some(v) = features.intra_bear_epoch_density {
                prop_assert!(v >= 0.0 && v <= 1.0, "bear_epoch_density={v}");
            }
            if let Some(v) = features.intra_bull_excess_gain {
                prop_assert!(v >= 0.0 && v <= 1.0, "bull_excess_gain={v}");
            }
            if let Some(v) = features.intra_bear_excess_gain {
                prop_assert!(v >= 0.0 && v <= 1.0, "bear_excess_gain={v}");
            }
            if let Some(v) = features.intra_bull_cv {
                prop_assert!(v >= 0.0 && v <= 1.0, "bull_cv={v}");
            }
            if let Some(v) = features.intra_bear_cv {
                prop_assert!(v >= 0.0 && v <= 1.0, "bear_cv={v}");
            }
            if let Some(v) = features.intra_max_drawdown {
                prop_assert!(v >= 0.0 && v <= 1.0, "max_drawdown={v}");
            }
            if let Some(v) = features.intra_max_runup {
                prop_assert!(v >= 0.0 && v <= 1.0, "max_runup={v}");
            }
        }

        /// Microstructure statistics stay within their documented ranges
        /// (imbalances in [-1, 1], ratios in [0, 1], volatility/intensity >= 0).
        #[test]
        fn statistical_features_bounded(trades in trade_sequence(3, 200)) {
            let features = compute_intra_bar_features(&trades);
            if let Some(ofi) = features.intra_ofi {
                prop_assert!(ofi >= -1.0 - f64::EPSILON && ofi <= 1.0 + f64::EPSILON,
                    "OFI={ofi} out of [-1, 1]");
            }
            if let Some(ci) = features.intra_count_imbalance {
                prop_assert!(ci >= -1.0 - f64::EPSILON && ci <= 1.0 + f64::EPSILON,
                    "count_imbalance={ci} out of [-1, 1]");
            }
            if let Some(b) = features.intra_burstiness {
                prop_assert!(b >= -1.0 - f64::EPSILON && b <= 1.0 + f64::EPSILON,
                    "burstiness={b} out of [-1, 1]");
            }
            if let Some(er) = features.intra_kaufman_er {
                prop_assert!(er >= 0.0 && er <= 1.0 + f64::EPSILON,
                    "kaufman_er={er} out of [0, 1]");
            }
            if let Some(vwap) = features.intra_vwap_position {
                prop_assert!(vwap >= 0.0 && vwap <= 1.0 + f64::EPSILON,
                    "vwap_position={vwap} out of [0, 1]");
            }
            if let Some(gk) = features.intra_garman_klass_vol {
                prop_assert!(gk >= 0.0, "garman_klass_vol={gk} negative");
            }
            if let Some(intensity) = features.intra_intensity {
                prop_assert!(intensity >= 0.0, "intensity={intensity} negative");
            }
        }

        /// Hurst and permutation entropy are bounded for sequences long enough
        /// to trigger their computation (n >= 70 covers both thresholds).
        #[test]
        fn complexity_features_bounded(trades in trade_sequence(70, 300)) {
            let features = compute_intra_bar_features(&trades);
            if let Some(h) = features.intra_hurst {
                prop_assert!(h >= 0.0 && h <= 1.0,
                    "hurst={h} out of [0, 1] for n={}", trades.len());
            }
            if let Some(pe) = features.intra_permutation_entropy {
                prop_assert!(pe >= 0.0 && pe <= 1.0 + f64::EPSILON,
                    "permutation_entropy={pe} out of [0, 1] for n={}", trades.len());
            }
        }

        /// The reported trade count always equals the input length, including
        /// the degenerate n=0 and n=1 fast paths.
        #[test]
        fn trade_count_matches_input(trades in trade_sequence(0, 50)) {
            let features = compute_intra_bar_features(&trades);
            prop_assert_eq!(features.intra_trade_count, Some(trades.len() as u32));
        }
    }
}