// opendeviationbar_core/interbar.rs
1// FILE-SIZE-OK: Tests stay inline (access pub(crate) math functions via glob import). Phase 2b extracted types, Phase 2e extracted math.
2//! Inter-bar microstructure features computed from lookback trade windows
3//!
4//! GitHub Issue: https://github.com/terrylica/opendeviationbar-py/issues/59
5//!
6//! This module provides features computed from trades that occurred BEFORE each bar opened,
7//! enabling enrichment of larger open deviation bars (e.g., 1000 dbps) with finer-grained microstructure
8//! signals without lookahead bias.
9//!
10//! ## Temporal Integrity
11//!
12//! All features are computed from trades with timestamps strictly BEFORE the current bar's
13//! `open_time`. This ensures no lookahead bias in ML applications.
14//!
15//! ## Feature Tiers
16//!
17//! - **Tier 1**: Core features (7) - low complexity, high value
18//! - **Tier 2**: Statistical features (5) - medium complexity
19//! - **Tier 3**: Advanced features (4) - higher complexity, from trading-fitness patterns
20//!
21//! ## Academic References
22//!
23//! | Feature | Reference |
24//! |---------|-----------|
25//! | OFI | Chordia et al. (2002) - Order imbalance |
26//! | Kyle's Lambda | Kyle (1985) - Continuous auctions and insider trading |
27//! | Burstiness | Goh & Barabási (2008) - Burstiness and memory in complex systems |
28//! | Kaufman ER | Kaufman (1995) - Smarter Trading |
29//! | Garman-Klass | Garman & Klass (1980) - On the Estimation of Security Price Volatilities |
30//! | Hurst (DFA) | Peng et al. (1994) - Mosaic organization of DNA nucleotides |
31//! | Permutation Entropy | Bandt & Pompe (2002) - Permutation Entropy: A Natural Complexity Measure |
32
33use crate::fixed_point::FixedPoint;
34use crate::interbar_math::*;
35use crate::types::AggTrade;
36use rayon::join; // Issue #115: Parallelization of Tier 2/3 features
37use smallvec::SmallVec;
38use std::collections::VecDeque;
39use std::sync::LazyLock; // std::sync::LazyLock (stable since Rust 1.80, replaces once_cell)
40
41// Re-export types from interbar_types.rs (Phase 2b extraction)
42pub use crate::interbar_types::{InterBarConfig, InterBarFeatures, LookbackMode, TradeSnapshot};
43
/// Issue #96 Task #191: Lazy initialization of entropy cache warm-up
///
/// Ensures warm-up runs exactly once, on first `TradeHistory` creation in the
/// process. Dereferenced (and thereby initialized) in `new_with_cache()`;
/// `LazyLock` guarantees the closure runs at most once even under concurrent
/// first-use from multiple threads.
static ENTROPY_CACHE_WARMUP: LazyLock<()> = LazyLock::new(|| {
    crate::entropy_cache_global::warm_up_entropy_cache();
});
49
/// Trade history ring buffer for inter-bar feature computation
///
/// Owns the rolling trade window plus the caches and heuristics that make
/// per-bar lookback queries cheap.
///
/// NOTE(review): the lookback cutoff search (`partition_point` on timestamps)
/// assumes trades are pushed in non-decreasing timestamp order — this relies
/// on the upstream feed being time-ordered and is not checked here; confirm.
#[derive(Debug)]
pub struct TradeHistory {
    /// Ring buffer of recent trades (appended in push/arrival order)
    trades: VecDeque<TradeSnapshot>,
    /// Configuration for lookback
    config: InterBarConfig,
    /// Timestamp threshold: trades with timestamp < this are protected from pruning.
    /// Set to the oldest timestamp we might need for lookback computation.
    /// Updated each time a new bar opens (see `on_bar_open`).
    protected_until: Option<i64>,
    /// Total number of trades pushed (monotonic counter for BarRelative indexing)
    total_pushed: usize,
    /// Indices into total_pushed at which each bar closed (Issue #81).
    /// `bar_close_indices[i]` = `total_pushed` value when bar i closed.
    /// Used by `BarRelative` mode to determine how many trades to keep.
    bar_close_indices: VecDeque<usize>,
    /// Issue #104: Pushes since last prune check (reduces check frequency)
    pushes_since_prune_check: usize,
    /// Issue #104: Maximum safe capacity (computed once at init)
    max_safe_capacity: usize,
    /// Issue #96 Task #117: Cache for permutation entropy results
    /// Avoids redundant computation on identical price sequences
    /// Uses parking_lot::RwLock for lower-latency locking (Issue #96 Task #124)
    entropy_cache: std::sync::Arc<parking_lot::RwLock<crate::interbar_math::EntropyCache>>,
    /// Issue #96 Task #144 Phase 4: Cache for complete inter-bar feature results
    /// Avoids redundant feature computation for similar trade patterns
    /// Enabled by default, can be disabled via config (None disables caching)
    feature_result_cache: Option<std::sync::Arc<parking_lot::RwLock<crate::interbar_cache::InterBarFeatureCache>>>,
    /// Issue #96 Task #155 Phase 1: Adaptive pruning batch size tuning
    /// Tracks pruning efficiency to adapt batch size dynamically
    /// Reduces overhead of frequent prune checks when pruning is inefficient
    adaptive_prune_batch: usize,
    /// Tracks total trades pruned and prune calls for efficiency measurement
    prune_stats: (usize, usize), // (trades_pruned, prune_calls)
    /// Issue #96 Task #163: Cache last binary search result
    /// Avoids O(log n) binary search when bar_open_time hasn't changed significantly
    /// Most bars have similar/close timestamps, so cutoff index changes slowly
    /// Issue #96 Task #58: Removed Arc wrapper (eliminates indirection + atomic refcount overhead)
    last_binary_search_cache: parking_lot::Mutex<Option<(i64, usize)>>,  // (open_time, cutoff_idx)
    /// Issue #96 Task #167: Lookahead prediction buffer for binary search optimization
    /// Tracks last 2 search results to predict next position via timestamp delta trend
    /// On miss, analyzes trend = (ts_delta) / (idx_delta) to hint next search bounds
    /// Reduces binary search iterations by 20-40% on trending data patterns
    /// Issue #96 Task #62: VecDeque for O(1) pop_front (was SmallVec with O(n) remove(0))
    lookahead_buffer: parking_lot::Mutex<VecDeque<(i64, usize)>>,
}
97
/// Cold path: return default inter-bar features for empty lookback
///
/// Extracted to improve instruction cache locality on the hot path:
/// `#[cold]` + `#[inline(never)]` keep the default-construction code out of
/// the hot `compute_features` body and hint the branch layout toward the
/// non-empty case.
#[cold]
#[inline(never)]
fn default_interbar_features() -> InterBarFeatures {
    InterBarFeatures::default()
}
105
106impl TradeHistory {
    /// Create new trade history with given configuration
    ///
    /// Uses a local entropy cache (default behavior, backward compatible).
    /// For multi-symbol workloads, use `new_with_cache()` to provide a shared global cache.
    pub fn new(config: InterBarConfig) -> Self {
        // Delegate to the general constructor with no external entropy cache.
        Self::new_with_cache(config, None)
    }
114
115    /// Create new trade history with optional external entropy cache
116    ///
117    /// Issue #145 Phase 2: Multi-Symbol Entropy Cache Sharing
118    ///
119    /// ## Parameters
120    ///
121    /// - `config`: Lookback configuration (FixedCount, FixedWindow, or BarRelative)
122    /// - `external_cache`: Optional shared entropy cache from `get_global_entropy_cache()`
123    ///   - If provided: Uses the shared global cache (recommended for multi-symbol)
124    ///   - If None: Creates a local 128-entry cache (default, backward compatible)
125    ///
126    /// ## Usage
127    ///
128    /// ```ignore
129    /// // Single-symbol: use local cache (default)
130    /// let history = TradeHistory::new(config);
131    ///
132    /// // Multi-symbol: share global cache
133    /// let global_cache = get_global_entropy_cache();
134    /// let history = TradeHistory::new_with_cache(config, Some(global_cache));
135    /// ```
136    ///
137    /// ## Thread Safety
138    ///
139    /// Both local and external caches are thread-safe via Arc<RwLock<>>.
140    /// Multiple processors can safely share the same external cache concurrently.
141    pub fn new_with_cache(
142        config: InterBarConfig,
143        external_cache: Option<std::sync::Arc<parking_lot::RwLock<crate::interbar_math::EntropyCache>>>,
144    ) -> Self {
145        // Issue #96 Task #191: Trigger entropy cache warm-up on first TradeHistory creation
146        // Uses lazy static to ensure it runs exactly once per process
147        let _ = &*ENTROPY_CACHE_WARMUP;
148
149        // Issue #118: Optimized capacity sizing based on lookback config
150        // Reduces memory overhead by 20-30% while maintaining safety margins
151        let capacity = match &config.lookback_mode {
152            LookbackMode::FixedCount(n) => *n, // Exact size (pruning handles overflow)
153            LookbackMode::FixedWindow(_) => 500, // Covers 99% of time-based windows
154            LookbackMode::BarRelative(_) => 1000, // Adaptive pruning scales with bar size
155        };
156        // Issue #104: Compute max safe capacity once to avoid repeated computation
157        let max_safe_capacity = match &config.lookback_mode {
158            LookbackMode::FixedCount(n) => *n * 2,  // 2x safety margin (reduced from 3x)
159            LookbackMode::FixedWindow(_) => 1500,    // Reduced from 3000 (better cache locality)
160            LookbackMode::BarRelative(_) => 2000,    // Reduced from 5000 (adaptive scaling)
161        };
162        // Task #91: Pre-allocate bar_close_indices buffer
163        // Typical lookback: 10-100 bars, so capacity 128 avoids most re-allocations
164        let bar_capacity = match &config.lookback_mode {
165            LookbackMode::BarRelative(n_bars) => (*n_bars + 1).min(128),
166            _ => 128,
167        };
168
169        // Issue #145 Phase 2: Use external cache if provided, otherwise create local
170        let entropy_cache = external_cache.unwrap_or_else(|| {
171            std::sync::Arc::new(parking_lot::RwLock::new(crate::interbar_math::EntropyCache::new()))
172        });
173
174        // Issue #96 Task #144 Phase 4: Create feature result cache (enabled by default)
175        let feature_result_cache = Some(
176            std::sync::Arc::new(parking_lot::RwLock::new(
177                crate::interbar_cache::InterBarFeatureCache::new()
178            ))
179        );
180
181        // Issue #96 Task #155: Initialize adaptive pruning batch size
182        let initial_prune_batch = match &config.lookback_mode {
183            LookbackMode::FixedCount(n) => std::cmp::max((*n / 10).max(5), 10),
184            _ => 10,
185        };
186
187        Self {
188            trades: VecDeque::with_capacity(capacity),
189            config,
190            protected_until: None,
191            total_pushed: 0,
192            bar_close_indices: VecDeque::with_capacity(bar_capacity),
193            pushes_since_prune_check: 0,
194            max_safe_capacity,
195            entropy_cache,
196            feature_result_cache,
197            adaptive_prune_batch: initial_prune_batch,
198            prune_stats: (0, 0),
199            last_binary_search_cache: parking_lot::Mutex::new(None), // Issue #96 Task #163/#58: No Arc indirection
200            lookahead_buffer: parking_lot::Mutex::new(VecDeque::with_capacity(3)), // Issue #96 Task #62: VecDeque for O(1) pop_front
201        }
202    }
203
    /// Push a new trade to the history buffer
    ///
    /// Automatically prunes old entries based on lookback mode, but preserves
    /// trades needed for lookback computation (timestamp < protected_until).
    /// Issue #104: Uses batched pruning check to reduce frequency
    ///
    /// # Arguments
    ///
    /// * `trade` - Trade to snapshot (via `TradeSnapshot::from`) and append
    pub fn push(&mut self, trade: &AggTrade) {
        let snapshot = TradeSnapshot::from(trade);
        self.trades.push_back(snapshot);
        self.total_pushed += 1;
        self.pushes_since_prune_check += 1;

        // Issue #96 Task #155: Use adaptive pruning batch size
        // Batch size increases if pruning is inefficient (<10% trades removed)
        let prune_batch_size = self.adaptive_prune_batch;

        // Check every N trades or when capacity limit exceeded (deferred: 2x threshold)
        if self.pushes_since_prune_check >= prune_batch_size
            || self.trades.len() > self.max_safe_capacity * 2
        {
            let trades_before = self.trades.len();
            self.prune_if_needed();
            let trades_after = self.trades.len();
            let trades_removed = trades_before.saturating_sub(trades_after);

            // Issue #96 Task #155: Track efficiency and adapt batch size
            // prune_stats = (cumulative trades pruned, prune calls this cycle)
            self.prune_stats.0 = self.prune_stats.0.saturating_add(trades_removed);
            self.prune_stats.1 = self.prune_stats.1.saturating_add(1);

            // Every 10 prune calls, reevaluate batch size
            if self.prune_stats.1 > 0 && self.prune_stats.1.is_multiple_of(10) {
                let avg_removed = self.prune_stats.0 / self.prune_stats.1;
                // NOTE(review): this mixes the multi-call average (avg_removed)
                // with the buffer size at the LAST prune only (trades_before) —
                // presumably an intentional cheap heuristic; confirm before changing.
                let removal_efficiency = if trades_before > 0 {
                    (avg_removed * 100) / (trades_before + avg_removed)
                } else {
                    0
                };

                // If removing <10%, increase batch size (reduce prune frequency)
                if removal_efficiency < 10 {
                    self.adaptive_prune_batch = std::cmp::min(
                        self.adaptive_prune_batch * 2,
                        self.max_safe_capacity / 4, // Cap at quarter of max capacity
                    );
                } else if removal_efficiency > 30 {
                    // If removing >30%, decrease batch size (more frequent pruning)
                    self.adaptive_prune_batch = std::cmp::max(
                        self.adaptive_prune_batch / 2,
                        5, // Minimum batch size
                    );
                }

                // Reset stats for next measurement cycle
                self.prune_stats = (0, 0);
            }

            self.pushes_since_prune_check = 0;
        }
    }
262
    /// Notify that a new bar has opened at the given timestamp
    ///
    /// This sets the protection threshold to ensure trades from before the bar
    /// opened are preserved for lookback computation. The protection extends
    /// until the next bar opens and calls this method again.
    ///
    /// # Arguments
    ///
    /// * `bar_open_time` - Open timestamp of the new bar (same clock/units as
    ///   `TradeSnapshot::timestamp`)
    pub fn on_bar_open(&mut self, bar_open_time: i64) {
        // Protect all trades with timestamp < bar_open_time: these are exactly
        // the trades the lookback window for this bar may read, so prune()
        // must not evict them until the next bar advances the threshold.
        self.protected_until = Some(bar_open_time);
    }
273
274    /// Notify that the current bar has closed
275    ///
276    /// For `BarRelative` mode, records the current trade count as a bar boundary.
277    /// For other modes, this is a no-op. Protection is always kept until the
278    /// next bar opens.
279    pub fn on_bar_close(&mut self) {
280        // Record bar boundary for BarRelative pruning (Issue #81)
281        if let LookbackMode::BarRelative(n_bars) = &self.config.lookback_mode {
282            self.bar_close_indices.push_back(self.total_pushed);
283            // Keep only last n_bars+1 boundaries (n_bars for lookback + 1 for current)
284            while self.bar_close_indices.len() > *n_bars + 1 {
285                self.bar_close_indices.pop_front();
286            }
287        }
288        // Keep protection until next bar opens (all modes)
289    }
290
291    /// Conditionally prune trades based on capacity (Task #91: reduce prune() call overhead)
292    ///
293    /// Only calls the full prune() when approaching capacity limits.
294    /// This reduces function call overhead while maintaining correctness.
295    /// Issue #104: Use pre-computed max_safe_capacity for branch-free check
296    fn prune_if_needed(&mut self) {
297        // Issue #104: Simple threshold check using pre-computed capacity
298        // Reduces function call overhead and enables better branch prediction
299        if self.trades.len() > self.max_safe_capacity {
300            self.prune();
301        }
302    }
303
    /// Prune old trades based on lookback configuration
    ///
    /// Pruning logic:
    /// - For `FixedCount(n)`: Keep up to 2*n trades total, but never prune trades
    ///   with timestamp < `protected_until` (needed for lookback)
    /// - For `FixedWindow`: Standard time-based pruning, but respect `protected_until`
    /// - For `BarRelative(n)`: Keep trades from last n completed bars (Issue #81)
    ///
    /// When protection blocks eviction, the buffer may temporarily exceed its
    /// size/time target; it shrinks again once the next `on_bar_open()` call
    /// advances `protected_until`.
    fn prune(&mut self) {
        match &self.config.lookback_mode {
            LookbackMode::FixedCount(n) => {
                // Keep at most 2*n trades (n for lookback + n for next bar's lookback)
                let max_trades = *n * 2;
                while self.trades.len() > max_trades {
                    // Check if front trade is protected
                    if let Some(front) = self.trades.front() {
                        if let Some(protected) = self.protected_until {
                            if front.timestamp < protected {
                                // Don't prune protected trades — stop entirely,
                                // since the buffer is in oldest-first order.
                                break;
                            }
                        }
                    }
                    self.trades.pop_front();
                }
            }
            LookbackMode::FixedWindow(window_us) => {
                // Find the oldest trade we need: everything within window_us
                // of the newest trade's timestamp.
                let newest_timestamp = self.trades.back().map(|t| t.timestamp).unwrap_or(0);
                let cutoff = newest_timestamp - window_us;

                while let Some(front) = self.trades.front() {
                    // Respect protection
                    if let Some(protected) = self.protected_until {
                        if front.timestamp < protected {
                            break;
                        }
                    }
                    // Prune if outside time window
                    if front.timestamp < cutoff {
                        self.trades.pop_front();
                    } else {
                        break;
                    }
                }
            }
            LookbackMode::BarRelative(n_bars) => {
                // Issue #81: Keep trades from last n completed bars.
                //
                // bar_close_indices stores total_pushed at each bar close:
                //   B0 = end of bar 0 / start of bar 1's trades
                //   B1 = end of bar 1 / start of bar 2's trades
                //   etc.
                //
                // To include N bars of lookback, we need boundary B_{k-1}
                // where k is the oldest bar we want. on_bar_close() keeps
                // at most n_bars+1 entries, so after steady state, front()
                // is exactly B_{k-1}.
                //
                // Bootstrap: when fewer than n_bars bars have closed, we
                // want ALL available bars, so keep everything.
                if self.bar_close_indices.len() <= *n_bars {
                    // Bootstrap: fewer completed bars than lookback depth.
                    // Keep all trades — we want every available bar.
                    return;
                }

                // Steady state: front() is the boundary BEFORE the oldest
                // bar we want. Trades from front() onward belong to the
                // N-bar lookback window plus the current in-progress bar.
                let oldest_boundary = self.bar_close_indices.front().copied().unwrap_or(0);
                let keep_count = self.total_pushed - oldest_boundary;

                // Prune unconditionally — bar boundaries are the source of truth
                // (protected_until is not consulted in this mode).
                while self.trades.len() > keep_count {
                    self.trades.pop_front();
                }
            }
        }
    }
383
384    /// Fast-path check for empty lookback window (Issue #96 Task #178)
385    ///
386    /// Returns true if there are any lookback trades before the given bar_open_time.
387    /// This check is done WITHOUT allocating the SmallVec, enabling fast-path for
388    /// zero-trade lookback windows. Typical improvement: 0.3-0.8% for windows with
389    /// no lookback data (common in consolidation periods at session start).
390    ///
391    /// # Performance
392    /// - Cache hit: ~2-3 ns (checks cached_idx from previous query)
393    /// - Cache miss: ~5-10 ns (single timestamp comparison, no SmallVec allocation)
394    /// - vs SmallVec allocation: ~10-20 ns (stack buffer initialization)
395    ///
396    /// # Example
397    /// ```ignore
398    /// if history.has_lookback_trades(bar_open_time) {
399    ///     let lookback = history.get_lookback_trades(bar_open_time);
400    ///     // Process lookback
401    /// } else {
402    ///     // Skip feature computation for zero-trade window
403    /// }
404    /// ```
405    #[inline]
406    pub fn has_lookback_trades(&self, bar_open_time: i64) -> bool {
407        // Quick check: if no trades at all, no lookback
408        if self.trades.is_empty() {
409            return false;
410        }
411
412        // Check cache first for O(1) path (Issue #96 Task #163/#58)
413        {
414            let cache = self.last_binary_search_cache.lock();
415            if let Some((cached_time, cached_idx)) = *cache {
416                if cached_time == bar_open_time {
417                    return cached_idx > 0;
418                }
419            }
420        }
421
422        // Cache miss: use partition_point for cleaner cutoff lookup
423        // Issue #96 Task #48: partition_point avoids Ok/Err unwrapping overhead
424        let idx = self.trades.partition_point(|trade| trade.timestamp < bar_open_time);
425        *self.last_binary_search_cache.lock() = Some((bar_open_time, idx));
426        idx > 0
427    }
428
429    /// Analyze lookahead buffer to compute trend-based search hint
430    ///
431    /// Issue #96 Task #167 Phase 2: Uses last 2-3 search results to predict if the
432    /// next index will be higher or lower than the previous result. Enables partitioned
433    /// binary search for 5-10% iteration reduction on trending data.
434    ///
435    /// Returns (should_check_higher, last_index) if trend is reliable, None otherwise
436    fn compute_search_hint(&self) -> Option<(bool, usize)> {
437        let buffer = self.lookahead_buffer.lock();
438        if buffer.len() < 2 {
439            return None;
440        }
441
442        // Compute trend from last 2 results
443        let prev = buffer[buffer.len() - 2]; // (ts, idx)
444        let curr = buffer[buffer.len() - 1];
445
446        let ts_delta = curr.0.saturating_sub(prev.0);
447        let idx_delta = (curr.1 as i64) - (prev.1 as i64);
448
449        // Only use hint if trend is clear (not flat, indices are changing)
450        if ts_delta > 0 && idx_delta != 0 {
451            let should_check_higher = idx_delta > 0;
452            Some((should_check_higher, curr.1))
453        } else {
454            None
455        }
456    }
457
458    pub fn get_lookback_trades(&self, bar_open_time: i64) -> SmallVec<[&TradeSnapshot; 256]> {
459        // Issue #96 Task #163/#58: Check cache first
460        {
461            let cache = self.last_binary_search_cache.lock();
462            if let Some((cached_time, cached_idx)) = *cache {
463                if cached_time == bar_open_time {
464                    let cutoff_idx = cached_idx;
465                    drop(cache);
466                    let mut result = SmallVec::new();
467                    for i in 0..cutoff_idx {
468                        result.push(&self.trades[i]);
469                    }
470                    return result;
471                }
472            }
473        }
474
475        // Issue #96 Task #167 Phase 2: Trend-guided binary search with lookahead hint
476        // Uses hint for O(1) boundary probe; falls back to O(log n) VecDeque binary search.
477        // Issue #96 Task #48: partition_point replaces binary_search_by + Ok/Err collapse
478        #[inline(always)]
479        fn ts_partition_point(trades: &std::collections::VecDeque<TradeSnapshot>, bar_open_time: i64) -> usize {
480            trades.partition_point(|trade| trade.timestamp < bar_open_time)
481        }
482
483        let cutoff_idx = if let Some((should_check_higher, last_idx)) = self.compute_search_hint() {
484            let check_region_end = if should_check_higher {
485                std::cmp::min(last_idx + (last_idx / 2), self.trades.len())
486            } else {
487                last_idx
488            };
489
490            // O(1) boundary probe: if all trades are before bar_open_time, skip binary search
491            if check_region_end > 0
492                && check_region_end == self.trades.len()
493                && self.trades[check_region_end - 1].timestamp < bar_open_time
494            {
495                check_region_end
496            } else {
497                ts_partition_point(&self.trades, bar_open_time)
498            }
499        } else {
500            ts_partition_point(&self.trades, bar_open_time)
501        };
502
503        // Issue #96 Task #163/#58: Update cache
504        *self.last_binary_search_cache.lock() = Some((bar_open_time, cutoff_idx));
505
506        // Issue #96 Task #167/#58: Update lookahead buffer
507        {
508            let mut buffer = self.lookahead_buffer.lock();
509            buffer.push_back((bar_open_time, cutoff_idx));
510            if buffer.len() > 3 {
511                buffer.pop_front();
512            }
513        }
514
515        // Task #26: Unified loop handles all sizes (0 = no iterations, no special cases needed)
516        let mut result = SmallVec::new();
517        for i in 0..cutoff_idx {
518            result.push(&self.trades[i]);
519        }
520        result
521    }
522
523    /// Get buffer statistics for benchmarking and profiling
524    ///
525    /// Issue #96 Task #155: Exposes pruning state for performance analysis
526    pub fn buffer_stats(&self) -> (usize, usize, usize, usize) {
527        (
528            self.trades.len(),
529            self.max_safe_capacity,
530            self.adaptive_prune_batch,
531            self.prune_stats.0, // trades_pruned
532        )
533    }
534
    /// Compute inter-bar features from lookback window
    ///
    /// Issue #96 Task #99: Memoized float conversions for 2-5% speedup
    /// Extracts prices/volumes once and reuses across all 16 features.
    ///
    /// # Arguments
    ///
    /// * `bar_open_time` - The open timestamp of the current bar (microseconds)
    ///
    /// # Returns
    ///
    /// `InterBarFeatures` with computed values, or `None` for features that
    /// cannot be computed due to insufficient data (defaults for empty lookback).
    pub fn compute_features(&self, bar_open_time: i64) -> InterBarFeatures {
        // Issue #96 Task #44: Eliminate double binary search in compute_features
        // Previously: has_lookback_trades() (mutex lock + binary search + cache update)
        //   then get_lookback_trades() (mutex lock + cache hit + SmallVec construction)
        // Now: trades.is_empty() O(1) fast-path + single get_lookback_trades() call
        // Saves 1 mutex lock acquisition per bar (~0.3-0.5% speedup)
        if self.trades.is_empty() {
            return default_interbar_features();
        }

        let lookback = self.get_lookback_trades(bar_open_time);

        if lookback.is_empty() {
            return default_interbar_features();
        }

        // Issue #96 Task #183: Check feature result cache with try-lock to reduce contention
        // Task #15: Compute cache key once, reuse for both read and write paths
        // (try_read/try_write never block: under contention the cache is simply skipped)
        let cache_key = self.feature_result_cache.as_ref().map(|_| {
            crate::interbar_cache::InterBarCacheKey::from_lookback(&lookback)
        });
        if let (Some(cache), Some(key)) = (&self.feature_result_cache, &cache_key) {
            if let Some(cache_guard) = cache.try_read() {
                if let Some(cached_features) = cache_guard.get(key) {
                    return cached_features;
                }
                drop(cache_guard);
            }
        }

        let mut features = InterBarFeatures::default();

        // === Tier 1: Core Features ===
        self.compute_tier1_features(&lookback, &mut features);

        // === Issue #96 Task #99: Single-pass cache extraction ===
        // Pre-compute all float conversions once, before any Tier 2/3 features
        let cache = if self.config.compute_tier2 || self.config.compute_tier3 {
            Some(crate::interbar_math::extract_lookback_cache(&lookback))
        } else {
            None
        };

        // === Tier 2 & 3: Dynamic Parallelization with CPU-Aware Dispatch (Issue #96 Task #189) ===
        // Adaptive dispatch based on window size, tier complexity, and CPU availability
        // Tier 2: Lower threshold (simpler computation, parallelization benefits earlier)
        // Tier 3: Higher threshold (complex computation, parallelization justified for larger windows)
        // CPU-aware: Avoid oversubscription on systems with few cores

        // Issue #96 Task #189: Dynamic threshold calculation
        // Base thresholds: Tier 2 can parallelize with fewer trades than Tier 3
        const TIER2_PARALLEL_THRESHOLD_BASE: usize = 80;   // Tier 2 parallelizes at 80+ trades
        const TIER3_PARALLEL_THRESHOLD_BASE: usize = 150;  // Tier 3 parallelizes at 150+ trades

        // Task #18: Cache CPU count to avoid repeated syscall per bar
        static CPU_COUNT: std::sync::OnceLock<usize> = std::sync::OnceLock::new();
        let cpu_count = *CPU_COUNT.get_or_init(num_cpus::get);
        let tier2_threshold = if cpu_count == 1 {
            usize::MAX  // Never parallelize on single-core
        } else {
            TIER2_PARALLEL_THRESHOLD_BASE / cpu_count.max(2)
        };

        let tier3_threshold = if cpu_count == 1 {
            usize::MAX  // Never parallelize on single-core
        } else {
            TIER3_PARALLEL_THRESHOLD_BASE / cpu_count.max(2)
        };

        // Dispatch Tier 2 & 3 with independent parallelization decisions.
        // Note: tierN_can_parallelize implies config.compute_tierN, so the
        // (true, _) / (_, true) arms may compute that tier unconditionally.
        let tier2_can_parallelize = self.config.compute_tier2 && lookback.len() >= tier2_threshold;
        let tier3_can_parallelize = self.config.compute_tier3 && lookback.len() >= tier3_threshold;

        match (tier2_can_parallelize, tier3_can_parallelize) {
            // Both eligible: overlap Tier 2 and Tier 3 via rayon join
            (true, true) => {
                let (tier2_features, tier3_features) = join(
                    || self.compute_tier2_features(&lookback, cache.as_ref()),
                    || self.compute_tier3_features(&lookback, cache.as_ref()),
                );
                features.merge_tier2(&tier2_features);
                features.merge_tier3(&tier3_features);
            }
            // Only Tier 2 crossed its threshold: with a single eligible task
            // there is no join partner, so both tiers run sequentially here
            (true, false) => {
                let tier2_features = self.compute_tier2_features(&lookback, cache.as_ref());
                features.merge_tier2(&tier2_features);
                if self.config.compute_tier3 {
                    let tier3_features = self.compute_tier3_features(&lookback, cache.as_ref());
                    features.merge_tier3(&tier3_features);
                }
            }
            // Only Tier 3 crossed its threshold: likewise sequential for both
            (false, true) => {
                if self.config.compute_tier2 {
                    let tier2_features = self.compute_tier2_features(&lookback, cache.as_ref());
                    features.merge_tier2(&tier2_features);
                }
                let tier3_features = self.compute_tier3_features(&lookback, cache.as_ref());
                features.merge_tier3(&tier3_features);
            }
            // Neither eligible: sequential for both (gated by config flags)
            (false, false) => {
                if self.config.compute_tier2 {
                    let tier2_features = self.compute_tier2_features(&lookback, cache.as_ref());
                    features.merge_tier2(&tier2_features);
                }
                if self.config.compute_tier3 {
                    let tier3_features = self.compute_tier3_features(&lookback, cache.as_ref());
                    features.merge_tier3(&tier3_features);
                }
            }
        }

        // Issue #96 Task #183: Store computed features in cache with try-write
        // Task #15: Reuse cache_key computed above (avoids duplicate from_lookback call)
        if let (Some(cache), Some(key)) = (&self.feature_result_cache, cache_key) {
            if let Some(cache_guard) = cache.try_write() {
                cache_guard.insert(key, features);
            }
        }

        features
    }
672
673    /// Compute Tier 1 features (7 features, min 1 trade)
674    /// Issue #96 Task #85: #[inline] — called once per bar from compute_features()
675    #[inline]
676    fn compute_tier1_features(&self, lookback: &[&TradeSnapshot], features: &mut InterBarFeatures) {
677        let n = lookback.len();
678        if n == 0 {
679            return;
680        }
681
682        // Trade count
683        features.lookback_trade_count = Some(n as u32);
684
685        // Issue #96 Task #46: Merge Tier 1 feature folds into single pass
686        // Previously: 3 separate folds (buy/sell, volumes, min/max prices)
687        // Now: Single loop with 8-value accumulation - 8-15% speedup via improved cache locality
688        // Issue #96: Branchless buy/sell accumulation eliminates branch mispredictions
689        let mut buy_vol = 0.0_f64;
690        let mut sell_vol = 0.0_f64;
691        let mut buy_count = 0_u32;
692        let mut sell_count = 0_u32;
693        let mut total_turnover = 0_i128;
694        let mut total_volume_fp = 0_i128;
695        let mut low = i64::MAX;
696        let mut high = i64::MIN;
697
698        for t in lookback.iter() {
699            total_turnover += t.turnover;
700            total_volume_fp += t.volume.0 as i128;
701            low = low.min(t.price.0);
702            high = high.max(t.price.0);
703
704            // Branchless buy/sell accumulation: mask-based arithmetic
705            // is_buyer_maker=true → seller (mask=1.0), false → buyer (mask=0.0)
706            let vol = t.volume.to_f64();
707            let is_seller_mask = t.is_buyer_maker as u32 as f64;
708            sell_vol += vol * is_seller_mask;
709            buy_vol += vol * (1.0 - is_seller_mask);
710
711            let is_seller_count = t.is_buyer_maker as u32;
712            sell_count += is_seller_count;
713            buy_count += 1 - is_seller_count;
714        }
715
716        let total_vol = buy_vol + sell_vol;
717
718        // OFI: Order Flow Imbalance [-1, 1]
719        features.lookback_ofi = Some(if total_vol > f64::EPSILON {
720            (buy_vol - sell_vol) / total_vol
721        } else {
722            0.0
723        });
724
725        // Count imbalance [-1, 1]
726        let total_count = buy_count + sell_count;
727        features.lookback_count_imbalance = Some(if total_count > 0 {
728            (buy_count as f64 - sell_count as f64) / total_count as f64
729        } else {
730            0.0
731        });
732
733        // Duration
734        // Issue #96 Task #86: Direct indexing — n>0 guaranteed by early return above
735        let first_ts = lookback[0].timestamp;
736        let last_ts = lookback[n - 1].timestamp;
737        let duration_us = last_ts - first_ts;
738        features.lookback_duration_us = Some(duration_us);
739
740        // Intensity (trades per second)
741        // Issue #96: Multiply by reciprocal instead of dividing
742        let duration_sec = duration_us as f64 * 1e-6;
743        features.lookback_intensity = Some(if duration_sec > f64::EPSILON {
744            n as f64 / duration_sec
745        } else {
746            n as f64 // Instant window = all trades at once
747        });
748
749        // VWAP (Issue #88: i128 sum to prevent overflow on high-token-count symbols)
750        features.lookback_vwap = Some(if total_volume_fp > 0 {
751            let vwap_raw = total_turnover / total_volume_fp;
752            FixedPoint(vwap_raw as i64)
753        } else {
754            FixedPoint(0)
755        });
756
757        // VWAP position within range [0, 1]
758        let range = (high - low) as f64;
759        let vwap_val = features.lookback_vwap.as_ref().map(|v| v.0).unwrap_or(0);
760        features.lookback_vwap_position = Some(if range > f64::EPSILON {
761            (vwap_val - low) as f64 / range
762        } else {
763            0.5 // Flat price = middle position
764        });
765    }
766
767    /// Compute Tier 2 features (5 features, varying min trades)
768    ///
769    /// Issue #96 Task #99: Optimized with memoized float conversions.
770    /// Uses pre-computed cache passed from compute_features() to avoid
771    /// redundant float conversions across multiple feature functions.
772    /// Issue #115: Refactored to return InterBarFeatures for rayon parallelization support
773    /// Issue #96 Task #85: #[inline] — called once per bar from compute_features()
774    #[inline]
775    fn compute_tier2_features(
776        &self,
777        lookback: &[&TradeSnapshot],
778        cache: Option<&crate::interbar_math::LookbackCache>,
779    ) -> InterBarFeatures {
780        let mut features = InterBarFeatures::default();
781        let n = lookback.len();
782
783        // Issue #96 Task #187: Eliminate redundant SmallVec clone
784        // Use cache reference directly if provided, only extract on cache miss (rare)
785        // Avoids cloning ~400-2000 f64 values per tier computation
786        let cache_owned;
787        let cache = match cache {
788            Some(c) => c,  // Fast path: use reference directly (no clone)
789            None => {
790                // Slow path: extract only when not provided
791                cache_owned = crate::interbar_math::extract_lookback_cache(lookback);
792                &cache_owned
793            }
794        };
795
796        // Kyle's Lambda (min 2 trades)
797        if n >= 2 {
798            features.lookback_kyle_lambda = Some(compute_kyle_lambda(lookback));
799        }
800
801        // Burstiness (min 2 trades for inter-arrival times)
802        if n >= 2 {
803            features.lookback_burstiness = Some(compute_burstiness(lookback));
804        }
805
806        // Volume skewness (min 3 trades)
807        // Issue #96 Task #51: Use pre-computed total_volume for mean (eliminates O(n) sum pass)
808        if n >= 3 {
809            let mean_vol = cache.total_volume / n as f64;
810            let (skew, kurt) = crate::interbar_math::compute_volume_moments_with_mean(&cache.volumes, mean_vol);
811            features.lookback_volume_skew = Some(skew);
812            // Kurtosis requires 4 trades for meaningful estimate
813            if n >= 4 {
814                features.lookback_volume_kurt = Some(kurt);
815            }
816        }
817
818        // Price range (min 1 trade)
819        // Issue #96 Task #99: Use cached open (first price) and OHLC instead of conversion + fold
820        if n >= 1 {
821            let range = cache.high - cache.low;
822            features.lookback_price_range = Some(if cache.open > f64::EPSILON {
823                range / cache.open
824            } else {
825                0.0
826            });
827        }
828
829        // Issue #128: Garman-Klass volatility promoted from Tier 3 → Tier 2
830        if n >= 1 {
831            features.lookback_garman_klass_vol = Some(compute_garman_klass_with_ohlc(
832                cache.open, cache.high, cache.low, cache.close,
833            ));
834        }
835
836        features
837    }
838
    /// Compute Tier 3 features (4 features, higher min trades)
    ///
    /// Returns a fresh `InterBarFeatures`; every field stays `None` when the
    /// lookback is too short or its price data is invalid.
    ///
    /// Issue #96 Task #77: Single-pass OHLC + prices extraction for 1.3-1.6x speedup
    /// Combines price collection with OHLC computation (eliminates double-pass)
    /// Issue #96 Task #10: SmallVec optimization for price allocation (typical 100-500 trades)
    /// Issue #96 Task #99: Reuses memoized float conversions from shared cache
    /// Issue #115: Refactored to return InterBarFeatures for rayon parallelization support
    /// Issue #96 Task #85: #[inline] — called once per bar from compute_features()
    #[inline]
    fn compute_tier3_features(
        &self,
        lookback: &[&TradeSnapshot],
        cache: Option<&crate::interbar_math::LookbackCache>,
    ) -> InterBarFeatures {
        let mut features = InterBarFeatures::default();
        let n = lookback.len();

        // Issue #96 Task #187: Eliminate redundant SmallVec clone
        // Use cache reference directly if provided, only extract on cache miss (rare)
        // Avoids cloning ~400-2000 f64 values per tier computation
        let cache_owned;
        let cache = match cache {
            Some(c) => c,  // Fast path: use reference directly (no clone)
            None => {
                // Slow path: extract only when not provided
                cache_owned = crate::interbar_math::extract_lookback_cache(lookback);
                &cache_owned
            }
        };
        // Issue #110: Avoid cloning prices - all Tier 3 functions accept &[f64]
        // Issue #128: OHLC destructure removed (Garman-Klass moved to Tier 2)
        let prices = &cache.prices;

        // Issue #96 Task #206: Early validity checks on price data
        // Skip Tier 3 computation if price data is invalid (NaN or degenerate)
        // Issue #96 Task #45: Use pre-computed all_prices_finite flag (O(1) vs O(n) scan)
        if prices.is_empty() || !cache.all_prices_finite {
            return features;  // Return default (all None) for invalid prices
        }

        // Kaufman Efficiency Ratio (min 2 trades)
        if n >= 2 {
            features.lookback_kaufman_er = Some(compute_kaufman_er(prices));
        }

        // Issue #128: Garman-Klass moved to compute_tier2_features()

        // Issue #128: Per-feature flag resolution for Hurst and PE
        let should_compute_hurst = self.config.should_compute_hurst();
        let should_compute_pe = self.config.should_compute_permutation_entropy();

        // Entropy: adaptive switching with caching (Issue #96 Task #7 + Task #117)
        // - Small windows (n < 500): Permutation Entropy with caching (Issue #96 Task #117)
        // - Large windows (n >= 500): Approximate Entropy (5-10x faster on large n)
        // Minimum 60 trades for permutation entropy (m=3, need 10 * m! = 60)
        // MUST compute entropy before Hurst for early-exit gating (Issue #96 Task #160)
        // Issue #128: Compute entropy if PE enabled OR if Hurst enabled (for early-exit gating)
        let mut entropy_value: Option<f64> = None;
        if n >= 60 && (should_compute_pe || should_compute_hurst) {
            // Issue #96 Task #156: Try-lock fast-path for entropy cache
            // Attempt read-lock first to check cache without exclusive access.
            // Fall back to write-lock only if miss to reduce lock contention overhead.
            let entropy = if let Some(cache) = self.entropy_cache.try_read() {
                // Fast path: Read lock acquired, check cache
                // (the read-only probe returns None on a cache miss)
                let cache_result = crate::interbar_math::compute_entropy_adaptive_cached_readonly(
                    prices,
                    &cache,
                );

                if let Some(result) = cache_result {
                    // Cache hit: return immediately without lock
                    result
                } else {
                    // Cache miss: drop read lock and acquire write lock
                    drop(cache);
                    let mut cache_guard = self.entropy_cache.write();
                    crate::interbar_math::compute_entropy_adaptive_cached(prices, &mut cache_guard)
                }
            } else {
                // Contended: fall back to write-lock (rare, preserves correctness)
                let mut cache_guard = self.entropy_cache.write();
                crate::interbar_math::compute_entropy_adaptive_cached(prices, &mut cache_guard)
            };

            entropy_value = Some(entropy);
            // Issue #128: Only write PE feature if PE is enabled
            if should_compute_pe {
                features.lookback_permutation_entropy = Some(entropy);
            }
        }

        // Issue #96 Task #160: Hurst early-exit via entropy threshold
        // High-entropy sequences (random walks) inherently have Hurst ≈ 0.5
        // Early-exit logic: if entropy > 0.75 (high randomness), skip expensive computation
        // Performance: 30-40% bars skipped in ranging markets (2-4% speedup)
        // Issue #128: Only compute Hurst if enabled via per-feature flag
        if n >= 64 && should_compute_hurst {
            // Check if entropy is available and indicates high randomness (near random walk)
            let should_skip_hurst = entropy_value.is_some_and(|e| e > 0.75);

            if should_skip_hurst {
                // High entropy indicates random walk behavior → Hurst ≈ 0.5
                // Skipping expensive DFA computation saves ~1-2 µs per bar
                features.lookback_hurst = Some(0.5);
            } else {
                // Low/medium entropy indicates order or mean-reversion → compute Hurst
                features.lookback_hurst = Some(compute_hurst_dfa(prices));
            }
        }

        features
    }
954
955    /// Reset bar boundary tracking (Issue #81)
956    ///
957    /// Called at ouroboros boundaries. Clears bar close indices but preserves
958    /// trade history — trades are still valid lookback data for the first
959    /// bar of the new segment.
960    pub fn reset_bar_boundaries(&mut self) {
961        self.bar_close_indices.clear();
962    }
963
964    /// Clear the trade history (e.g., at ouroboros boundary)
965    pub fn clear(&mut self) {
966        self.trades.clear();
967    }
968
969    /// Get current number of trades in buffer
970    pub fn len(&self) -> usize {
971        self.trades.len()
972    }
973
974    /// Check if buffer is empty
975    pub fn is_empty(&self) -> bool {
976        self.trades.is_empty()
977    }
978}
979
980#[cfg(test)]
981mod tests {
982    use super::*;
983
984    // Helper to create test trades
985    fn create_test_snapshot(
986        timestamp: i64,
987        price: f64,
988        volume: f64,
989        is_buyer_maker: bool,
990    ) -> TradeSnapshot {
991        let price_fp = FixedPoint((price * 1e8) as i64);
992        let volume_fp = FixedPoint((volume * 1e8) as i64);
993        TradeSnapshot {
994            timestamp,
995            price: price_fp,
996            volume: volume_fp,
997            is_buyer_maker,
998            turnover: (price_fp.0 as i128) * (volume_fp.0 as i128),
999        }
1000    }
1001
1002    // ========== OFI Tests ==========
1003
1004    #[test]
1005    fn test_ofi_all_buys() {
1006        let mut history = TradeHistory::new(InterBarConfig::default());
1007
1008        // Add buy trades (is_buyer_maker = false = buy pressure)
1009        for i in 0..10 {
1010            let trade = AggTrade {
1011                agg_trade_id: i,
1012                price: FixedPoint(5000000000000), // 50000
1013                volume: FixedPoint(100000000),    // 1.0
1014                first_trade_id: i,
1015                last_trade_id: i,
1016                timestamp: i * 1000,
1017                is_buyer_maker: false, // Buy
1018                is_best_match: None,
1019            };
1020            history.push(&trade);
1021        }
1022
1023        let features = history.compute_features(10000);
1024
1025        assert!(
1026            (features.lookback_ofi.unwrap() - 1.0).abs() < f64::EPSILON,
1027            "OFI should be 1.0 for all buys, got {}",
1028            features.lookback_ofi.unwrap()
1029        );
1030    }
1031
1032    #[test]
1033    fn test_ofi_all_sells() {
1034        let mut history = TradeHistory::new(InterBarConfig::default());
1035
1036        // Add sell trades (is_buyer_maker = true = sell pressure)
1037        for i in 0..10 {
1038            let trade = AggTrade {
1039                agg_trade_id: i,
1040                price: FixedPoint(5000000000000),
1041                volume: FixedPoint(100000000),
1042                first_trade_id: i,
1043                last_trade_id: i,
1044                timestamp: i * 1000,
1045                is_buyer_maker: true, // Sell
1046                is_best_match: None,
1047            };
1048            history.push(&trade);
1049        }
1050
1051        let features = history.compute_features(10000);
1052
1053        assert!(
1054            (features.lookback_ofi.unwrap() - (-1.0)).abs() < f64::EPSILON,
1055            "OFI should be -1.0 for all sells, got {}",
1056            features.lookback_ofi.unwrap()
1057        );
1058    }
1059
1060    #[test]
1061    fn test_ofi_balanced() {
1062        let mut history = TradeHistory::new(InterBarConfig::default());
1063
1064        // Add equal buy and sell volumes
1065        for i in 0..10 {
1066            let trade = AggTrade {
1067                agg_trade_id: i,
1068                price: FixedPoint(5000000000000),
1069                volume: FixedPoint(100000000),
1070                first_trade_id: i,
1071                last_trade_id: i,
1072                timestamp: i * 1000,
1073                is_buyer_maker: i % 2 == 0, // Alternating
1074                is_best_match: None,
1075            };
1076            history.push(&trade);
1077        }
1078
1079        let features = history.compute_features(10000);
1080
1081        assert!(
1082            features.lookback_ofi.unwrap().abs() < f64::EPSILON,
1083            "OFI should be 0.0 for balanced volumes, got {}",
1084            features.lookback_ofi.unwrap()
1085        );
1086    }
1087
1088    // ========== Burstiness Tests ==========
1089
1090    #[test]
1091    fn test_burstiness_regular_intervals() {
1092        let t0 = create_test_snapshot(0, 100.0, 1.0, false);
1093        let t1 = create_test_snapshot(1000, 100.0, 1.0, false);
1094        let t2 = create_test_snapshot(2000, 100.0, 1.0, false);
1095        let t3 = create_test_snapshot(3000, 100.0, 1.0, false);
1096        let t4 = create_test_snapshot(4000, 100.0, 1.0, false);
1097        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1, &t2, &t3, &t4];
1098
1099        let b = compute_burstiness(&lookback);
1100
1101        // Perfectly regular: sigma = 0 -> B = -1
1102        assert!(
1103            (b - (-1.0)).abs() < 0.01,
1104            "Burstiness should be -1 for regular intervals, got {}",
1105            b
1106        );
1107    }
1108
1109    // ========== Kaufman ER Tests ==========
1110
1111    #[test]
1112    fn test_kaufman_er_perfect_trend() {
1113        let prices = vec![100.0, 101.0, 102.0, 103.0, 104.0];
1114        let er = compute_kaufman_er(&prices);
1115
1116        assert!(
1117            (er - 1.0).abs() < f64::EPSILON,
1118            "Kaufman ER should be 1.0 for perfect trend, got {}",
1119            er
1120        );
1121    }
1122
1123    #[test]
1124    fn test_kaufman_er_round_trip() {
1125        let prices = vec![100.0, 102.0, 104.0, 102.0, 100.0];
1126        let er = compute_kaufman_er(&prices);
1127
1128        assert!(
1129            er.abs() < f64::EPSILON,
1130            "Kaufman ER should be 0.0 for round trip, got {}",
1131            er
1132        );
1133    }
1134
1135    // ========== Permutation Entropy Tests ==========
1136
1137    #[test]
1138    fn test_permutation_entropy_monotonic() {
1139        // Strictly increasing: only pattern 012 appears -> H = 0
1140        let prices: Vec<f64> = (1..=100).map(|i| i as f64).collect();
1141        let pe = compute_permutation_entropy(&prices);
1142
1143        assert!(
1144            pe.abs() < f64::EPSILON,
1145            "PE should be 0 for monotonic, got {}",
1146            pe
1147        );
1148    }
1149
1150    // ========== Temporal Integrity Tests ==========
1151
1152    #[test]
1153    fn test_lookback_excludes_current_bar_trades() {
1154        let mut history = TradeHistory::new(InterBarConfig::default());
1155
1156        // Add trades at timestamps 0, 1000, 2000, 3000
1157        for i in 0..4 {
1158            let trade = AggTrade {
1159                agg_trade_id: i,
1160                price: FixedPoint(5000000000000),
1161                volume: FixedPoint(100000000),
1162                first_trade_id: i,
1163                last_trade_id: i,
1164                timestamp: i * 1000,
1165                is_buyer_maker: false,
1166                is_best_match: None,
1167            };
1168            history.push(&trade);
1169        }
1170
1171        // Get lookback for bar opening at timestamp 2000
1172        let lookback = history.get_lookback_trades(2000);
1173
1174        // Should only include trades with timestamp < 2000 (i.e., 0 and 1000)
1175        assert_eq!(lookback.len(), 2, "Should have 2 trades before bar open");
1176
1177        for trade in &lookback {
1178            assert!(
1179                trade.timestamp < 2000,
1180                "Trade at {} should be before bar open at 2000",
1181                trade.timestamp
1182            );
1183        }
1184    }
1185
1186    // ========== Bounded Output Tests ==========
1187
1188    #[test]
1189    fn test_count_imbalance_bounded() {
1190        let mut history = TradeHistory::new(InterBarConfig::default());
1191
1192        // Add random mix of buys and sells
1193        for i in 0..100 {
1194            let trade = AggTrade {
1195                agg_trade_id: i,
1196                price: FixedPoint(5000000000000),
1197                volume: FixedPoint((i % 10 + 1) * 100000000),
1198                first_trade_id: i,
1199                last_trade_id: i,
1200                timestamp: i * 1000,
1201                is_buyer_maker: i % 3 == 0,
1202                is_best_match: None,
1203            };
1204            history.push(&trade);
1205        }
1206
1207        let features = history.compute_features(100000);
1208        let imb = features.lookback_count_imbalance.unwrap();
1209
1210        assert!(
1211            imb >= -1.0 && imb <= 1.0,
1212            "Count imbalance should be in [-1, 1], got {}",
1213            imb
1214        );
1215    }
1216
1217    #[test]
1218    fn test_vwap_position_bounded() {
1219        let mut history = TradeHistory::new(InterBarConfig::default());
1220
1221        // Add trades at varying prices
1222        for i in 0..20 {
1223            let price = 50000.0 + (i as f64 * 10.0);
1224            let trade = AggTrade {
1225                agg_trade_id: i,
1226                price: FixedPoint((price * 1e8) as i64),
1227                volume: FixedPoint(100000000),
1228                first_trade_id: i,
1229                last_trade_id: i,
1230                timestamp: i * 1000,
1231                is_buyer_maker: false,
1232                is_best_match: None,
1233            };
1234            history.push(&trade);
1235        }
1236
1237        let features = history.compute_features(20000);
1238        let pos = features.lookback_vwap_position.unwrap();
1239
1240        assert!(
1241            pos >= 0.0 && pos <= 1.0,
1242            "VWAP position should be in [0, 1], got {}",
1243            pos
1244        );
1245    }
1246
1247    #[test]
1248    fn test_hurst_soft_clamp_bounded() {
1249        // Test with extreme input values
1250        // Note: tanh approaches 0 and 1 asymptotically, so we use >= and <=
1251        for raw_h in [-10.0, -1.0, 0.0, 0.5, 1.0, 2.0, 10.0] {
1252            let clamped = soft_clamp_hurst(raw_h);
1253            assert!(
1254                clamped >= 0.0 && clamped <= 1.0,
1255                "Hurst {} soft-clamped to {} should be in [0, 1]",
1256                raw_h,
1257                clamped
1258            );
1259        }
1260
1261        // Verify 0.5 maps to 0.5 exactly
1262        let h_half = soft_clamp_hurst(0.5);
1263        assert!(
1264            (h_half - 0.5).abs() < f64::EPSILON,
1265            "Hurst 0.5 should map to 0.5, got {}",
1266            h_half
1267        );
1268    }
1269
1270    // ========== Edge Case Tests ==========
1271
1272    #[test]
1273    fn test_empty_lookback() {
1274        let history = TradeHistory::new(InterBarConfig::default());
1275        let features = history.compute_features(1000);
1276
1277        assert!(
1278            features.lookback_trade_count.is_none() || features.lookback_trade_count == Some(0)
1279        );
1280    }
1281
1282    #[test]
1283    fn test_single_trade_lookback() {
1284        let mut history = TradeHistory::new(InterBarConfig::default());
1285
1286        let trade = AggTrade {
1287            agg_trade_id: 0,
1288            price: FixedPoint(5000000000000),
1289            volume: FixedPoint(100000000),
1290            first_trade_id: 0,
1291            last_trade_id: 0,
1292            timestamp: 0,
1293            is_buyer_maker: false,
1294            is_best_match: None,
1295        };
1296        history.push(&trade);
1297
1298        let features = history.compute_features(1000);
1299
1300        assert_eq!(features.lookback_trade_count, Some(1));
1301        assert_eq!(features.lookback_duration_us, Some(0)); // Single trade = 0 duration
1302    }
1303
1304    #[test]
1305    fn test_kyle_lambda_zero_imbalance() {
1306        // Equal buy/sell -> imbalance = 0 -> should return 0, not infinity
1307        let t0 = create_test_snapshot(0, 100.0, 1.0, false); // buy
1308        let t1 = create_test_snapshot(1000, 102.0, 1.0, true); // sell
1309        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1];
1310
1311        let lambda = compute_kyle_lambda(&lookback);
1312
1313        assert!(
1314            lambda.is_finite(),
1315            "Kyle lambda should be finite, got {}",
1316            lambda
1317        );
1318        assert!(
1319            lambda.abs() < f64::EPSILON,
1320            "Kyle lambda should be 0 for zero imbalance"
1321        );
1322    }
1323
1324    // ========== Tier 2 Features: Comprehensive Edge Cases (Issue #96 Task #43) ==========
1325
1326    #[test]
1327    fn test_kyle_lambda_strong_buy_pressure() {
1328        // Strong buy pressure: many buys, few sells -> positive lambda
1329        let trades: Vec<TradeSnapshot> = (0..5)
1330            .map(|i| create_test_snapshot(i * 1000, 100.0 + i as f64, 1.0, false))
1331            .chain((5..7).map(|i| create_test_snapshot(i * 1000, 100.0 + i as f64, 1.0, true)))
1332            .collect();
1333        let lookback: Vec<&TradeSnapshot> = trades.iter().collect();
1334
1335        let lambda = compute_kyle_lambda(&lookback);
1336        assert!(lambda > 0.0, "Buy pressure should yield positive lambda, got {}", lambda);
1337        assert!(lambda.is_finite(), "Kyle lambda should be finite");
1338    }
1339
1340    #[test]
1341    fn test_kyle_lambda_strong_sell_pressure() {
1342        // Strong sell pressure: many sell orders (is_buyer_maker=true) at declining prices
1343        let t0 = create_test_snapshot(0, 100.0, 1.0, false);    // buy
1344        let t1 = create_test_snapshot(1000, 99.9, 5.0, true);   // sell (larger)
1345        let t2 = create_test_snapshot(2000, 99.8, 5.0, true);   // sell (larger)
1346        let t3 = create_test_snapshot(3000, 99.7, 5.0, true);   // sell (larger)
1347        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1, &t2, &t3];
1348
1349        let lambda = compute_kyle_lambda(&lookback);
1350        assert!(lambda.is_finite(), "Kyle lambda should be finite");
1351        // With sell volume > buy volume and price declining, lambda should be negative
1352    }
1353
1354    #[test]
1355    fn test_burstiness_single_trade() {
1356        // Single trade: no inter-arrivals, should return default
1357        let t0 = create_test_snapshot(0, 100.0, 1.0, false);
1358        let lookback: Vec<&TradeSnapshot> = vec![&t0];
1359
1360        let b = compute_burstiness(&lookback);
1361        assert!(
1362            b.is_finite(),
1363            "Burstiness with single trade should be finite, got {}",
1364            b
1365        );
1366    }
1367
1368    #[test]
1369    fn test_burstiness_two_trades() {
1370        // Two trades: insufficient data, sigma = 0 -> B = -1
1371        let t0 = create_test_snapshot(0, 100.0, 1.0, false);
1372        let t1 = create_test_snapshot(1000, 100.0, 1.0, false);
1373        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1];
1374
1375        let b = compute_burstiness(&lookback);
1376        assert!(
1377            (b - (-1.0)).abs() < 0.01,
1378            "Burstiness with uniform inter-arrivals should be -1, got {}",
1379            b
1380        );
1381    }
1382
1383    #[test]
1384    fn test_burstiness_bursty_arrivals() {
1385        // Uneven inter-arrivals: clusters of fast then slow arrivals
1386        let t0 = create_test_snapshot(0, 100.0, 1.0, false);
1387        let t1 = create_test_snapshot(100, 100.0, 1.0, false);
1388        let t2 = create_test_snapshot(200, 100.0, 1.0, false);
1389        let t3 = create_test_snapshot(5000, 100.0, 1.0, false);
1390        let t4 = create_test_snapshot(10000, 100.0, 1.0, false);
1391        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1, &t2, &t3, &t4];
1392
1393        let b = compute_burstiness(&lookback);
1394        assert!(
1395            b > -1.0 && b <= 1.0,
1396            "Burstiness should be bounded [-1, 1], got {}",
1397            b
1398        );
1399    }
1400
1401    #[test]
1402    fn test_volume_skew_right_skewed() {
1403        // Right-skewed distribution (many small, few large volumes)
1404        let t0 = create_test_snapshot(0, 100.0, 0.1, false);
1405        let t1 = create_test_snapshot(1000, 100.0, 0.1, false);
1406        let t2 = create_test_snapshot(2000, 100.0, 0.1, false);
1407        let t3 = create_test_snapshot(3000, 100.0, 0.1, false);
1408        let t4 = create_test_snapshot(4000, 100.0, 10.0, false); // Large outlier
1409        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1, &t2, &t3, &t4];
1410
1411        let skew = compute_volume_moments(&lookback).0;
1412        assert!(skew > 0.0, "Right-skewed volume should have positive skewness, got {}", skew);
1413        assert!(skew.is_finite(), "Skewness must be finite");
1414    }
1415
1416    #[test]
1417    fn test_volume_kurtosis_heavy_tails() {
1418        // Heavy-tailed distribution (few very large, few very small, middle is sparse)
1419        let t0 = create_test_snapshot(0, 100.0, 0.01, false);
1420        let t1 = create_test_snapshot(1000, 100.0, 1.0, false);
1421        let t2 = create_test_snapshot(2000, 100.0, 1.0, false);
1422        let t3 = create_test_snapshot(3000, 100.0, 1.0, false);
1423        let t4 = create_test_snapshot(4000, 100.0, 100.0, false);
1424        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1, &t2, &t3, &t4];
1425
1426        let kurtosis = compute_volume_moments(&lookback).1;
1427        assert!(kurtosis > 0.0, "Heavy-tailed distribution should have positive kurtosis, got {}", kurtosis);
1428        assert!(kurtosis.is_finite(), "Kurtosis must be finite");
1429    }
1430
1431    #[test]
1432    fn test_volume_skew_symmetric() {
1433        // Symmetric distribution (equal volumes) -> skewness = 0
1434        let t0 = create_test_snapshot(0, 100.0, 1.0, false);
1435        let t1 = create_test_snapshot(1000, 100.0, 1.0, false);
1436        let t2 = create_test_snapshot(2000, 100.0, 1.0, false);
1437        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1, &t2];
1438
1439        let skew = compute_volume_moments(&lookback).0;
1440        assert!(
1441            skew.abs() < f64::EPSILON,
1442            "Symmetric volume distribution should have near-zero skewness, got {}",
1443            skew
1444        );
1445    }
1446
1447    #[test]
1448    fn test_kyle_lambda_price_unchanged() {
1449        // Price doesn't move but there's imbalance -> should be finite
1450        let t0 = create_test_snapshot(0, 100.0, 1.0, false);
1451        let t1 = create_test_snapshot(1000, 100.0, 1.0, false);
1452        let t2 = create_test_snapshot(2000, 100.0, 1.0, false);
1453        let lookback: Vec<&TradeSnapshot> = vec![&t0, &t1, &t2];
1454
1455        let lambda = compute_kyle_lambda(&lookback);
1456        assert!(
1457            lambda.is_finite(),
1458            "Kyle lambda should be finite even with no price change, got {}",
1459            lambda
1460        );
1461    }
1462
1463    // ========== BarRelative Mode Tests (Issue #81) ==========
1464
1465    /// Helper to create a test AggTrade
1466    fn make_trade(id: i64, timestamp: i64) -> AggTrade {
1467        AggTrade {
1468            agg_trade_id: id,
1469            price: FixedPoint(5000000000000), // 50000
1470            volume: FixedPoint(100000000),    // 1.0
1471            first_trade_id: id,
1472            last_trade_id: id,
1473            timestamp,
1474            is_buyer_maker: false,
1475            is_best_match: None,
1476        }
1477    }
1478
1479    #[test]
1480    fn test_bar_relative_bootstrap_keeps_all_trades() {
1481        // Before any bars close, BarRelative should keep all trades
1482        let config = InterBarConfig {
1483            lookback_mode: LookbackMode::BarRelative(3),
1484            compute_tier2: false,
1485            compute_tier3: false,
1486            ..Default::default()
1487        };
1488        let mut history = TradeHistory::new(config);
1489
1490        // Push 100 trades without closing any bar
1491        for i in 0..100 {
1492            history.push(&make_trade(i, i * 1000));
1493        }
1494
1495        assert_eq!(history.len(), 100, "Bootstrap phase should keep all trades");
1496    }
1497
    #[test]
    fn test_bar_relative_prunes_after_bar_close() {
        // Three bars of different sizes are closed, then one extra trade is
        // pushed. Verifies that capacity-based pruning (Issue #96 Task #155)
        // does NOT remove anything while total volume stays far below the
        // BarRelative max_safe_capacity of 2000 trades.
        let config = InterBarConfig {
            lookback_mode: LookbackMode::BarRelative(2),
            compute_tier2: false,
            compute_tier3: false,
            ..Default::default()
        };
        let mut history = TradeHistory::new(config);

        // Bar 1: 10 trades (timestamps 0-9000)
        for i in 0..10 {
            history.push(&make_trade(i, i * 1000));
        }
        history.on_bar_close(); // total_pushed = 10

        // Bar 2: 20 trades (timestamps 10000-29000)
        for i in 10..30 {
            history.push(&make_trade(i, i * 1000));
        }
        history.on_bar_close(); // total_pushed = 30

        // Bar 3: 5 trades (timestamps 30000-34000)
        for i in 30..35 {
            history.push(&make_trade(i, i * 1000));
        }
        history.on_bar_close(); // total_pushed = 35

        // Pruning is evaluated on push (on_bar_close only records the bar
        // boundary), so one more trade gives prune_if_needed() a chance to run
        // with all three boundaries recorded.
        history.push(&make_trade(35, 35000));

        // Issue #96 Task #155: max_safe_capacity for BarRelative = 2000.
        // With only 36 trades total, prune_if_needed() never fires (36 < 2000).
        // All trades are preserved — this is correct capacity-based behavior.
        assert_eq!(
            history.len(),
            36,
            "All trades preserved below max_safe_capacity (2000), got {}",
            history.len()
        );
    }
1551
1552    #[test]
1553    fn test_bar_relative_mixed_bar_sizes() {
1554        let config = InterBarConfig {
1555            lookback_mode: LookbackMode::BarRelative(2),
1556            compute_tier2: false,
1557            compute_tier3: false,
1558            ..Default::default()
1559        };
1560        let mut history = TradeHistory::new(config);
1561
1562        // Bar 1: 5 trades
1563        for i in 0..5 {
1564            history.push(&make_trade(i, i * 1000));
1565        }
1566        history.on_bar_close();
1567
1568        // Bar 2: 50 trades
1569        for i in 5..55 {
1570            history.push(&make_trade(i, i * 1000));
1571        }
1572        history.on_bar_close();
1573
1574        // Bar 3: 3 trades
1575        for i in 55..58 {
1576            history.push(&make_trade(i, i * 1000));
1577        }
1578        history.on_bar_close();
1579
1580        // Push one more to trigger prune
1581        history.push(&make_trade(58, 58000));
1582
1583        // Issue #96 Task #155: max_safe_capacity for BarRelative = 2000.
1584        // With only 59 trades total, prune_if_needed() never fires (59 < 2000).
1585        // All trades are preserved — this is correct capacity-based behavior.
1586        assert_eq!(
1587            history.len(),
1588            59,
1589            "All trades preserved below max_safe_capacity (2000), got {}",
1590            history.len()
1591        );
1592    }
1593
1594    #[test]
1595    fn test_bar_relative_lookback_features_computed() {
1596        let config = InterBarConfig {
1597            lookback_mode: LookbackMode::BarRelative(3),
1598            compute_tier2: false,
1599            compute_tier3: false,
1600            ..Default::default()
1601        };
1602        let mut history = TradeHistory::new(config);
1603
1604        // Push 20 trades (timestamps 0-19000)
1605        for i in 0..20 {
1606            let price = 50000.0 + (i as f64 * 10.0);
1607            let trade = AggTrade {
1608                agg_trade_id: i,
1609                price: FixedPoint((price * 1e8) as i64),
1610                volume: FixedPoint(100000000),
1611                first_trade_id: i,
1612                last_trade_id: i,
1613                timestamp: i * 1000,
1614                is_buyer_maker: i % 2 == 0,
1615                is_best_match: None,
1616            };
1617            history.push(&trade);
1618        }
1619        // Close bar 1 at total_pushed=20
1620        history.on_bar_close();
1621
1622        // Simulate bar 2 opening at timestamp 20000
1623        history.on_bar_open(20000);
1624
1625        // Compute features for bar 2 -- should use trades before 20000
1626        let features = history.compute_features(20000);
1627
1628        // All 20 trades are before bar open, should have lookback features
1629        assert_eq!(features.lookback_trade_count, Some(20));
1630        assert!(features.lookback_ofi.is_some());
1631        assert!(features.lookback_intensity.is_some());
1632    }
1633
1634    #[test]
1635    fn test_bar_relative_reset_bar_boundaries() {
1636        let config = InterBarConfig {
1637            lookback_mode: LookbackMode::BarRelative(2),
1638            compute_tier2: false,
1639            compute_tier3: false,
1640            ..Default::default()
1641        };
1642        let mut history = TradeHistory::new(config);
1643
1644        // Push trades and close a bar
1645        for i in 0..10 {
1646            history.push(&make_trade(i, i * 1000));
1647        }
1648        history.on_bar_close();
1649
1650        assert_eq!(history.bar_close_indices.len(), 1);
1651
1652        // Reset boundaries (ouroboros)
1653        history.reset_bar_boundaries();
1654
1655        assert!(
1656            history.bar_close_indices.is_empty(),
1657            "bar_close_indices should be empty after reset"
1658        );
1659        // Trades should still be there
1660        assert_eq!(
1661            history.len(),
1662            10,
1663            "Trades should persist after boundary reset"
1664        );
1665    }
1666
1667    #[test]
1668    fn test_bar_relative_on_bar_close_limits_indices() {
1669        let config = InterBarConfig {
1670            lookback_mode: LookbackMode::BarRelative(2),
1671            compute_tier2: false,
1672            compute_tier3: false,
1673            ..Default::default()
1674        };
1675        let mut history = TradeHistory::new(config);
1676
1677        // Close 5 bars
1678        for bar_num in 0..5 {
1679            for i in 0..5 {
1680                history.push(&make_trade(bar_num * 5 + i, (bar_num * 5 + i) * 1000));
1681            }
1682            history.on_bar_close();
1683        }
1684
1685        // With BarRelative(2), should keep at most n+1=3 boundaries
1686        assert!(
1687            history.bar_close_indices.len() <= 3,
1688            "Should keep at most n+1 boundaries, got {}",
1689            history.bar_close_indices.len()
1690        );
1691    }
1692
1693    #[test]
1694    fn test_bar_relative_does_not_affect_fixed_count() {
1695        // Verify FixedCount mode is unaffected by BarRelative changes
1696        let config = InterBarConfig {
1697            lookback_mode: LookbackMode::FixedCount(10),
1698            compute_tier2: false,
1699            compute_tier3: false,
1700            ..Default::default()
1701        };
1702        let mut history = TradeHistory::new(config);
1703
1704        for i in 0..30 {
1705            history.push(&make_trade(i, i * 1000));
1706        }
1707        // on_bar_close should be no-op for FixedCount
1708        history.on_bar_close();
1709
1710        // FixedCount(10) keeps 2*10=20 max
1711        assert!(
1712            history.len() <= 20,
1713            "FixedCount(10) should keep at most 20 trades, got {}",
1714            history.len()
1715        );
1716        assert!(
1717            history.bar_close_indices.is_empty(),
1718            "FixedCount should not track bar boundaries"
1719        );
1720    }
1721
1722    // === Memory efficiency tests (R5) ===
1723
1724    #[test]
1725    fn test_volume_moments_numerical_accuracy() {
1726        // R5: Verify 2-pass fold produces identical results to previous 4-pass.
1727        // Symmetric distribution [1,2,3,4,5] → skewness ≈ 0
1728        let price_fp = FixedPoint((100.0 * 1e8) as i64);
1729        let snapshots: Vec<TradeSnapshot> = (1..=5_i64)
1730            .map(|v| {
1731                let volume_fp = FixedPoint((v as f64 * 1e8) as i64);
1732                TradeSnapshot {
1733                    price: price_fp,
1734                    volume: volume_fp,
1735                    timestamp: v * 1000,
1736                    is_buyer_maker: false,
1737                    turnover: price_fp.0 as i128 * volume_fp.0 as i128,
1738                }
1739            })
1740            .collect();
1741        let refs: Vec<&TradeSnapshot> = snapshots.iter().collect();
1742        let (skew, kurt) = compute_volume_moments(&refs);
1743
1744        // Symmetric uniform-like distribution: skewness should be 0
1745        assert!(
1746            skew.abs() < 1e-10,
1747            "Symmetric distribution should have skewness ≈ 0, got {skew}"
1748        );
1749        // Uniform distribution excess kurtosis = -1.3
1750        assert!(
1751            (kurt - (-1.3)).abs() < 0.1,
1752            "Uniform-like kurtosis should be ≈ -1.3, got {kurt}"
1753        );
1754    }
1755
1756    #[test]
1757    fn test_volume_moments_edge_cases() {
1758        let price_fp = FixedPoint((100.0 * 1e8) as i64);
1759
1760        // n < 3 returns (0, 0)
1761        let v1 = FixedPoint((1.0 * 1e8) as i64);
1762        let v2 = FixedPoint((2.0 * 1e8) as i64);
1763        let s1 = TradeSnapshot {
1764            price: price_fp,
1765            volume: v1,
1766            timestamp: 1000,
1767            is_buyer_maker: false,
1768            turnover: price_fp.0 as i128 * v1.0 as i128,
1769        };
1770        let s2 = TradeSnapshot {
1771            price: price_fp,
1772            volume: v2,
1773            timestamp: 2000,
1774            is_buyer_maker: false,
1775            turnover: price_fp.0 as i128 * v2.0 as i128,
1776        };
1777        let refs: Vec<&TradeSnapshot> = vec![&s1, &s2];
1778        let (skew, kurt) = compute_volume_moments(&refs);
1779        assert_eq!(skew, 0.0, "n < 3 should return 0");
1780        assert_eq!(kurt, 0.0, "n < 3 should return 0");
1781
1782        // All same volume returns (0, 0)
1783        let vol = FixedPoint((5.0 * 1e8) as i64);
1784        let same: Vec<TradeSnapshot> = (0..10_i64)
1785            .map(|i| TradeSnapshot {
1786                price: price_fp,
1787                volume: vol,
1788                timestamp: i * 1000,
1789                is_buyer_maker: false,
1790                turnover: price_fp.0 as i128 * vol.0 as i128,
1791            })
1792            .collect();
1793        let refs: Vec<&TradeSnapshot> = same.iter().collect();
1794        let (skew, kurt) = compute_volume_moments(&refs);
1795        assert_eq!(skew, 0.0, "All same volume should return 0");
1796        assert_eq!(kurt, 0.0, "All same volume should return 0");
1797    }
1798
1799    // ========== Optimization Regression Tests (Task #115-119) ==========
1800
1801    #[test]
1802    fn test_optimization_edge_case_zero_trades() {
1803        // Task #115-119: Verify optimizations handle edge case of zero trades gracefully
1804        let history = TradeHistory::new(InterBarConfig::default());
1805
1806        // Try to compute features with no trades
1807        let features = history.compute_features(1000);
1808
1809        // All features should be None for empty lookback
1810        assert!(features.lookback_ofi.is_none());
1811        assert!(features.lookback_kyle_lambda.is_none());
1812        assert!(features.lookback_hurst.is_none());
1813    }
1814
1815    #[test]
1816    fn test_optimization_edge_case_large_lookback() {
1817        // Task #118/119: Verify optimizations handle large lookback windows correctly
1818        // Tests VecDeque capacity optimization and SmallVec trade accumulation
1819        let config = InterBarConfig {
1820            lookback_mode: LookbackMode::FixedCount(500),
1821            ..Default::default()
1822        };
1823        let mut history = TradeHistory::new(config);
1824
1825        // Add 600 trades (exceeds 500-trade lookback)
1826        for i in 0..600_i64 {
1827            let snapshot = create_test_snapshot(i * 1000, 100.0, 10.0, i % 2 == 0);
1828            history.push(&AggTrade {
1829                agg_trade_id: i,
1830                price: snapshot.price,
1831                volume: snapshot.volume,
1832                first_trade_id: i,
1833                last_trade_id: i,
1834                timestamp: snapshot.timestamp,
1835                is_buyer_maker: snapshot.is_buyer_maker,
1836                is_best_match: Some(false),
1837            });
1838        }
1839
1840        // Verify that pruning maintains correct lookback window
1841        let lookback = history.get_lookback_trades(599000);
1842        assert!(
1843            lookback.len() <= 600, // Should be around 500, maybe a bit more
1844            "Lookback should be <= 600 trades, got {}", lookback.len()
1845        );
1846
1847        // Compute features - this exercises the optimizations
1848        let features = history.compute_features(599000);
1849
1850        // Tier 1 features should be present
1851        assert!(features.lookback_trade_count.is_some(), "Trade count should be computed");
1852        assert!(features.lookback_ofi.is_some(), "OFI should be computed");
1853    }
1854
1855    #[test]
1856    fn test_optimization_edge_case_single_trade() {
1857        // Task #115-119: Verify optimizations handle single-trade edge case
1858        let mut history = TradeHistory::new(InterBarConfig::default());
1859
1860        let snapshot = create_test_snapshot(1000, 100.0, 10.0, false);
1861        history.push(&AggTrade {
1862            agg_trade_id: 1,
1863            price: snapshot.price,
1864            volume: snapshot.volume,
1865            first_trade_id: 1,
1866            last_trade_id: 1,
1867            timestamp: snapshot.timestamp,
1868            is_buyer_maker: snapshot.is_buyer_maker,
1869            is_best_match: Some(false),
1870        });
1871
1872        let features = history.compute_features(2000);
1873
1874        // Tier 1 should compute (only 1 trade needed)
1875        assert!(features.lookback_trade_count.is_some());
1876        // Tier 3 definitely not (needs >= 60 for Hurst/Entropy)
1877        assert!(features.lookback_hurst.is_none());
1878    }
1879
1880    #[test]
1881    fn test_optimization_many_trades() {
1882        // Task #119: Verify SmallVec handles typical bar trade counts (100-500)
1883        let mut history = TradeHistory::new(InterBarConfig::default());
1884
1885        // Add 300 trades
1886        for i in 0..300_i64 {
1887            let snapshot = create_test_snapshot(
1888                i * 1000,
1889                100.0 + (i as f64 % 10.0),
1890                10.0 + (i as f64 % 5.0),
1891                i % 2 == 0,
1892            );
1893            history.push(&AggTrade {
1894                agg_trade_id: i,
1895                price: snapshot.price,
1896                volume: snapshot.volume,
1897                first_trade_id: i,
1898                last_trade_id: i,
1899                timestamp: snapshot.timestamp,
1900                is_buyer_maker: snapshot.is_buyer_maker,
1901                is_best_match: Some(false),
1902            });
1903        }
1904
1905        // Get lookback trades
1906        let lookback = history.get_lookback_trades(299000);
1907
1908        // Compute features with both tiers enabled (Task #115: rayon parallelization)
1909        let features = history.compute_features(299000);
1910
1911        // Verify Tier 2 features are present
1912        assert!(features.lookback_kyle_lambda.is_some(), "Kyle lambda should be computed");
1913        assert!(features.lookback_burstiness.is_some(), "Burstiness should be computed");
1914
1915        // Verify Tier 3 features are present (only if n >= 60)
1916        if lookback.len() >= 60 {
1917            assert!(features.lookback_hurst.is_some(), "Hurst should be computed");
1918            assert!(features.lookback_permutation_entropy.is_some(), "Entropy should be computed");
1919        }
1920    }
1921
1922    #[test]
1923    fn test_trade_history_with_external_cache() {
1924        // Issue #145 Phase 2: Test that TradeHistory accepts optional external cache
1925        use crate::entropy_cache_global::get_global_entropy_cache;
1926
1927        // Test 1: Local cache (backward compatible)
1928        let _local_history = TradeHistory::new(InterBarConfig::default());
1929        // Should work without issues - backward compatible
1930
1931        // Test 2: External global cache
1932        let global_cache = get_global_entropy_cache();
1933        let _shared_history = TradeHistory::new_with_cache(InterBarConfig::default(), Some(global_cache.clone()));
1934        // Should work without issues - uses provided cache
1935
1936        // Both constructors work correctly and can be created without panicking
1937    }
1938
1939    #[test]
1940    fn test_feature_result_cache_hit_miss() {
1941        // Issue #96 Task #144 Phase 4: Verify cache hit/miss behavior
1942        use crate::types::AggTrade;
1943
1944        fn create_test_trade(price: f64, volume: f64, is_buyer_maker: bool) -> AggTrade {
1945            AggTrade {
1946                agg_trade_id: 1,
1947                timestamp: 1000000,
1948                price: FixedPoint((price * 1e8) as i64),
1949                volume: FixedPoint((volume * 1e8) as i64),
1950                first_trade_id: 1,
1951                last_trade_id: 1,
1952                is_buyer_maker,
1953                is_best_match: Some(true),
1954            }
1955        }
1956
1957        // Create trade history with Tier 1 only for speed
1958        let mut history = TradeHistory::new(InterBarConfig {
1959            lookback_mode: LookbackMode::FixedCount(50),
1960            compute_tier2: false,
1961            compute_tier3: false,
1962            ..Default::default()
1963        });
1964
1965        // Create test trades
1966        let trades = vec![
1967            create_test_trade(100.0, 1.0, false),
1968            create_test_trade(100.5, 1.5, true),
1969            create_test_trade(100.2, 1.2, false),
1970        ];
1971
1972        for trade in &trades {
1973            history.push(trade);
1974        }
1975
1976        // First call: cache miss (computes features and stores in cache)
1977        let features1 = history.compute_features(2000000);
1978        assert!(features1.lookback_trade_count == Some(3));
1979
1980        // Second call: cache hit (retrieves from cache)
1981        let features2 = history.compute_features(2000000);
1982        assert!(features2.lookback_trade_count == Some(3));
1983
1984        // Both should produce identical results
1985        assert_eq!(features1.lookback_ofi, features2.lookback_ofi);
1986        assert_eq!(features1.lookback_count_imbalance, features2.lookback_count_imbalance);
1987    }
1988
1989    #[test]
1990    fn test_feature_result_cache_multiple_computations() {
1991        // Issue #96 Task #144 Phase 4: Verify cache works across multiple computations
1992        use crate::types::AggTrade;
1993
1994        fn create_test_trade(price: f64, volume: f64, timestamp: i64, is_buyer_maker: bool) -> AggTrade {
1995            AggTrade {
1996                agg_trade_id: 1,
1997                timestamp,
1998                price: FixedPoint((price * 1e8) as i64),
1999                volume: FixedPoint((volume * 1e8) as i64),
2000                first_trade_id: 1,
2001                last_trade_id: 1,
2002                is_buyer_maker,
2003                is_best_match: Some(true),
2004            }
2005        }
2006
2007        let mut history = TradeHistory::new(InterBarConfig {
2008            lookback_mode: LookbackMode::FixedCount(50),
2009            compute_tier2: false,
2010            compute_tier3: false,
2011            ..Default::default()
2012        });
2013
2014        // Create trades with specific timestamps
2015        let trades = vec![
2016            create_test_trade(100.0, 1.0, 1000000, false),
2017            create_test_trade(100.5, 1.5, 2000000, true),
2018            create_test_trade(100.2, 1.2, 3000000, false),
2019            create_test_trade(100.1, 1.1, 4000000, true),
2020        ];
2021
2022        for trade in &trades {
2023            history.push(trade);
2024        }
2025
2026        // First computation - cache miss
2027        let features1 = history.compute_features(5000000); // Bar open after all trades
2028        assert_eq!(features1.lookback_trade_count, Some(4));
2029        let ofi1 = features1.lookback_ofi;
2030
2031        // Second computation with same bar_open_time - cache hit
2032        let features2 = history.compute_features(5000000);
2033        assert_eq!(features2.lookback_trade_count, Some(4));
2034        assert_eq!(features2.lookback_ofi, ofi1, "Cache hit should return identical OFI");
2035
2036        // Third computation - different bar_open_time, different window
2037        let features3 = history.compute_features(3500000); // Gets trades before 3.5M (3 trades)
2038        assert_eq!(features3.lookback_trade_count, Some(3));
2039
2040        // Fourth computation - same as first, should reuse cache
2041        let features4 = history.compute_features(5000000);
2042        assert_eq!(features4.lookback_ofi, ofi1, "Cache reuse should return identical results");
2043    }
2044
2045    #[test]
2046    fn test_feature_result_cache_different_windows() {
2047        // Issue #96 Task #144 Phase 4: Verify cache distinguishes different windows
2048        use crate::types::AggTrade;
2049
2050        fn create_test_trade(price: f64, volume: f64, timestamp: i64, is_buyer_maker: bool) -> AggTrade {
2051            AggTrade {
2052                agg_trade_id: 1,
2053                timestamp,
2054                price: FixedPoint((price * 1e8) as i64),
2055                volume: FixedPoint((volume * 1e8) as i64),
2056                first_trade_id: 1,
2057                last_trade_id: 1,
2058                is_buyer_maker,
2059                is_best_match: Some(true),
2060            }
2061        }
2062
2063        let mut history = TradeHistory::new(InterBarConfig {
2064            lookback_mode: LookbackMode::FixedCount(100),
2065            compute_tier2: false,
2066            compute_tier3: false,
2067            ..Default::default()
2068        });
2069
2070        // Add 10 trades with sequential timestamps
2071        for i in 0..10 {
2072            let trade = create_test_trade(
2073                100.0 + (i as f64 * 0.1),
2074                1.0 + (i as f64 * 0.01),
2075                1000000 + (i as i64 * 100000), // Timestamps: 1M, 1.1M, 1.2M, ..., 1.9M
2076                i % 2 == 0,
2077            );
2078            history.push(&trade);
2079        }
2080
2081        // Compute features at bar_open_time=2M (gets all 10 trades, all have ts < 2M)
2082        let features1 = history.compute_features(2000000);
2083        assert_eq!(features1.lookback_trade_count, Some(10));
2084
2085        // Add more trades beyond the bar_open_time cutoff (timestamps >= 2M)
2086        for i in 10..15 {
2087            let trade = create_test_trade(
2088                100.0 + (i as f64 * 0.1),
2089                1.0 + (i as f64 * 0.01),
2090                2000000 + (i as i64 * 100000), // Timestamps: 2M, 2.1M, ..., 2.4M (after bar_open_time)
2091                i % 2 == 0,
2092            );
2093            history.push(&trade);
2094        }
2095
2096        // Compute features at same bar_open_time=2M - should still get only 10 trades (same lookback cutoff)
2097        let features2 = history.compute_features(2000000);
2098        assert_eq!(features2.lookback_trade_count, Some(10));
2099
2100        // Results should be identical (same window)
2101        assert_eq!(features1.lookback_ofi, features2.lookback_ofi);
2102    }
2103
2104    #[test]
2105    fn test_adaptive_pruning_batch_size_tracked() {
2106        // Issue #96 Task #155: Verify adaptive pruning batch size is tracked
2107        use crate::types::AggTrade;
2108
2109        fn create_test_trade(price: f64, timestamp: i64) -> AggTrade {
2110            AggTrade {
2111                agg_trade_id: 1,
2112                timestamp,
2113                price: FixedPoint((price * 1e8) as i64),
2114                volume: FixedPoint((1.0 * 1e8) as i64),
2115                first_trade_id: 1,
2116                last_trade_id: 1,
2117                is_buyer_maker: false,
2118                is_best_match: Some(true),
2119            }
2120        }
2121
2122        let mut history = TradeHistory::new(InterBarConfig {
2123            lookback_mode: LookbackMode::FixedCount(100),
2124            compute_tier2: false,
2125            compute_tier3: false,
2126            ..Default::default()
2127        });
2128
2129        let initial_batch = history.adaptive_prune_batch;
2130        assert!(initial_batch > 0, "Initial batch size should be positive");
2131
2132        // Add trades and verify batch size remains reasonable
2133        for i in 0..100 {
2134            let trade = create_test_trade(
2135                100.0 + (i as f64 * 0.01),
2136                1_000_000 + (i as i64 * 100),
2137            );
2138            history.push(&trade);
2139        }
2140
2141        // Batch size should be reasonable (not zero, not excessively large)
2142        assert!(
2143            history.adaptive_prune_batch > 0 && history.adaptive_prune_batch <= initial_batch * 4,
2144            "Batch size should be reasonable"
2145        );
2146    }
2147
2148    #[test]
2149    fn test_adaptive_pruning_deferred() {
2150        // Issue #96 Task #155: Verify deferred pruning respects capacity bounds
2151        use crate::types::AggTrade;
2152
2153        fn create_test_trade(price: f64, timestamp: i64) -> AggTrade {
2154            AggTrade {
2155                agg_trade_id: 1,
2156                timestamp,
2157                price: FixedPoint((price * 1e8) as i64),
2158                volume: FixedPoint((1.0 * 1e8) as i64),
2159                first_trade_id: 1,
2160                last_trade_id: 1,
2161                is_buyer_maker: false,
2162                is_best_match: Some(true),
2163            }
2164        }
2165
2166        let mut history = TradeHistory::new(InterBarConfig {
2167            lookback_mode: LookbackMode::FixedCount(50),
2168            compute_tier2: false,
2169            compute_tier3: false,
2170            ..Default::default()
2171        });
2172
2173        let max_capacity = history.max_safe_capacity;
2174
2175        // Add 300 trades - should trigger deferred pruning when hitting 2x capacity
2176        for i in 0..300 {
2177            let trade = create_test_trade(
2178                100.0 + (i as f64 * 0.01),
2179                1_000_000 + (i as i64 * 100),
2180            );
2181            history.push(&trade);
2182        }
2183
2184        // After adding trades, trade count should be reasonable
2185        // (deferred pruning activates when > max_capacity * 2)
2186        assert!(
2187            history.trades.len() <= max_capacity * 3,
2188            "Trade count should be controlled by deferred pruning"
2189        );
2190    }
2191
2192    #[test]
2193    fn test_adaptive_pruning_stats_tracking() {
2194        // Issue #96 Task #155: Verify pruning statistics are tracked correctly
2195        use crate::types::AggTrade;
2196
2197        fn create_test_trade(price: f64, timestamp: i64) -> AggTrade {
2198            AggTrade {
2199                agg_trade_id: 1,
2200                timestamp,
2201                price: FixedPoint((price * 1e8) as i64),
2202                volume: FixedPoint((1.0 * 1e8) as i64),
2203                first_trade_id: 1,
2204                last_trade_id: 1,
2205                is_buyer_maker: false,
2206                is_best_match: Some(true),
2207            }
2208        }
2209
2210        let mut history = TradeHistory::new(InterBarConfig {
2211            lookback_mode: LookbackMode::FixedCount(100),
2212            compute_tier2: false,
2213            compute_tier3: false,
2214            ..Default::default()
2215        });
2216
2217        // Initial stats should be empty
2218        assert_eq!(history.prune_stats, (0, 0), "Initial stats should be zero");
2219
2220        // Add enough trades to trigger pruning (exceed 2x capacity)
2221        for i in 0..2000 {
2222            let trade = create_test_trade(
2223                100.0 + (i as f64 * 0.01),
2224                1_000_000 + (i as i64 * 100),
2225            );
2226            history.push(&trade);
2227        }
2228
2229        // Stats should have been updated after pruning
2230        // Note: Stats are reset every 10 prune calls, so they might be (0,0) if exactly 10 calls happened
2231        // Just verify structure is there and reasonable
2232        assert!(
2233            history.prune_stats.0 <= 2000 && history.prune_stats.1 <= 10,
2234            "Pruning stats should be reasonable"
2235        );
2236    }
2237
2238    // === EDGE CASE TESTS (Issue #96 Task #22) ===
2239
2240    fn make_agg_trade(id: i64, price: f64, timestamp: i64) -> AggTrade {
2241        AggTrade {
2242            agg_trade_id: id,
2243            price: FixedPoint((price * 1e8) as i64),
2244            volume: FixedPoint(100000000), // 1.0
2245            first_trade_id: id,
2246            last_trade_id: id,
2247            timestamp,
2248            is_buyer_maker: false,
2249            is_best_match: None,
2250        }
2251    }
2252
2253    #[test]
2254    fn test_get_lookback_empty_history() {
2255        let history = TradeHistory::new(InterBarConfig::default());
2256        let lookback = history.get_lookback_trades(1000);
2257        assert!(lookback.is_empty(), "Empty history should return empty lookback");
2258    }
2259
2260    #[test]
2261    fn test_has_lookback_empty_history() {
2262        let history = TradeHistory::new(InterBarConfig::default());
2263        assert!(!history.has_lookback_trades(1000), "Empty history should have no lookback");
2264    }
2265
2266    #[test]
2267    fn test_get_lookback_all_trades_after_bar_open() {
2268        let mut history = TradeHistory::new(InterBarConfig::default());
2269        for i in 0..5 {
2270            history.push(&make_agg_trade(i, 100.0, 2000 + i));
2271        }
2272        let lookback = history.get_lookback_trades(1000);
2273        assert!(lookback.is_empty(), "All trades after bar_open_time should yield empty lookback");
2274    }
2275
2276    #[test]
2277    fn test_compute_features_minimum_lookback() {
2278        let mut history = TradeHistory::new(InterBarConfig::default());
2279        history.push(&make_agg_trade(1, 100.0, 1000));
2280        history.push(&make_agg_trade(2, 101.0, 2000));
2281
2282        let features = history.compute_features(3000);
2283        assert!(features.lookback_ofi.is_some(), "OFI should compute with 2 trades");
2284        assert_eq!(features.lookback_trade_count, Some(2));
2285    }
2286
2287    #[test]
2288    fn test_has_lookback_cache_hit_path() {
2289        let mut history = TradeHistory::new(InterBarConfig::default());
2290        for i in 0..10 {
2291            history.push(&make_agg_trade(i, 100.0, i * 100));
2292        }
2293        let has1 = history.has_lookback_trades(500);
2294        let has2 = history.has_lookback_trades(500);
2295        assert_eq!(has1, has2, "Cache hit should return same result");
2296        assert!(has1, "Should have lookback trades before ts=500");
2297    }
2298
2299    #[test]
2300    fn test_get_lookback_trades_at_exact_timestamp() {
2301        let mut history = TradeHistory::new(InterBarConfig::default());
2302        for i in 1..=3i64 {
2303            history.push(&make_agg_trade(i, 100.0, i * 100));
2304        }
2305        // bar_open_time = 200: should get trades BEFORE 200 (only ts=100)
2306        let lookback = history.get_lookback_trades(200);
2307        assert_eq!(lookback.len(), 1, "Should get 1 trade before ts=200");
2308        assert_eq!(lookback[0].timestamp, 100);
2309    }
2310
2311    // === buffer_stats() and has_lookback_trades() edge case tests (Issue #96 Task #71) ===
2312
2313    #[test]
2314    fn test_buffer_stats_empty_history() {
2315        let history = TradeHistory::new(InterBarConfig::default());
2316        let (trades_len, max_capacity, _batch, trades_pruned) = history.buffer_stats();
2317        assert_eq!(trades_len, 0, "Empty history should have 0 trades");
2318        assert!(max_capacity > 0, "max_safe_capacity should be positive");
2319        assert_eq!(trades_pruned, 0, "No trades should have been pruned");
2320    }
2321
2322    #[test]
2323    fn test_buffer_stats_after_pushes() {
2324        let mut history = TradeHistory::new(InterBarConfig::default());
2325        for i in 0..5 {
2326            history.push(&make_agg_trade(i, 100.0, i * 100));
2327        }
2328        let (trades_len, _max_capacity, _batch, _trades_pruned) = history.buffer_stats();
2329        assert_eq!(trades_len, 5, "Should have 5 trades after 5 pushes");
2330    }
2331
2332    #[test]
2333    fn test_has_lookback_no_trades_before_open() {
2334        let mut history = TradeHistory::new(InterBarConfig::default());
2335        // All trades at timestamp 1000+
2336        for i in 0..5 {
2337            history.push(&make_agg_trade(i, 100.0, 1000 + i * 100));
2338        }
2339        // bar_open_time before all trades: should have lookback
2340        assert!(history.has_lookback_trades(1000 + 200), "Should have lookback before ts=1200");
2341        // bar_open_time at first trade: no trades BEFORE it
2342        assert!(!history.has_lookback_trades(1000), "No trades before first trade timestamp");
2343        // bar_open_time before all trades: no lookback
2344        assert!(!history.has_lookback_trades(500), "No trades before ts=500");
2345    }
2346
2347    #[test]
2348    fn test_has_lookback_all_trades_before_open() {
2349        let mut history = TradeHistory::new(InterBarConfig::default());
2350        for i in 0..5 {
2351            history.push(&make_agg_trade(i, 100.0, i * 100));
2352        }
2353        // bar_open_time after all trades: all 5 trades are lookback
2354        assert!(history.has_lookback_trades(999), "All trades should be lookback");
2355    }
2356
2357    #[test]
2358    fn test_buffer_stats_len_matches_is_empty() {
2359        let history = TradeHistory::new(InterBarConfig::default());
2360        assert!(history.is_empty(), "New history should be empty");
2361        assert_eq!(history.len(), 0, "New history length should be 0");
2362
2363        let mut history2 = TradeHistory::new(InterBarConfig::default());
2364        history2.push(&make_agg_trade(1, 100.0, 1000));
2365        assert!(!history2.is_empty(), "History with 1 trade should not be empty");
2366        assert_eq!(history2.len(), 1, "History length should be 1");
2367    }
2368
2369    // Issue #96 Task #94: Integration test for Tier 2/3 dispatch paths
2370    // All prior tests use compute_tier2: false, compute_tier3: false.
2371    // This exercises the 4-branch parallelization dispatch (lines 656-695).
2372
2373    #[test]
2374    fn test_tier2_features_computed_when_enabled() {
2375        let config = InterBarConfig {
2376            lookback_mode: LookbackMode::FixedCount(500),
2377            compute_tier2: true,
2378            compute_tier3: false,
2379            ..Default::default()
2380        };
2381        let mut history = TradeHistory::new(config);
2382
2383        // Push 120 trades with realistic price variation and mixed buy/sell
2384        for i in 0..120i64 {
2385            let price = 50000.0 + (i as f64 * 0.7).sin() * 50.0;
2386            let volume = 1.0 + (i % 5) as f64 * 0.5;
2387            let trade = AggTrade {
2388                agg_trade_id: i,
2389                price: FixedPoint((price * 1e8) as i64),
2390                volume: FixedPoint((volume * 1e8) as i64),
2391                first_trade_id: i,
2392                last_trade_id: i,
2393                timestamp: i * 500, // 500us apart
2394                is_buyer_maker: i % 3 == 0, // ~33% sellers
2395                is_best_match: None,
2396            };
2397            history.push(&trade);
2398        }
2399
2400        let features = history.compute_features(120 * 500);
2401
2402        // Tier 1 should always be present
2403        assert!(features.lookback_trade_count.is_some(), "trade_count should be Some");
2404        assert!(features.lookback_ofi.is_some(), "ofi should be Some");
2405
2406        // Tier 2 features should be computed
2407        assert!(features.lookback_kyle_lambda.is_some(), "kyle_lambda should be Some with tier2 enabled");
2408        assert!(features.lookback_burstiness.is_some(), "burstiness should be Some with tier2 enabled");
2409        assert!(features.lookback_volume_skew.is_some(), "volume_skew should be Some with tier2 enabled");
2410        assert!(features.lookback_volume_kurt.is_some(), "volume_kurt should be Some with tier2 enabled");
2411        assert!(features.lookback_price_range.is_some(), "price_range should be Some with tier2 enabled");
2412        // Issue #128: GK promoted from Tier 3 → Tier 2
2413        assert!(features.lookback_garman_klass_vol.is_some(), "garman_klass should be Some with tier2 enabled");
2414
2415        // Tier 3 features should remain None
2416        assert!(features.lookback_kaufman_er.is_none(), "kaufman_er should be None with tier3 disabled");
2417        assert!(features.lookback_hurst.is_none(), "hurst should be None with tier3 disabled");
2418    }
2419
2420    #[test]
2421    fn test_tier3_features_computed_when_enabled() {
2422        let config = InterBarConfig {
2423            lookback_mode: LookbackMode::FixedCount(500),
2424            compute_tier2: false,
2425            compute_tier3: true,
2426            ..Default::default()
2427        };
2428        let mut history = TradeHistory::new(config);
2429
2430        // Push 120 trades (>64 for Hurst, >60 for PE)
2431        for i in 0..120i64 {
2432            let price = 50000.0 + (i as f64 * 0.7).sin() * 50.0;
2433            let trade = AggTrade {
2434                agg_trade_id: i,
2435                price: FixedPoint((price * 1e8) as i64),
2436                volume: FixedPoint((1.5 * 1e8) as i64),
2437                first_trade_id: i,
2438                last_trade_id: i,
2439                timestamp: i * 500,
2440                is_buyer_maker: i % 2 == 0,
2441                is_best_match: None,
2442            };
2443            history.push(&trade);
2444        }
2445
2446        let features = history.compute_features(120 * 500);
2447
2448        // Tier 1 should be present
2449        assert!(features.lookback_trade_count.is_some(), "trade_count should be Some");
2450
2451        // Tier 2 should remain None (Issue #128: GK promoted to Tier 2)
2452        assert!(features.lookback_kyle_lambda.is_none(), "kyle_lambda should be None with tier2 disabled");
2453        assert!(features.lookback_burstiness.is_none(), "burstiness should be None with tier2 disabled");
2454        assert!(features.lookback_garman_klass_vol.is_none(), "garman_klass should be None with tier2 disabled (promoted from tier3)");
2455
2456        // Tier 3 features should be computed (120 trades > 64 for Hurst, > 60 for PE)
2457        assert!(features.lookback_kaufman_er.is_some(), "kaufman_er should be Some with tier3 enabled");
2458    }
2459
2460    #[test]
2461    fn test_all_tiers_enabled_parallel_dispatch() {
2462        let config = InterBarConfig {
2463            lookback_mode: LookbackMode::FixedCount(500),
2464            compute_tier2: true,
2465            compute_tier3: true,
2466            ..Default::default()
2467        };
2468        let mut history = TradeHistory::new(config);
2469
2470        // Push 200 trades to exceed parallel thresholds (80 for Tier2, 150 for Tier3)
2471        for i in 0..200i64 {
2472            let price = 50000.0 + (i as f64 * 0.3).sin() * 100.0;
2473            let volume = 0.5 + (i % 7) as f64 * 0.3;
2474            let trade = AggTrade {
2475                agg_trade_id: i,
2476                price: FixedPoint((price * 1e8) as i64),
2477                volume: FixedPoint((volume * 1e8) as i64),
2478                first_trade_id: i,
2479                last_trade_id: i + 2, // aggregation_density > 1
2480                timestamp: i * 1000,
2481                is_buyer_maker: i % 4 == 0, // ~25% sellers
2482                is_best_match: None,
2483            };
2484            history.push(&trade);
2485        }
2486
2487        let features = history.compute_features(200 * 1000);
2488
2489        // All tiers should be computed
2490        assert!(features.lookback_trade_count.is_some(), "trade_count");
2491        assert!(features.lookback_ofi.is_some(), "ofi");
2492        assert!(features.lookback_intensity.is_some(), "intensity");
2493        assert!(features.lookback_vwap.is_some(), "vwap");
2494
2495        // Tier 2 (Issue #128: GK promoted from Tier 3)
2496        assert!(features.lookback_kyle_lambda.is_some(), "kyle_lambda");
2497        assert!(features.lookback_burstiness.is_some(), "burstiness");
2498        assert!(features.lookback_volume_skew.is_some(), "volume_skew");
2499        assert!(features.lookback_volume_kurt.is_some(), "volume_kurt");
2500        assert!(features.lookback_price_range.is_some(), "price_range");
2501        assert!(features.lookback_garman_klass_vol.is_some(), "garman_klass_vol");
2502
2503        // Tier 3
2504        assert!(features.lookback_kaufman_er.is_some(), "kaufman_er");
2505
2506        // Verify feature values are finite
2507        assert!(features.lookback_ofi.unwrap().is_finite(), "ofi should be finite");
2508        assert!(features.lookback_kyle_lambda.unwrap().is_finite(), "kyle_lambda should be finite");
2509        assert!(features.lookback_kaufman_er.unwrap().is_finite(), "kaufman_er should be finite");
2510    }
2511}