use nalgebra::{Complex, DMatrix, DVector};
use ndarray::Array1;
use rayon::prelude::*;
use std::collections::HashMap;

pub type StateSpace = Vec<String>;
pub type QuantumState = Array1<Complex<f64>>;

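/// Variable-order Markov transition model over string-labelled states, with
/// optional spectral and quantum-state summaries derived from the first-order
/// transition matrix.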
#[derive(Debug, Clone)]
pub struct AdvancedTransitionModel {
    pub contexts: HashMap<Vec<String>, ContextNode>,
    pub max_order: usize,
    pub quantum_representation: Option<QuantumState>,
    pub spectral_decomposition: Option<SpectralAnalysis>,

    state_to_id: HashMap<String, usize>,
    id_to_state: Vec<String>,

    min_probability: f64,
    smoothing_alpha: f64,
}

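/// Transition statistics for a single context (state history): raw counts,
/// smoothed probabilities, and derived information-theoretic measures.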
#[derive(Debug, Clone)]
pub struct ContextNode {
    pub counts: HashMap<String, usize>,
    pub probabilities: HashMap<String, f64>,
    pub entropy: f64,
    pub kl_divergence: f64,

    pub transition_information: HashMap<String, f64>,
}

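/// Spectral summary of the first-order transition matrix.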
#[derive(Debug, Clone)]
pub struct SpectralAnalysis {
    pub eigenvalues: DVector<Complex<f64>>,
    pub eigenvectors: DMatrix<Complex<f64>>,
    pub stationary_distribution: DVector<f64>,
    pub mixing_time: f64,
    pub spectral_gap: f64,

    pub condition_number: f64,
    pub convergence_error: f64,
}

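/// Scores produced for a single window during anomaly detection.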
#[derive(Debug, Clone)]
pub struct AnomalyScore {
    pub state_sequence: Vec<String>,
    pub log_likelihood: f64,
    pub likelihood: f64,
    pub information_theoretic_score: f64,
    pub spectral_anomaly_score: f64,
    pub quantum_coherence_measure: f64,
    pub topological_signature: Vec<f64>,
    pub confidence_interval: (f64, f64),

    pub numerical_stability_flag: bool,
    pub anomaly_strength: f64,
}

impl AdvancedTransitionModel {
    pub fn new(max_order: usize) -> Self {
        Self {
            contexts: HashMap::new(),
            max_order,
            quantum_representation: None,
            spectral_decomposition: None,
            state_to_id: HashMap::new(),
            id_to_state: Vec::new(),
            min_probability: 1e-12,
            smoothing_alpha: 1.0,
        }
    }

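    /// Counts every context of length 1..=max_order in `sequence`, then derives
    /// smoothed probabilities, the spectral analysis, and the quantum representation.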
    pub fn build_context_tree(&mut self, sequence: &[String]) -> Result<(), String> {
        if sequence.len() < 2 {
            return Err("Sequence too short for context tree".to_string());
        }

        self.build_state_mapping(sequence);

        for window_size in 1..=self.max_order {
            for window in sequence.windows(window_size + 1) {
                let context = window[..window_size].to_vec();
                let next_state = &window[window_size];

                let node = self
                    .contexts
                    .entry(context)
                    .or_insert_with(ContextNode::new);
                *node.counts.entry(next_state.clone()).or_insert(0) += 1;
            }
        }

        self.calculate_information_measures_correct()?;
        self.perform_spectral_analysis_robust()?;
        self.generate_quantum_representation_physical()?;

        Ok(())
    }

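    /// Assigns a dense integer id to each distinct state. `HashSet` iteration
    /// order is unspecified, so the id assignment is not deterministic across runs.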
    fn build_state_mapping(&mut self, sequence: &[String]) {
        let mut unique_states: std::collections::HashSet<String> =
            sequence.iter().cloned().collect();

        for (id, state) in unique_states.drain().enumerate() {
            self.state_to_id.insert(state.clone(), id);
            self.id_to_state.push(state);
        }
    }

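    /// Turns raw counts into Laplace-smoothed probabilities, per-transition
    /// surprisal (-log2 p), Shannon entropy, and KL divergence from the uniform
    /// distribution over the observed successors.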
    fn calculate_information_measures_correct(&mut self) -> Result<(), String> {
        // Copy the scalar parameters so the closures below do not capture `self`
        // while `self.contexts` is mutably borrowed.
        let smoothing_alpha = self.smoothing_alpha;
        let min_probability = self.min_probability;

        for (context, node) in &mut self.contexts {
            let total_count: usize = node.counts.values().sum();
            let vocab_size = node.counts.len();

            for (state, &count) in &node.counts {
                let smoothed_prob = (count as f64 + smoothing_alpha)
                    / (total_count as f64 + smoothing_alpha * vocab_size as f64);

                let prob = smoothed_prob.max(min_probability);
                node.probabilities.insert(state.clone(), prob);

                node.transition_information
                    .insert(state.clone(), -prob.log2());
            }

            node.entropy = node
                .probabilities
                .values()
                .map(|&p| {
                    if p <= min_probability {
                        0.0
                    } else {
                        -p * p.log2()
                    }
                })
                .sum();

            let max_entropy = (vocab_size as f64).log2();
            if node.entropy > max_entropy + 1e-10 {
                return Err(format!(
                    "Entropy violation in context {:?}: {} > {}",
                    context, node.entropy, max_entropy
                ));
            }

            let uniform_prob = 1.0 / vocab_size as f64;
            node.kl_divergence = node
                .probabilities
                .values()
                .map(|&p| {
                    if p > min_probability {
                        p * (p / uniform_prob).log2()
                    } else {
                        0.0
                    }
                })
                .sum();
        }

        Ok(())
    }

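    /// Builds the first-order transition matrix and records its eigenvalues,
    /// stationary distribution, spectral gap, and a mixing-time estimate.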
    fn perform_spectral_analysis_robust(&mut self) -> Result<(), String> {
        if self.contexts.is_empty() {
            return Ok(());
        }

        let transition_matrix = self.build_transition_matrix_robust()?;

        let condition_number = self.estimate_condition_number(&transition_matrix);
        if condition_number > 1e12 {
            eprintln!(
                "Warning: Ill-conditioned transition matrix (κ = {:.2e})",
                condition_number
            );
        }

        let eigenvalues = transition_matrix.complex_eigenvalues();

        let (stationary_dist, convergence_error) =
            self.find_stationary_distribution_robust(&transition_matrix)?;

        let mut eigenvalue_magnitudes: Vec<f64> = eigenvalues.iter().map(|c| c.norm()).collect();
        eigenvalue_magnitudes
            .sort_by(|a, b| b.partial_cmp(a).unwrap_or(std::cmp::Ordering::Equal));

        let spectral_gap = if eigenvalue_magnitudes.len() > 1 {
            (eigenvalue_magnitudes[0] - eigenvalue_magnitudes[1]).max(0.0)
        } else {
            0.0
        };

        let mixing_time = if eigenvalue_magnitudes.len() > 1 && eigenvalue_magnitudes[1] > 1e-12 {
            let lambda_2 = eigenvalue_magnitudes[1];
            if lambda_2 < 1.0 {
                let mixing_estimate = 1.0 / (1.0 - lambda_2);
                mixing_estimate.min(1e6)
            } else {
                f64::INFINITY
            }
        } else {
            f64::INFINITY
        };

        self.spectral_decomposition = Some(SpectralAnalysis {
            eigenvalues,
            // Placeholder: this stores the complex-cast transition matrix itself,
            // not an eigenvector basis.
            eigenvectors: transition_matrix
                .clone_owned()
                .map(|v| Complex::new(v, 0.0)),
            stationary_distribution: stationary_dist,
            mixing_time,
            spectral_gap,
            condition_number,
            convergence_error,
        });

        Ok(())
    }

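    /// Assembles a row-stochastic first-order transition matrix from the
    /// length-1 contexts, filling empty rows with the uniform distribution.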
    fn build_transition_matrix_robust(&self) -> Result<DMatrix<f64>, String> {
        let n_states = self.id_to_state.len();
        if n_states == 0 {
            return Err("No states found".to_string());
        }

        let mut matrix = DMatrix::zeros(n_states, n_states);

        for (context, node) in &self.contexts {
            if context.len() == 1 {
                if let Some(&from_id) = self.state_to_id.get(&context[0]) {
                    for (to_state, &prob) in &node.probabilities {
                        if let Some(&to_id) = self.state_to_id.get(to_state) {
                            matrix[(from_id, to_id)] = prob;
                        }
                    }
                }
            }
        }

        for i in 0..n_states {
            let row_sum: f64 = matrix.row(i).sum();
            if row_sum < 1e-15 {
                for j in 0..n_states {
                    matrix[(i, j)] = 1.0 / n_states as f64;
                }
            } else if (row_sum - 1.0).abs() > 1e-10 {
                for j in 0..n_states {
                    matrix[(i, j)] /= row_sum;
                }
            }
        }

        Ok(matrix)
    }

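    /// Rough conditioning estimate: ratio of the largest to smallest eigenvalue
    /// magnitude (a cheap proxy rather than a singular-value condition number).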
    fn estimate_condition_number(&self, matrix: &DMatrix<f64>) -> f64 {
        let eigenvalues = matrix.complex_eigenvalues();
        let magnitudes: Vec<f64> = eigenvalues.iter().map(|c| c.norm()).collect();

        if let (Some(&max_val), Some(&min_val)) = (
            magnitudes.iter().max_by(|a, b| a.partial_cmp(b).unwrap()),
            magnitudes.iter().min_by(|a, b| a.partial_cmp(b).unwrap()),
        ) {
            if min_val > 1e-15 {
                max_val / min_val
            } else {
                f64::INFINITY
            }
        } else {
            1.0
        }
    }

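    /// Tries a direct linear solve for the stationary distribution, falls back
    /// to power iteration, and finally to the uniform distribution.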
    fn find_stationary_distribution_robust(
        &self,
        matrix: &DMatrix<f64>,
    ) -> Result<(DVector<f64>, f64), String> {
        let n = matrix.nrows();

        if let Some((dist, error)) = self.solve_stationary_direct(matrix) {
            return Ok((dist, error));
        }

        if let Some((dist, error)) = self.power_iteration_robust(matrix) {
            return Ok((dist, error));
        }

        Ok((DVector::from_element(n, 1.0 / n as f64), 1.0))
    }

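    /// Solves (P^T - I) x = 0 with the last equation replaced by the
    /// normalisation constraint sum(x) = 1, using an LU decomposition.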
    fn solve_stationary_direct(&self, matrix: &DMatrix<f64>) -> Option<(DVector<f64>, f64)> {
        let n = matrix.nrows();
        if n == 0 {
            return None;
        }

        let mut augmented = matrix.transpose() - DMatrix::identity(n, n);

        for j in 0..n {
            augmented[(n - 1, j)] = 1.0;
        }

        let mut rhs = DVector::zeros(n);
        rhs[n - 1] = 1.0;

        let lu = augmented.clone().lu();
        if let Some(solution) = lu.solve(&rhs) {
            let residual = &augmented * &solution - &rhs;
            let error = residual.norm();

            let all_positive = solution.iter().all(|&x| x >= -1e-10);
            let sum_close_to_one = (solution.sum() - 1.0).abs() < 1e-8;

            if all_positive && sum_close_to_one && error < 1e-8 {
                let corrected = solution.map(|x| x.max(0.0));
                let normalized = &corrected / corrected.sum();
                return Some((normalized, error));
            }
        }

        None
    }

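    /// Power iteration on P^T with renormalisation at each step; stops on
    /// convergence, on stagnation, or after a fixed iteration budget.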
    fn power_iteration_robust(&self, matrix: &DMatrix<f64>) -> Option<(DVector<f64>, f64)> {
        let n = matrix.nrows();
        let mut dist = DVector::from_element(n, 1.0 / n as f64);

        const MAX_ITER: usize = 1000;
        const TOLERANCE: f64 = 1e-12;

        let mut prev_error = f64::INFINITY;
        let mut stagnation_count = 0;

        for _ in 0..MAX_ITER {
            let new_dist = matrix.transpose() * &dist;

            let norm = new_dist.sum();
            if norm < 1e-15 {
                return None;
            }

            let new_dist_normalized = new_dist / norm;

            let error = (&new_dist_normalized - &dist).norm();

            if error < TOLERANCE {
                return Some((new_dist_normalized, error));
            }

            if (prev_error - error).abs() < 1e-15 {
                stagnation_count += 1;
                if stagnation_count > 10 {
                    return Some((new_dist_normalized, error));
                }
            } else {
                stagnation_count = 0;
            }

            dist = new_dist_normalized;
            prev_error = error;
        }

        None
    }

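    /// Encodes the stationary distribution as a real-amplitude quantum state
    /// (amplitudes sqrt(pi_i)), renormalised so that the squared norm is 1.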
    fn generate_quantum_representation_physical(&mut self) -> Result<(), String> {
        let n_states = self.id_to_state.len();
        if n_states == 0 {
            return Ok(());
        }

        if let Some(spectral) = &self.spectral_decomposition {
            let stationary = &spectral.stationary_distribution;

            let mut quantum_state: QuantumState = Array1::zeros(n_states);

            for i in 0..n_states {
                let amplitude = stationary[i].sqrt().max(0.0);
                quantum_state[i] = Complex::new(amplitude, 0.0);
            }

            let norm_squared: f64 = quantum_state.iter().map(|c| c.norm_sqr()).sum();

            if (norm_squared - 1.0).abs() > 1e-10 {
                let norm = norm_squared.sqrt();
                if norm > 1e-15 {
                    for amplitude in quantum_state.iter_mut() {
                        *amplitude /= norm;
                    }
                }
            }

            self.quantum_representation = Some(quantum_state);
        }

        Ok(())
    }

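    /// Slides a window of length max_order + 1 over `sequence`, scoring windows
    /// in parallel and discarding those whose likelihood is well above `threshold`.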
    pub fn detect_advanced_anomalies(
        &self,
        sequence: &[String],
        threshold: f64,
    ) -> Vec<AnomalyScore> {
        if sequence.len() <= self.max_order {
            return Vec::new();
        }

        sequence
            .windows(self.max_order + 1)
            .par_bridge()
            .filter_map(|window| {
                self.calculate_comprehensive_anomaly_score_correct(window, threshold)
            })
            .collect()
    }

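    /// Combines likelihood, information-theoretic, spectral, quantum, and
    /// topological measures into a single `AnomalyScore` for one window.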
    fn calculate_comprehensive_anomaly_score_correct(
        &self,
        sequence: &[String],
        threshold: f64,
    ) -> Option<AnomalyScore> {
        let log_likelihood = self.calculate_log_likelihood_robust(sequence)?;
        let likelihood = self.safe_exp(log_likelihood);

        if likelihood > threshold * 10.0 {
            return None;
        }

        let info_score = self.calculate_information_score_correct(sequence)?;

        let spectral_score = self
            .calculate_spectral_anomaly_score(sequence)
            .unwrap_or(0.0);

        let quantum_score = self
            .calculate_quantum_coherence_correct(sequence)
            .unwrap_or(0.0);

        let topo_signature = self.calculate_topological_signature_simple(sequence);

        let confidence_interval =
            self.calculate_confidence_interval_robust(sequence, log_likelihood);

        let numerical_stability = likelihood.is_finite() && likelihood > 0.0;
        let anomaly_strength =
            self.calculate_anomaly_strength(likelihood, info_score, spectral_score);

        Some(AnomalyScore {
            state_sequence: sequence.to_vec(),
            log_likelihood,
            likelihood,
            information_theoretic_score: info_score,
            spectral_anomaly_score: spectral_score,
            quantum_coherence_measure: quantum_score,
            topological_signature: topo_signature,
            confidence_interval,
            numerical_stability_flag: numerical_stability,
            anomaly_strength,
        })
    }

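    /// Sums per-step log probabilities (natural log), backing off to the longest
    /// matching context; unseen transitions get a uniform-vocabulary fallback.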
    fn calculate_log_likelihood_robust(&self, sequence: &[String]) -> Option<f64> {
        let mut log_likelihood = 0.0;

        for i in 1..sequence.len() {
            let mut best_log_prob = f64::NEG_INFINITY;

            for context_len in (1..=std::cmp::min(i, self.max_order)).rev() {
                let context = sequence[i - context_len..i].to_vec();

                if let Some(node) = self.contexts.get(&context) {
                    if let Some(&prob) = node.probabilities.get(&sequence[i]) {
                        best_log_prob = prob.ln();
                        break;
                    }
                }
            }

            if best_log_prob.is_infinite() {
                let vocab_size = self.id_to_state.len() as f64;
                best_log_prob = -(vocab_size + 1.0).ln();
            }

            log_likelihood += best_log_prob;
        }

        Some(log_likelihood)
    }

    fn safe_exp(&self, log_val: f64) -> f64 {
        if log_val < -700.0 {
            0.0
        } else if log_val > 700.0 {
            f64::INFINITY
        } else {
            log_val.exp()
        }
    }

    fn calculate_information_score_correct(&self, sequence: &[String]) -> Option<f64> {
        let mut total_information = 0.0;
        let mut count = 0;

        for i in 1..sequence.len() {
            let context_len = std::cmp::min(i, self.max_order);
            let context = sequence[i - context_len..i].to_vec();

            if let Some(node) = self.contexts.get(&context) {
                if let Some(&info_content) = node.transition_information.get(&sequence[i]) {
                    total_information += info_content;
                    count += 1;
                }
            }
        }

        if count > 0 {
            Some(total_information / count as f64)
        } else {
            None
        }
    }

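    /// l1-norm coherence of the stored quantum state: the sum of |psi_i* psi_j|
    /// over all pairs i != j.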
    fn calculate_quantum_coherence_correct(&self, _sequence: &[String]) -> Option<f64> {
        let quantum_state = self.quantum_representation.as_ref()?;
        let n = quantum_state.len();

        let mut coherence = 0.0;

        for i in 0..n {
            for j in 0..n {
                if i != j {
                    let rho_ij = quantum_state[i].conj() * quantum_state[j];
                    coherence += rho_ij.norm();
                }
            }
        }

        Some(coherence)
    }

    fn calculate_topological_signature_simple(&self, sequence: &[String]) -> Vec<f64> {
        vec![
            sequence.len() as f64,
            self.count_unique_transitions(sequence) as f64,
            self.calculate_repetition_index(sequence),
        ]
    }

    fn count_unique_transitions(&self, sequence: &[String]) -> usize {
        let mut transitions = std::collections::HashSet::new();
        for window in sequence.windows(2) {
            transitions.insert((window[0].clone(), window[1].clone()));
        }
        transitions.len()
    }

    fn calculate_repetition_index(&self, sequence: &[String]) -> f64 {
        let mut repetitions = 0;
        for len in 2..=sequence.len() / 2 {
            for i in 0..=sequence.len() - 2 * len {
                if sequence[i..i + len] == sequence[i + len..i + 2 * len] {
                    repetitions += 1;
                }
            }
        }
        repetitions as f64 / sequence.len() as f64
    }

    fn calculate_confidence_interval_robust(
        &self,
        sequence: &[String],
        log_likelihood: f64,
    ) -> (f64, f64) {
        let n = sequence.len() as f64;
        let std_error = (1.0 / n.sqrt()).max(1e-6);
        let z_score = 1.96; // approximate 95% normal interval
        let margin = z_score * std_error;
        let likelihood = self.safe_exp(log_likelihood);

        (
            self.safe_exp(log_likelihood - margin).max(likelihood * 0.1),
            self.safe_exp(log_likelihood + margin)
                .min(likelihood * 10.0),
        )
    }

    fn calculate_anomaly_strength(
        &self,
        likelihood: f64,
        info_score: f64,
        spectral_score: f64,
    ) -> f64 {
        let log_likelihood_component = if likelihood > 0.0 {
            -likelihood.ln()
        } else {
            10.0
        };
        let combined_score = log_likelihood_component.log10().max(0.0) * 0.5
            + info_score * 0.3
            + spectral_score * 0.2;

        (combined_score / 10.0).tanh()
    }

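    /// Mean absolute deviation between each state's in-window frequency and its
    /// probability under the stationary distribution.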
    fn calculate_spectral_anomaly_score(&self, sequence: &[String]) -> Option<f64> {
        let spectral = self.spectral_decomposition.as_ref()?;

        let mut deviation = 0.0;
        for state in sequence {
            if let Some(&state_id) = self.state_to_id.get(state) {
                if state_id < spectral.stationary_distribution.len() {
                    let expected_prob = spectral.stationary_distribution[state_id];
                    let observed_freq = self.calculate_observed_frequency(state, sequence);
                    deviation += (observed_freq - expected_prob).abs();
                }
            }
        }

        Some(deviation / sequence.len() as f64)
    }

    fn calculate_observed_frequency(&self, target_state: &str, sequence: &[String]) -> f64 {
        let count = sequence.iter().filter(|&s| s == target_state).count();
        count as f64 / sequence.len() as f64
    }
}

impl ContextNode {
    fn new() -> Self {
        Self {
            counts: HashMap::new(),
            probabilities: HashMap::new(),
            entropy: 0.0,
            kl_divergence: 0.0,
            transition_information: HashMap::new(),
        }
    }
}

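/// Builds an independent model for each sequence and runs anomaly detection on
/// it, processing the sequences in parallel with rayon.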
pub fn batch_process_sequences(
    sequences: &[Vec<String>],
    max_order: usize,
    threshold: f64,
) -> Vec<Vec<AnomalyScore>> {
    sequences
        .par_iter()
        .map(|sequence| {
            if sequence.len() <= max_order {
                return Vec::new();
            }

            let mut model = AdvancedTransitionModel::new(max_order);
            match model.build_context_tree(sequence) {
                Ok(()) => model.detect_advanced_anomalies(sequence, threshold),
                Err(_) => Vec::new(),
            }
        })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_corrected_mathematical_properties() {
        let sequence: Vec<String> = vec!["A", "B", "A", "B", "A", "C", "A", "C"]
            .into_iter()
            .map(String::from)
            .collect();

        let mut model = AdvancedTransitionModel::new(2);
        assert!(model.build_context_tree(&sequence).is_ok());

        for (context, node) in &model.contexts {
            let prob_sum: f64 = node.probabilities.values().sum();
            assert!(
                (prob_sum - 1.0).abs() < 1e-10,
                "Context {:?}: probabilities sum to {}, not 1.0",
                context,
                prob_sum
            );
        }

        for (context, node) in &model.contexts {
            let max_entropy = (node.probabilities.len() as f64).log2();
            assert!(
                node.entropy >= 0.0 && node.entropy <= max_entropy + 1e-10,
                "Context {:?}: entropy {} outside bounds [0, {}]",
                context,
                node.entropy,
                max_entropy
            );
        }

        for (context, node) in &model.contexts {
            for (state, &prob) in &node.probabilities {
                let expected_info = -prob.log2();
                let actual_info = node.transition_information[state];
                assert!(
                    (expected_info - actual_info).abs() < 1e-10,
                    "Information content mismatch for {:?} -> {}",
                    context,
                    state
                );
            }
        }
    }

    #[test]
    fn test_numerical_stability() {
        let sequence: Vec<String> = (0..100).map(|i| format!("S{}", i % 5)).collect();

        let mut model = AdvancedTransitionModel::new(3);
        assert!(model.build_context_tree(&sequence).is_ok());

        let anomalies = model.detect_advanced_anomalies(&sequence, 1e-10);

        for anomaly in &anomalies {
            assert!(
                anomaly.likelihood.is_finite() && anomaly.likelihood > 0.0,
                "Non-finite likelihood: {}",
                anomaly.likelihood
            );
            assert!(
                anomaly.log_likelihood.is_finite(),
                "Non-finite log-likelihood: {}",
                anomaly.log_likelihood
            );
        }
    }

    #[test]
    fn test_quantum_representation_normalization() {
        let sequence: Vec<String> = vec!["A", "B", "C", "A", "B", "C"]
            .into_iter()
            .map(String::from)
            .collect();

        let mut model = AdvancedTransitionModel::new(2);
        assert!(model.build_context_tree(&sequence).is_ok());

        if let Some(quantum_state) = &model.quantum_representation {
            let norm_squared: f64 = quantum_state.iter().map(|c| c.norm_sqr()).sum();
            assert!(
                (norm_squared - 1.0).abs() < 1e-10,
                "Quantum state not normalized: ||ψ||² = {}",
                norm_squared
            );
        } else {
            panic!("Quantum representation not generated");
        }
    }

    #[test]
    fn test_spectral_analysis_properties() {
        let sequence: Vec<String> = vec!["A", "B", "A", "C", "B", "A"]
            .into_iter()
            .map(String::from)
            .collect();

        let mut model = AdvancedTransitionModel::new(2);
        assert!(model.build_context_tree(&sequence).is_ok());

        if let Some(spectral) = &model.spectral_decomposition {
            assert!(!spectral.eigenvalues.is_empty(), "No eigenvalues computed");
            assert!(
                spectral.eigenvalues.iter().all(|&c| c.norm() >= 0.0),
                "Negative eigenvalue magnitude found"
            );

            let stationary_sum: f64 = spectral.stationary_distribution.sum();
            assert!(
                (stationary_sum - 1.0).abs() < 1e-10,
                "Stationary distribution not normalized: sum = {}",
                stationary_sum
            );

            assert!(
                spectral.spectral_gap >= 0.0,
                "Negative spectral gap: {}",
                spectral.spectral_gap
            );
        } else {
            panic!("Spectral decomposition not generated");
        }
    }

    #[test]
    fn test_anomaly_detection_correctness() {
        let sequence: Vec<String> = vec!["A", "B", "A", "C", "A", "B", "A", "D"]
            .into_iter()
            .map(String::from)
            .collect();

        let mut model = AdvancedTransitionModel::new(2);
        assert!(model.build_context_tree(&sequence).is_ok());

        let anomalies = model.detect_advanced_anomalies(&sequence, 0.1);
        assert!(!anomalies.is_empty(), "No anomalies detected in sequence");

        for anomaly in &anomalies {
            assert!(
                anomaly.likelihood.is_finite() && anomaly.likelihood > 0.0,
                "Non-finite likelihood in anomaly: {}",
                anomaly.likelihood
            );
            assert!(
                anomaly.log_likelihood.is_finite(),
                "Non-finite log-likelihood in anomaly: {}",
                anomaly.log_likelihood
            );
            assert!(
                anomaly.information_theoretic_score >= 0.0,
                "Negative information score in anomaly: {}",
                anomaly.information_theoretic_score
            );
        }
    }

    #[test]
    fn test_batch_processing_correctness() {
        let sequences: Vec<Vec<String>> = vec![
            vec!["A", "B", "A", "C", "A", "B", "A", "D"]
                .into_iter()
                .map(String::from)
                .collect(),
            vec!["X", "Y", "X", "Z", "X", "Y", "X", "W"]
                .into_iter()
                .map(String::from)
                .collect(),
        ];

        let results = batch_process_sequences(&sequences, 2, 0.1);
        assert_eq!(
            results.len(),
            sequences.len(),
            "Batch processing returned incorrect number of results"
        );

        for (i, anomalies) in results.iter().enumerate() {
            assert!(
                !anomalies.is_empty(),
                "No anomalies detected in sequence {}",
                i
            );
            for anomaly in anomalies {
                assert!(
                    anomaly.likelihood.is_finite() && anomaly.likelihood > 0.0,
                    "Non-finite likelihood in anomaly {}: {}",
                    i,
                    anomaly.likelihood
                );
                assert!(
                    anomaly.log_likelihood.is_finite(),
                    "Non-finite log-likelihood in anomaly {}: {}",
                    i,
                    anomaly.log_likelihood
                );
                assert!(
                    anomaly.information_theoretic_score >= 0.0,
                    "Negative information score in anomaly {}: {}",
                    i,
                    anomaly.information_theoretic_score
                );
            }
        }
    }
}

pub mod func_tests;
pub mod test_runner;