1use serde::Serialize;
7
8use crate::math;
9
/// A single detected change point in a byte stream.
#[derive(Debug, Clone, Serialize)]
pub struct ChangePoint {
    /// Byte offset where the change occurs (first byte of the "after" segment).
    pub offset: usize,
    /// Mean byte value of the data before `offset`.
    pub mean_before: f64,
    /// Mean byte value of the data from `offset` onward.
    pub mean_after: f64,
    /// Absolute difference between the two segment means.
    pub magnitude: f64,
    /// `magnitude` expressed in units of the global standard deviation.
    pub significance: f64,
}
18
/// Outcome of [`change_point_detection`].
#[derive(Debug, Clone, Serialize)]
pub struct ChangePointResult {
    /// Detected change points (at most one in the current implementation).
    pub change_points: Vec<ChangePoint>,
    /// Number of homogeneous segments (`change_points.len() + 1` when valid).
    pub n_segments: usize,
    /// False when the input was too short (or `min_segment` was zero) to analyze.
    pub is_valid: bool,
}
25
26pub fn change_point_detection(data: &[u8], min_segment: usize) -> ChangePointResult {
27 if min_segment == 0 || data.len() < 2 * min_segment {
28 return ChangePointResult {
29 change_points: Vec::new(),
30 n_segments: 0,
31 is_valid: false,
32 };
33 }
34
35 let values: Vec<f64> = data.iter().map(|&b| b as f64).collect();
36 let n = values.len();
37 let global_mean = values.iter().sum::<f64>() / n as f64;
38 let global_std = stddev(&values, global_mean);
39
40 if global_std < 1e-12 {
41 return ChangePointResult {
42 change_points: Vec::new(),
43 n_segments: 1,
44 is_valid: true,
45 };
46 }
47
48 let x: Vec<f64> = (0..n).map(|i| i as f64).collect();
49 let (slope, intercept, _) = math::linear_regression(&x, &values);
50 let detrended: Vec<f64> = if slope.is_nan() || intercept.is_nan() {
51 values.clone()
52 } else {
53 values
54 .iter()
55 .enumerate()
56 .map(|(i, &v)| v - (slope * i as f64 + intercept))
57 .collect()
58 };
59 let detrended_mean = detrended.iter().sum::<f64>() / n as f64;
60
61 let mut cusum = vec![0.0_f64; n + 1];
62 for i in 0..n {
63 cusum[i + 1] = cusum[i] + (detrended[i] - detrended_mean);
64 }
65
66 let mut best_offset = min_segment;
67 let mut best_score = f64::NEG_INFINITY;
68 #[allow(clippy::needless_range_loop)]
69 for k in min_segment..=(n - min_segment) {
70 let score = cusum[k].abs();
71 if score > best_score {
72 best_score = score;
73 best_offset = k;
74 }
75 }
76
77 let mean_before = values[..best_offset].iter().sum::<f64>() / best_offset as f64;
78 let right_len = n - best_offset;
79 let mean_after = values[best_offset..].iter().sum::<f64>() / right_len as f64;
80 let magnitude = (mean_after - mean_before).abs();
81 let significance = magnitude / global_std;
82
83 let change_points = if significance >= 1.5 {
84 vec![ChangePoint {
85 offset: best_offset,
86 mean_before,
87 mean_after,
88 magnitude,
89 significance,
90 }]
91 } else {
92 Vec::new()
93 };
94
95 ChangePointResult {
96 n_segments: change_points.len() + 1,
97 change_points,
98 is_valid: true,
99 }
100}
101
/// Runs [`change_point_detection`] with the default minimum segment length (100 bytes).
pub fn change_point_detection_default(data: &[u8]) -> ChangePointResult {
    change_point_detection(data, 100)
}
105
/// A fixed-size window flagged as an outlier by [`anomaly_detection`].
#[derive(Debug, Clone, Serialize)]
pub struct Anomaly {
    /// Byte offset of the start of the anomalous window.
    pub offset: usize,
    /// Mean byte value of the window.
    pub window_mean: f64,
    /// Shannon entropy (bits/byte) of the window, reported for context.
    pub window_entropy: f64,
    /// Absolute deviation of the window mean from the global mean, in std-devs.
    pub z_score: f64,
    /// Either `"high_mean"` or `"low_mean"`, relative to the global mean.
    pub anomaly_type: String,
}
114
/// Outcome of [`anomaly_detection`].
#[derive(Debug, Clone, Serialize)]
pub struct AnomalyDetectionResult {
    /// Windows whose mean deviated beyond the z-score threshold.
    pub anomalies: Vec<Anomaly>,
    /// Number of complete windows examined (trailing partial window ignored).
    pub total_windows: usize,
    /// `anomalies.len() / total_windows`; 0.0 when invalid.
    pub anomaly_rate: f64,
    /// False when `window_size` was zero or the data held no full window.
    pub is_valid: bool,
}
122
123pub fn anomaly_detection(
124 data: &[u8],
125 window_size: usize,
126 z_threshold: f64,
127) -> AnomalyDetectionResult {
128 if window_size == 0 || data.len() < window_size {
129 return AnomalyDetectionResult {
130 anomalies: Vec::new(),
131 total_windows: 0,
132 anomaly_rate: 0.0,
133 is_valid: false,
134 };
135 }
136
137 let total_windows = data.len() / window_size;
138 if total_windows == 0 {
139 return AnomalyDetectionResult {
140 anomalies: Vec::new(),
141 total_windows: 0,
142 anomaly_rate: 0.0,
143 is_valid: false,
144 };
145 }
146
147 let mut means = Vec::with_capacity(total_windows);
148 let mut entropies = Vec::with_capacity(total_windows);
149 for w in 0..total_windows {
150 let start = w * window_size;
151 let end = start + window_size;
152 let window = &data[start..end];
153 means.push(window.iter().map(|&b| b as f64).sum::<f64>() / window_size as f64);
154 entropies.push(shannon_entropy(window));
155 }
156
157 let global_mean = means.iter().sum::<f64>() / total_windows as f64;
158 let global_std = stddev(&means, global_mean);
159 if global_std < 1e-12 {
160 return AnomalyDetectionResult {
161 anomalies: Vec::new(),
162 total_windows,
163 anomaly_rate: 0.0,
164 is_valid: true,
165 };
166 }
167
168 let mut anomalies = Vec::new();
169 for i in 0..total_windows {
170 let z_score = (means[i] - global_mean).abs() / global_std;
171 if z_score > z_threshold {
172 anomalies.push(Anomaly {
173 offset: i * window_size,
174 window_mean: means[i],
175 window_entropy: entropies[i],
176 z_score,
177 anomaly_type: if means[i] > global_mean {
178 "high_mean".to_string()
179 } else {
180 "low_mean".to_string()
181 },
182 });
183 }
184 }
185
186 let anomaly_rate = anomalies.len() as f64 / total_windows as f64;
187 AnomalyDetectionResult {
188 anomalies,
189 total_windows,
190 anomaly_rate,
191 is_valid: true,
192 }
193}
194
/// Runs [`anomaly_detection`] with defaults: 256-byte windows, z-threshold 3.0.
pub fn anomaly_detection_default(data: &[u8]) -> AnomalyDetectionResult {
    anomaly_detection(data, 256, 3.0)
}
198
/// A maximal run of bytes at or above the burst threshold.
#[derive(Debug, Clone, Serialize)]
pub struct Burst {
    /// Offset of the first byte of the run (inclusive).
    pub start: usize,
    /// Offset of the last byte of the run (inclusive).
    pub end: usize,
    /// Run length in bytes (`end - start + 1`).
    pub length: usize,
    /// Mean byte value within the run.
    pub mean_value: f64,
}
206
/// Outcome of [`burst_detection`].
#[derive(Debug, Clone, Serialize)]
pub struct BurstResult {
    /// All detected bursts, in stream order.
    pub bursts: Vec<Burst>,
    /// Number of bursts found.
    pub n_bursts: usize,
    /// Length of the longest burst, or 0 when none were found.
    pub max_burst_length: usize,
    /// Average burst length, or 0.0 when none were found.
    pub mean_burst_length: f64,
    /// Fraction of all bytes that fall inside some burst.
    pub total_burst_fraction: f64,
    /// False only for empty input.
    pub is_valid: bool,
}
216
217pub fn burst_detection(data: &[u8], threshold_percentile: f64) -> BurstResult {
218 if data.is_empty() {
219 return BurstResult {
220 bursts: Vec::new(),
221 n_bursts: 0,
222 max_burst_length: 0,
223 mean_burst_length: 0.0,
224 total_burst_fraction: 0.0,
225 is_valid: false,
226 };
227 }
228
229 let mut sorted = data.to_vec();
230 sorted.sort_unstable();
231 let p = threshold_percentile.clamp(0.0, 100.0);
232 let idx = ((p / 100.0) * (sorted.len() - 1) as f64).round() as usize;
233 let threshold = sorted[idx];
234
235 let mut bursts = Vec::new();
236 let mut i = 0;
237 while i < data.len() {
238 if data[i] < threshold {
239 i += 1;
240 continue;
241 }
242 let start = i;
243 let mut sum = 0.0;
244 while i < data.len() && data[i] >= threshold {
245 sum += data[i] as f64;
246 i += 1;
247 }
248 let end = i - 1;
249 let length = end - start + 1;
250 bursts.push(Burst {
251 start,
252 end,
253 length,
254 mean_value: sum / length as f64,
255 });
256 }
257
258 let n_bursts = bursts.len();
259 let total_burst_len: usize = bursts.iter().map(|b| b.length).sum();
260 let max_burst_length = bursts.iter().map(|b| b.length).max().unwrap_or(0);
261 let mean_burst_length = if n_bursts == 0 {
262 0.0
263 } else {
264 total_burst_len as f64 / n_bursts as f64
265 };
266
267 BurstResult {
268 bursts,
269 n_bursts,
270 max_burst_length,
271 mean_burst_length,
272 total_burst_fraction: total_burst_len as f64 / data.len() as f64,
273 is_valid: true,
274 }
275}
276
/// Runs [`burst_detection`] with the default 95th-percentile threshold.
pub fn burst_detection_default(data: &[u8]) -> BurstResult {
    burst_detection(data, 95.0)
}
280
/// A detected jump in mean value between two adjacent windows.
#[derive(Debug, Clone, Serialize)]
pub struct Shift {
    /// Byte offset of the start of the window *after* the jump.
    pub offset: usize,
    /// Mean of the window before the jump.
    pub mean_before: f64,
    /// Mean of the window after the jump.
    pub mean_after: f64,
    /// Absolute difference between the adjacent window means.
    pub delta: f64,
    /// `delta` in units of the std-dev of all adjacent-window differences.
    pub z_score: f64,
}
289
/// Outcome of [`shift_detection`].
#[derive(Debug, Clone, Serialize)]
pub struct ShiftResult {
    /// Detected mean shifts, in stream order.
    pub shifts: Vec<Shift>,
    /// Number of shifts found (`shifts.len()`).
    pub n_shifts: usize,
    /// False when the data could not hold two full windows.
    pub is_valid: bool,
}
296
297pub fn shift_detection(data: &[u8], window_size: usize, threshold_sigma: f64) -> ShiftResult {
298 if window_size == 0 || data.len() < 2 * window_size {
299 return ShiftResult {
300 shifts: Vec::new(),
301 n_shifts: 0,
302 is_valid: false,
303 };
304 }
305
306 let n_windows = data.len() / window_size;
307 if n_windows < 2 {
308 return ShiftResult {
309 shifts: Vec::new(),
310 n_shifts: 0,
311 is_valid: false,
312 };
313 }
314
315 let mut means = Vec::with_capacity(n_windows);
316 for i in 0..n_windows {
317 let start = i * window_size;
318 let end = start + window_size;
319 let mean = data[start..end].iter().map(|&b| b as f64).sum::<f64>() / window_size as f64;
320 means.push(mean);
321 }
322
323 let global_mean = means.iter().sum::<f64>() / means.len() as f64;
324 let global_std = stddev(&means, global_mean);
325 if global_std < 1e-12 {
326 return ShiftResult {
327 shifts: Vec::new(),
328 n_shifts: 0,
329 is_valid: true,
330 };
331 }
332
333 let mut shifts = Vec::new();
334
335 let mut diffs = Vec::new();
337 for i in 0..(means.len() - 1) {
338 diffs.push((means[i + 1] - means[i]).abs());
339 }
340
341 let diff_mean = diffs.iter().sum::<f64>() / diffs.len() as f64;
343 let diff_std = stddev(&diffs, diff_mean);
344
345 for i in 0..(means.len() - 1) {
346 let delta = (means[i + 1] - means[i]).abs();
347 let z_score = if diff_std > 1e-12 {
348 delta / diff_std
349 } else {
350 0.0
351 };
352 if z_score > threshold_sigma {
353 shifts.push(Shift {
354 offset: (i + 1) * window_size,
355 mean_before: means[i],
356 mean_after: means[i + 1],
357 delta,
358 z_score,
359 });
360 }
361 }
362
363 ShiftResult {
364 n_shifts: shifts.len(),
365 shifts,
366 is_valid: true,
367 }
368}
369
/// Runs [`shift_detection`] with defaults: 500-byte windows, 3-sigma threshold.
pub fn shift_detection_default(data: &[u8]) -> ShiftResult {
    shift_detection(data, 500, 3.0)
}
373
/// Per-segment statistics used by [`temporal_drift`].
#[derive(Debug, Clone, Serialize)]
pub struct DriftSegment {
    /// Segment position (0-based) within the stream.
    pub index: usize,
    /// Mean byte value of the segment.
    pub mean: f64,
    /// Population variance of the segment's byte values.
    pub variance: f64,
    /// Shannon entropy (bits/byte) of the segment.
    pub entropy: f64,
}
381
/// Outcome of [`temporal_drift`].
#[derive(Debug, Clone, Serialize)]
pub struct DriftResult {
    /// Statistics for each of the equal(-ish) sized segments.
    pub segments: Vec<DriftSegment>,
    /// Slope of segment mean vs. segment index (regression fit).
    pub drift_slope: f64,
    /// R-squared of that regression fit.
    pub drift_r_squared: f64,
    /// True when the slope is finite and exceeds the significance heuristic.
    pub is_drifting: bool,
    /// False when fewer than 2 segments were requested or data was too short.
    pub is_valid: bool,
}
390
391pub fn temporal_drift(data: &[u8], n_segments: usize) -> DriftResult {
392 if n_segments < 2 || data.len() < n_segments {
393 return DriftResult {
394 segments: Vec::new(),
395 drift_slope: 0.0,
396 drift_r_squared: 0.0,
397 is_drifting: false,
398 is_valid: false,
399 };
400 }
401
402 let mut segments = Vec::with_capacity(n_segments);
403 let mut means = Vec::with_capacity(n_segments);
404 let mut x = Vec::with_capacity(n_segments);
405
406 for i in 0..n_segments {
407 let start = i * data.len() / n_segments;
408 let end = (i + 1) * data.len() / n_segments;
409 let segment = &data[start..end];
410 let (mean, variance) = mean_variance(segment);
411 let entropy = shannon_entropy(segment);
412
413 segments.push(DriftSegment {
414 index: i,
415 mean,
416 variance,
417 entropy,
418 });
419 means.push(mean);
420 x.push(i as f64);
421 }
422
423 let (drift_slope, intercept, drift_r_squared) = math::linear_regression(&x, &means);
424 let residual_sum_sq = means
425 .iter()
426 .zip(x.iter())
427 .map(|(y, xi)| {
428 let y_hat = drift_slope * *xi + intercept;
429 (y - y_hat).powi(2)
430 })
431 .sum::<f64>();
432 let residual_variance = residual_sum_sq / n_segments as f64;
433 let stderr = (residual_variance / n_segments as f64).sqrt();
434 let is_drifting = drift_slope.is_finite() && drift_slope.abs() > 2.0 * stderr;
435
436 DriftResult {
437 segments,
438 drift_slope,
439 drift_r_squared,
440 is_drifting,
441 is_valid: true,
442 }
443}
444
/// Runs [`temporal_drift`] with the default segment count (10).
pub fn temporal_drift_default(data: &[u8]) -> DriftResult {
    temporal_drift(data, 10)
}
448
/// Summary statistics for one session in [`inter_session_stability`].
#[derive(Debug, Clone, Serialize)]
pub struct SessionStats {
    /// Position of the session in the input slice (empty sessions keep their index).
    pub index: usize,
    /// Mean byte value of the session.
    pub mean: f64,
    /// Population variance of the session's byte values.
    pub variance: f64,
    /// Shannon entropy (bits/byte) of the session.
    pub entropy: f64,
    /// Session length in bytes.
    pub sample_size: usize,
}
457
/// Outcome of [`inter_session_stability`].
#[derive(Debug, Clone, Serialize)]
pub struct StabilityResult {
    /// Per-session statistics (empty sessions omitted).
    pub session_stats: Vec<SessionStats>,
    /// Coefficient of variation of session means.
    pub cv_mean: f64,
    /// Coefficient of variation of session variances.
    pub cv_variance: f64,
    /// Coefficient of variation of session entropies.
    pub cv_entropy: f64,
    /// True when all three CVs are below 0.1.
    pub is_stable: bool,
    /// False when fewer than 2 non-empty sessions were available.
    pub is_valid: bool,
}
467
468pub fn inter_session_stability(sessions: &[&[u8]]) -> StabilityResult {
469 if sessions.len() < 2 {
470 return StabilityResult {
471 session_stats: Vec::new(),
472 cv_mean: 0.0,
473 cv_variance: 0.0,
474 cv_entropy: 0.0,
475 is_stable: false,
476 is_valid: false,
477 };
478 }
479
480 let mut session_stats = Vec::new();
481 for (index, session) in sessions.iter().enumerate() {
482 if session.is_empty() {
483 continue;
484 }
485 let (mean, variance) = mean_variance(session);
486 session_stats.push(SessionStats {
487 index,
488 mean,
489 variance,
490 entropy: shannon_entropy(session),
491 sample_size: session.len(),
492 });
493 }
494
495 if session_stats.len() < 2 {
496 return StabilityResult {
497 session_stats,
498 cv_mean: 0.0,
499 cv_variance: 0.0,
500 cv_entropy: 0.0,
501 is_stable: false,
502 is_valid: false,
503 };
504 }
505
506 let means: Vec<f64> = session_stats.iter().map(|s| s.mean).collect();
507 let variances: Vec<f64> = session_stats.iter().map(|s| s.variance).collect();
508 let entropies: Vec<f64> = session_stats.iter().map(|s| s.entropy).collect();
509
510 let cv_mean = coefficient_of_variation(&means);
511 let cv_variance = coefficient_of_variation(&variances);
512 let cv_entropy = coefficient_of_variation(&entropies);
513
514 StabilityResult {
515 session_stats,
516 cv_mean,
517 cv_variance,
518 cv_entropy,
519 is_stable: cv_mean < 0.1 && cv_variance < 0.1 && cv_entropy < 0.1,
520 is_valid: true,
521 }
522}
523
/// Bundle of every single-stream temporal analysis, produced by
/// [`temporal_analysis_suite`].
#[derive(Debug, Clone, Serialize)]
pub struct TemporalAnalysisSuite {
    pub change_points: ChangePointResult,
    pub anomalies: AnomalyDetectionResult,
    pub bursts: BurstResult,
    pub shifts: ShiftResult,
    pub drift: DriftResult,
}
532
/// Runs every single-stream temporal analysis on `data` with default
/// parameters and bundles the results.
pub fn temporal_analysis_suite(data: &[u8]) -> TemporalAnalysisSuite {
    TemporalAnalysisSuite {
        change_points: change_point_detection_default(data),
        anomalies: anomaly_detection_default(data),
        bursts: burst_detection_default(data),
        shifts: shift_detection_default(data),
        drift: temporal_drift_default(data),
    }
}
542
/// Returns `(mean, population variance)` of the bytes; `(0.0, 0.0)` for empty input.
fn mean_variance(values: &[u8]) -> (f64, f64) {
    if values.is_empty() {
        return (0.0, 0.0);
    }
    let count = values.len() as f64;
    let total: f64 = values.iter().map(|&b| f64::from(b)).sum();
    let mean = total / count;
    let sq_dev: f64 = values
        .iter()
        .map(|&b| (f64::from(b) - mean).powi(2))
        .sum();
    (mean, sq_dev / count)
}
559
560fn coefficient_of_variation(values: &[f64]) -> f64 {
561 if values.is_empty() {
562 return 0.0;
563 }
564 let mean = values.iter().sum::<f64>() / values.len() as f64;
565 let sd = stddev(values, mean);
566 if mean.abs() < 1e-12 {
567 if sd < 1e-12 { 0.0 } else { f64::INFINITY }
568 } else {
569 sd / mean.abs()
570 }
571}
572
/// Population standard deviation around the supplied `mean`; 0.0 for empty input.
fn stddev(values: &[f64], mean: f64) -> f64 {
    if values.is_empty() {
        return 0.0;
    }
    let sum_sq: f64 = values.iter().map(|v| (v - mean).powi(2)).sum();
    (sum_sq / values.len() as f64).sqrt()
}
588
/// Shannon entropy of the byte histogram, in bits per byte (0.0 for empty input).
fn shannon_entropy(window: &[u8]) -> f64 {
    if window.is_empty() {
        return 0.0;
    }

    let mut histogram = [0_usize; 256];
    for &byte in window {
        histogram[usize::from(byte)] += 1;
    }

    let total = window.len() as f64;
    histogram
        .iter()
        .filter(|&&count| count > 0)
        .map(|&count| {
            let p = count as f64 / total;
            -p * p.log2()
        })
        .sum()
}
610
#[cfg(test)]
mod tests {
    use super::*;

    // Deterministic pseudo-random bytes from a 64-bit LCG (Knuth's MMIX
    // constants); the upper bits of the state become the output byte.
    fn random_data_seeded(len: usize, seed: u64) -> Vec<u8> {
        let mut state = seed;
        let mut data = Vec::with_capacity(len);
        for _ in 0..len {
            state = state
                .wrapping_mul(6_364_136_223_846_793_005)
                .wrapping_add(1_442_695_040_888_963_407);
            data.push((state >> 33) as u8);
        }
        data
    }

    #[test]
    fn change_point_detects_clear_shift() {
        // 5000 low bytes followed by 5000 high bytes: one obvious step change.
        let mut data = vec![0_u8; 5000];
        data.extend(vec![255_u8; 5000]);

        let result = change_point_detection(&data, 100);
        assert!(result.is_valid);
        let cp = result
            .change_points
            .iter()
            .min_by_key(|cp| cp.offset.abs_diff(5000));
        assert!(cp.is_some());
        let cp = cp.expect("expected a change point near midpoint");
        // The detected split should land near the true boundary at 5000.
        assert!(cp.offset.abs_diff(5000) <= 200);
    }

    #[test]
    fn change_point_random_data_has_few_significant_changes() {
        let data = random_data_seeded(10000, 0xdeadbeef);
        let result = change_point_detection(&data, 100);
        assert!(result.is_valid);
        assert!(result.change_points.len() <= 2);
    }

    #[test]
    fn change_point_constant_data_has_no_change_points() {
        let data = vec![42_u8; 10000];
        let result = change_point_detection(&data, 100);
        assert!(result.is_valid);
        assert_eq!(result.change_points.len(), 0);
    }

    #[test]
    fn change_point_too_short_is_invalid() {
        // 50 bytes cannot hold two 100-byte segments.
        let data = random_data_seeded(50, 0x1234);
        let result = change_point_detection(&data, 100);
        assert!(!result.is_valid);
    }

    #[test]
    fn anomaly_detection_random_data_has_low_rate() {
        let data = random_data_seeded(5000, 0xdeadbeef);
        let result = anomaly_detection(&data, 256, 3.0);
        assert!(result.is_valid);
        assert!(result.anomaly_rate < 0.05);
    }

    #[test]
    fn anomaly_detection_detects_injected_spike_window() {
        // Overwrite exactly one aligned 256-byte window with max values.
        let mut data = vec![128_u8; 5000];
        let spike_start = 256 * 5;
        for b in data.iter_mut().skip(spike_start).take(256) {
            *b = 255;
        }

        let result = anomaly_detection(&data, 256, 3.0);
        assert!(result.is_valid);
        assert!(result.anomalies.iter().any(|a| a.offset == spike_start));
    }

    #[test]
    fn anomaly_detection_too_short_is_invalid() {
        let data = random_data_seeded(100, 0x9abc);
        let result = anomaly_detection(&data, 256, 3.0);
        assert!(!result.is_valid);
    }

    #[test]
    fn anomaly_detection_empty_input_is_invalid_no_panic() {
        let result = anomaly_detection(&[], 256, 3.0);
        assert!(!result.is_valid);
        assert_eq!(result.anomalies.len(), 0);
    }

    #[test]
    fn burst_detection_random_data_has_few_short_bursts() {
        let data = random_data_seeded(10_000, 0x1234_5678);
        let result = burst_detection(&data, 95.0);
        assert!(result.is_valid);
        assert!(result.n_bursts < 700);
        assert!(result.mean_burst_length < 3.0);
    }

    #[test]
    fn burst_detection_detects_injected_high_run() {
        // Inject a 100-byte run of max values into otherwise-flat data.
        let mut data = vec![128_u8; 10_000];
        let start = 4_200;
        let end = 4_300;
        for b in data.iter_mut().take(end).skip(start) {
            *b = 255;
        }

        let result = burst_detection(&data, 99.0);
        assert!(result.is_valid);
        assert!(
            result
                .bursts
                .iter()
                .any(|b| b.start <= start && b.end >= end - 1)
        );
    }

    #[test]
    fn burst_detection_empty_is_invalid() {
        let result = burst_detection(&[], 95.0);
        assert!(!result.is_valid);
    }

    #[test]
    fn burst_detection_all_255_is_single_burst() {
        // Every byte meets the threshold, so the whole stream is one burst.
        let data = vec![255_u8; 5000];
        let result = burst_detection(&data, 95.0);
        assert!(result.is_valid);
        assert_eq!(result.n_bursts, 1);
        assert_eq!(result.max_burst_length, data.len());
    }

    #[test]
    fn shift_detection_detects_clear_mean_shift() {
        let mut data = vec![0_u8; 5000];
        data.extend(vec![128_u8; 5000]);
        let result = shift_detection(&data, 500, 3.0);

        assert!(result.is_valid);
        // A shift should be reported within one window of the true boundary.
        assert!(result.shifts.iter().any(|s| s.offset.abs_diff(5000) <= 500));
    }

    #[test]
    fn shift_detection_random_data_has_few_shifts() {
        let data = random_data_seeded(10_000, 0xa5a5_a5a5);
        let result = shift_detection(&data, 500, 3.0);
        assert!(result.is_valid);
        assert!(result.n_shifts <= 2);
    }

    #[test]
    fn shift_detection_short_data_is_invalid() {
        // 900 bytes cannot hold two 500-byte windows.
        let data = random_data_seeded(900, 0x9999);
        let result = shift_detection(&data, 500, 3.0);
        assert!(!result.is_valid);
    }

    #[test]
    fn shift_detection_empty_is_invalid() {
        let result = shift_detection(&[], 500, 3.0);
        assert!(!result.is_valid);
    }

    #[test]
    fn temporal_drift_increasing_mean_detected() {
        // Ten 1000-byte segments with linearly increasing means (20, 40, ...).
        let mut data = Vec::with_capacity(10_000);
        for segment in 0..10 {
            let base = 20 + segment * 20;
            data.extend(std::iter::repeat_n(base as u8, 1000));
        }

        let result = temporal_drift(&data, 10);
        assert!(result.is_valid);
        assert!(result.is_drifting);
        assert!(result.drift_slope > 0.0);
    }

    #[test]
    fn temporal_drift_stationary_random_not_drifting() {
        let data = random_data_seeded(10_000, 0x1111_2222);
        let result = temporal_drift(&data, 10);
        assert!(result.is_valid);
        assert!(!result.is_drifting);
    }

    #[test]
    fn temporal_drift_short_data_is_invalid() {
        // Fewer bytes than the requested 10 segments.
        let data = random_data_seeded(8, 0x7777);
        let result = temporal_drift(&data, 10);
        assert!(!result.is_valid);
    }

    #[test]
    fn temporal_drift_empty_is_invalid() {
        let result = temporal_drift(&[], 10);
        assert!(!result.is_valid);
    }

    #[test]
    fn inter_session_stability_similar_sessions_are_stable() {
        // Five same-length pseudo-random sessions should have matching stats.
        let s1 = random_data_seeded(5000, 0x1001);
        let s2 = random_data_seeded(5000, 0x1002);
        let s3 = random_data_seeded(5000, 0x1003);
        let s4 = random_data_seeded(5000, 0x1004);
        let s5 = random_data_seeded(5000, 0x1005);
        let sessions: Vec<&[u8]> = vec![&s1, &s2, &s3, &s4, &s5];

        let result = inter_session_stability(&sessions);
        assert!(result.is_valid);
        assert!(result.is_stable);
    }

    #[test]
    fn inter_session_stability_biased_session_is_unstable() {
        // One constant-255 session should break cross-session consistency.
        let s1 = random_data_seeded(5000, 0x2001);
        let s2 = random_data_seeded(5000, 0x2002);
        let s3 = random_data_seeded(5000, 0x2003);
        let s4 = random_data_seeded(5000, 0x2004);
        let s5 = vec![255_u8; 5000];
        let sessions: Vec<&[u8]> = vec![&s1, &s2, &s3, &s4, &s5];

        let result = inter_session_stability(&sessions);
        assert!(result.is_valid);
        assert!(!result.is_stable);
    }

    #[test]
    fn inter_session_stability_too_few_sessions_is_invalid() {
        let s1 = random_data_seeded(5000, 0x3001);
        let sessions: Vec<&[u8]> = vec![&s1];
        let result = inter_session_stability(&sessions);
        assert!(!result.is_valid);
    }

    #[test]
    fn inter_session_stability_empty_sessions_is_invalid() {
        // Two sessions are supplied, but both are empty and get filtered out.
        let s1 = Vec::<u8>::new();
        let s2 = Vec::<u8>::new();
        let sessions: Vec<&[u8]> = vec![&s1, &s2];
        let result = inter_session_stability(&sessions);
        assert!(!result.is_valid);
    }

    #[test]
    fn test_temporal_analysis_suite_serializes() {
        let data = random_data_seeded(5000, 0xdeadbeef);
        let result = temporal_analysis_suite(&data);
        let json = serde_json::to_string(&result).expect("serialization failed");
        assert!(json.contains("change_points"));
        assert!(json.contains("anomalies"));
    }
}