1use crate::diffx_core_mock::{diff, DiffResult};
2use crate::error::Result;
3
/// A single observation in a time series.
#[derive(Debug, Clone)]
pub struct TimeSeriesPoint {
    /// Time coordinate of the observation (unit is caller-defined;
    /// `create_timeseries_from_values` uses the element index).
    pub timestamp: f64,
    /// Observed value at `timestamp`.
    pub value: f64,
}
10
/// Aggregated output of `analyze_timeseries`: one field per analysis stage.
#[derive(Debug, Clone)]
pub struct TimeSeriesAnalysis {
    /// Linear-trend fit over the whole series.
    pub trend: TrendAnalysis,
    /// Detected (or absent) periodic structure.
    pub seasonality: SeasonalityAnalysis,
    /// Locations where the series' level or variance shifts.
    pub changepoints: Vec<ChangePoint>,
    /// Linear extrapolation past the last observation (5 steps by default).
    pub forecasts: Vec<ForecastPoint>,
    /// Points flagged as local outliers.
    pub anomalies: Vec<TimeSeriesAnomaly>,
    /// Autocorrelation, stationarity, and data-quality diagnostics.
    pub statistics: TimeSeriesStatistics,
}
21
/// Result of an ordinary least-squares linear fit of value against time.
#[derive(Debug, Clone)]
pub struct TrendAnalysis {
    /// Fitted slope (value units per timestamp unit).
    pub slope: f64,
    /// Fitted intercept of the regression line.
    pub intercept: f64,
    /// Coefficient of determination of the fit.
    pub r_squared: f64,
    /// `r_squared * |slope|` — combines fit quality with steepness.
    pub trend_strength: f64,
    /// Coarse classification of the slope.
    pub direction: TrendDirection,
}
31
/// Coarse trend classification produced by `analyze_trend`.
#[derive(Debug, Clone)]
pub enum TrendDirection {
    /// Slope > 0 (with |slope| >= 0.01).
    Increasing,
    /// Slope < 0 (with |slope| >= 0.01).
    Decreasing,
    /// |slope| < 0.01.
    Stable,
    /// NOTE(review): declared but never constructed anywhere in this file.
    Volatile,
}
40
/// Outcome of the candidate-period search in `detect_seasonality`.
#[derive(Debug, Clone)]
pub struct SeasonalityAnalysis {
    /// True when the strongest candidate's strength exceeds 0.3.
    pub detected: bool,
    /// Best-scoring period (in samples), if any candidate was evaluated.
    pub period: Option<f64>,
    /// Between-season variance divided by total variance for the best period.
    pub strength: f64,
    /// Per-point seasonal mean (all zeros when nothing was detected).
    pub seasonal_components: Vec<f64>,
}
49
/// A detected structural break in the series.
#[derive(Debug, Clone)]
pub struct ChangePoint {
    /// Timestamp of the sample where the break occurs.
    pub timestamp: f64,
    /// Index of that sample in the input slice.
    pub index: usize,
    /// Effect size: max of the mean shift (in pooled-std units) and the
    /// variance-change ratio.
    pub significance: f64,
    /// Kind of structural change detected.
    pub change_type: ChangeType,
    /// Mean of the window immediately before the break.
    pub before_value: f64,
    /// Mean of the window immediately after the break.
    pub after_value: f64,
}
60
/// Kind of structural change at a changepoint.
#[derive(Debug, Clone)]
pub enum ChangeType {
    /// Shift in the local mean (the default classification).
    LevelShift,
    /// NOTE(review): declared but never constructed anywhere in this file.
    TrendChange,
    /// Before/after variance (or std-dev) ratio exceeded 2x.
    VarianceChange,
}
68
/// One extrapolated point produced by `generate_forecasts`.
#[derive(Debug, Clone)]
pub struct ForecastPoint {
    /// Future timestamp: last observed timestamp + k * average step.
    pub timestamp: f64,
    /// Trend-line value evaluated at `timestamp`.
    pub predicted_value: f64,
    /// Approximate 95% interval: predicted ± 1.96 * uncertainty.
    pub confidence_interval: (f64, f64),
    /// Residual std-dev, widened by 10% per forecast step ahead.
    pub uncertainty: f64,
}
77
/// A point flagged as anomalous by `detect_timeseries_anomalies`.
#[derive(Debug, Clone)]
pub struct TimeSeriesAnomaly {
    /// Timestamp of the anomalous sample.
    pub timestamp: f64,
    /// Index of the sample in the input slice.
    pub index: usize,
    /// Observed value.
    pub value: f64,
    /// Local window mean the value was compared against.
    pub expected_value: f64,
    /// |z-score| / 3 — values above 1.0 exceed the 3-sigma threshold.
    pub anomaly_score: f64,
    /// Kind of anomaly detected.
    pub anomaly_type: AnomalyType,
}
88
/// Classification of a time-series anomaly.
#[derive(Debug, Clone)]
pub enum AnomalyType {
    /// Single sample outside 3 sigma of its local window.
    PointAnomaly,
    /// NOTE(review): declared but never constructed anywhere in this file.
    SequentialAnomaly,
    /// NOTE(review): declared but never constructed anywhere in this file.
    SeasonalAnomaly,
}
96
/// Diagnostic statistics computed by `calculate_timeseries_statistics`.
#[derive(Debug, Clone)]
pub struct TimeSeriesStatistics {
    /// Autocorrelation at lags 0 .. min(n/4, 20).
    pub autocorrelation: Vec<f64>,
    /// NOTE(review): currently a copy of `autocorrelation`, not a true PACF.
    pub partial_autocorrelation: Vec<f64>,
    /// Variance-ratio stationarity heuristic.
    pub stationarity_test: StationarityResult,
    /// RMS of the second difference of the series.
    pub noise_level: f64,
    /// Heuristic completeness/consistency/outlier scores.
    pub data_quality: DataQuality,
}
106
/// Result of the variance-ratio stationarity heuristic in `test_stationarity`.
#[derive(Debug, Clone)]
pub struct StationarityResult {
    /// True when the differenced-to-raw variance ratio is below 0.8.
    pub is_stationary: bool,
    /// Variance of the first difference divided by variance of the series.
    pub test_statistic: f64,
    /// Fixed pseudo p-value (0.01 or 0.99) — not a real statistical test.
    pub p_value: f64,
    /// 0 when judged stationary, otherwise 1.
    pub differencing_required: usize,
}
115
/// Heuristic data-quality scores produced by `assess_data_quality`.
#[derive(Debug, Clone)]
pub struct DataQuality {
    /// Always 1.0 here — a plain slice has no notion of missing samples.
    pub completeness: f64,
    /// 1 - mean absolute relative step between consecutive points (capped at 1).
    pub consistency: f64,
    /// Always 1.0 here — timestamps are not inspected by this function.
    pub regularity: f64,
    /// Fraction of samples farther than 3 sigma from the mean.
    pub outlier_ratio: f64,
}
124
125pub fn analyze_timeseries(data: &[TimeSeriesPoint]) -> Result<TimeSeriesAnalysis> {
127 if data.len() < 10 {
128 return Err(crate::error::BenfError::InsufficientData(data.len()));
129 }
130
131 let values: Vec<f64> = data.iter().map(|p| p.value).collect();
132 let timestamps: Vec<f64> = data.iter().map(|p| p.timestamp).collect();
133
134 Ok(TimeSeriesAnalysis {
135 trend: analyze_trend(×tamps, &values)?,
136 seasonality: detect_seasonality(&values)?,
137 changepoints: detect_changepoints(×tamps, &values)?,
138 forecasts: generate_forecasts(×tamps, &values, 5)?,
139 anomalies: detect_timeseries_anomalies(×tamps, &values)?,
140 statistics: calculate_timeseries_statistics(&values)?,
141 })
142}
143
144fn analyze_trend(timestamps: &[f64], values: &[f64]) -> Result<TrendAnalysis> {
146 let n = values.len() as f64;
147
148 let sum_x: f64 = timestamps.iter().sum();
150 let sum_y: f64 = values.iter().sum();
151 let sum_xy: f64 = timestamps
152 .iter()
153 .zip(values.iter())
154 .map(|(x, y)| x * y)
155 .sum();
156 let sum_x2: f64 = timestamps.iter().map(|x| x * x).sum();
157
158 let slope = (n * sum_xy - sum_x * sum_y) / (n * sum_x2 - sum_x * sum_x);
159 let intercept = (sum_y - slope * sum_x) / n;
160
161 let mean_y = sum_y / n;
163 let ss_tot: f64 = values.iter().map(|y| (y - mean_y).powi(2)).sum();
164 let ss_res: f64 = timestamps
165 .iter()
166 .zip(values.iter())
167 .map(|(x, y)| {
168 let predicted = slope * x + intercept;
169 (y - predicted).powi(2)
170 })
171 .sum();
172
173 let r_squared = 1.0 - (ss_res / ss_tot);
174
175 let trend_strength = r_squared * slope.abs();
177 let direction = if slope.abs() < 0.01 {
178 TrendDirection::Stable
179 } else if slope > 0.0 {
180 TrendDirection::Increasing
181 } else {
182 TrendDirection::Decreasing
183 };
184
185 Ok(TrendAnalysis {
186 slope,
187 intercept,
188 r_squared,
189 trend_strength,
190 direction,
191 })
192}
193
194fn detect_seasonality(values: &[f64]) -> Result<SeasonalityAnalysis> {
196 let n = values.len();
197 let mut best_period = None;
198 let mut best_strength = 0.0;
199
200 for period in 2..=(n / 4) {
202 let strength = calculate_seasonal_strength(values, period);
203 if strength > best_strength {
204 best_strength = strength;
205 best_period = Some(period as f64);
206 }
207 }
208
209 let detected = best_strength > 0.3; let seasonal_components = if detected {
211 calculate_seasonal_components(values, best_period.unwrap() as usize)
212 } else {
213 vec![0.0; n]
214 };
215
216 Ok(SeasonalityAnalysis {
217 detected,
218 period: best_period,
219 strength: best_strength,
220 seasonal_components,
221 })
222}
223
/// Scans the series with a sliding before/after window pair and reports
/// indices where the local mean or variance changes significantly.
///
/// A changepoint is recorded when the mean shift exceeds 2 pooled standard
/// deviations, or the variance/std-dev ratio between windows exceeds 2x
/// (the latter is classified as `VarianceChange`).
fn detect_changepoints(timestamps: &[f64], values: &[f64]) -> Result<Vec<ChangePoint>> {
    let mut changepoints = Vec::new();
    // Window is 10% of the series, clamped to [5, 20] samples.
    let window_size = (values.len() / 10).clamp(5, 20);

    // NOTE(review): if values.len() < 2 * window_size this range is empty and
    // no changepoints are reported — callers pass >= 10 points.
    for i in window_size..(values.len() - window_size) {
        let before_window = &values[(i - window_size)..i];
        let after_window = &values[i..(i + window_size)];

        let before_mean: f64 = before_window.iter().sum::<f64>() / before_window.len() as f64;
        let after_mean: f64 = after_window.iter().sum::<f64>() / after_window.len() as f64;

        // Population variance of each window.
        let before_var: f64 = before_window
            .iter()
            .map(|x| (x - before_mean).powi(2))
            .sum::<f64>()
            / before_window.len() as f64;
        let after_var: f64 = after_window
            .iter()
            .map(|x| (x - after_mean).powi(2))
            .sum::<f64>()
            / after_window.len() as f64;

        // Summaries are packed into JSON so the (mock) structural-diff engine
        // can report which statistic changed between the windows.
        let before_stats = serde_json::json!({
            "mean": before_mean,
            "variance": before_var,
            "std_dev": before_var.sqrt(),
            "cv": if before_mean.abs() > 0.0 { before_var.sqrt() / before_mean.abs() } else { 0.0 }
        });

        let after_stats = serde_json::json!({
            "mean": after_mean,
            "variance": after_var,
            "std_dev": after_var.sqrt(),
            "cv": if after_mean.abs() > 0.0 { after_var.sqrt() / after_mean.abs() } else { 0.0 }
        });

        let diff_results = diff(&before_stats, &after_stats, None);

        let mean_change = (after_mean - before_mean).abs();
        let pooled_std = ((before_var + after_var) / 2.0).sqrt();

        // Both windows completely flat -> no basis for a significance score.
        if pooled_std > 0.0 {
            // Mean shift expressed in pooled standard deviations.
            let significance = mean_change / pooled_std;

            let mut change_type = ChangeType::LevelShift;
            let mut max_change_ratio = 0.0;

            // Walk the diff output looking for variance/std_dev modifications;
            // a >2x ratio reclassifies the point as a variance change.
            if let Ok(results) = &diff_results {
                for diff_result in results {
                    if let DiffResult::Modified(path, old_val, new_val) = diff_result {
                        if path.contains("variance") || path.contains("std_dev") {
                            if let (Some(old), Some(new)) = (old_val.as_f64(), new_val.as_f64()) {
                                // Symmetric ratio (>= 1); the 0.001 floor
                                // guards against division by ~zero.
                                let ratio = (new / old.max(0.001)).max(old / new.max(0.001));
                                if ratio > max_change_ratio {
                                    max_change_ratio = ratio;
                                    if ratio > 2.0 {
                                        change_type = ChangeType::VarianceChange;
                                    }
                                }
                            }
                        }
                    }
                }
            }

            if significance > 2.0 || max_change_ratio > 2.0 {
                changepoints.push(ChangePoint {
                    timestamp: timestamps[i],
                    index: i,
                    significance: significance.max(max_change_ratio),
                    change_type,
                    before_value: before_mean,
                    after_value: after_mean,
                });
            }
        }
    }

    Ok(changepoints)
}
310
311fn generate_forecasts(
313 timestamps: &[f64],
314 values: &[f64],
315 steps: usize,
316) -> Result<Vec<ForecastPoint>> {
317 let trend = analyze_trend(timestamps, values)?;
318 let last_timestamp = timestamps.last().unwrap();
319 let time_step = if timestamps.len() > 1 {
320 (timestamps[timestamps.len() - 1] - timestamps[0]) / (timestamps.len() - 1) as f64
321 } else {
322 1.0
323 };
324
325 let mut forecasts = Vec::new();
326
327 let residuals: Vec<f64> = timestamps
329 .iter()
330 .zip(values.iter())
331 .map(|(x, y)| {
332 let predicted = trend.slope * x + trend.intercept;
333 y - predicted
334 })
335 .collect();
336
337 let residual_std = {
338 let mean_residual = residuals.iter().sum::<f64>() / residuals.len() as f64;
339 let variance = residuals
340 .iter()
341 .map(|r| (r - mean_residual).powi(2))
342 .sum::<f64>()
343 / residuals.len() as f64;
344 variance.sqrt()
345 };
346
347 for i in 1..=steps {
348 let future_timestamp = last_timestamp + (i as f64 * time_step);
349 let predicted_value = trend.slope * future_timestamp + trend.intercept;
350
351 let uncertainty = residual_std * (1.0 + i as f64 * 0.1); let confidence_interval = (
354 predicted_value - 1.96 * uncertainty,
355 predicted_value + 1.96 * uncertainty,
356 );
357
358 forecasts.push(ForecastPoint {
359 timestamp: future_timestamp,
360 predicted_value,
361 confidence_interval,
362 uncertainty,
363 });
364 }
365
366 Ok(forecasts)
367}
368
369fn detect_timeseries_anomalies(
371 timestamps: &[f64],
372 values: &[f64],
373) -> Result<Vec<TimeSeriesAnomaly>> {
374 let mut anomalies = Vec::new();
375 let window_size = (values.len() / 20).clamp(3, 10);
376
377 for i in window_size..(values.len() - window_size) {
378 let window = &values[(i - window_size)..(i + window_size + 1)];
380 let mean: f64 = window.iter().sum::<f64>() / window.len() as f64;
381 let std: f64 = {
382 let variance =
383 window.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / window.len() as f64;
384 variance.sqrt()
385 };
386
387 if std > 0.0 {
388 let z_score = (values[i] - mean) / std;
389
390 if z_score.abs() > 3.0 {
391 let expected_value = mean;
392 let anomaly_score = z_score.abs() / 3.0;
393
394 anomalies.push(TimeSeriesAnomaly {
395 timestamp: timestamps[i],
396 index: i,
397 value: values[i],
398 expected_value,
399 anomaly_score,
400 anomaly_type: AnomalyType::PointAnomaly,
401 });
402 }
403 }
404 }
405
406 Ok(anomalies)
407}
408
409fn calculate_timeseries_statistics(values: &[f64]) -> Result<TimeSeriesStatistics> {
411 let n = values.len();
412
413 let max_lags = (n / 4).min(20);
415 let mut autocorrelation = Vec::new();
416
417 for lag in 0..max_lags {
418 let correlation = calculate_autocorrelation(values, lag);
419 autocorrelation.push(correlation);
420 }
421
422 let partial_autocorrelation = autocorrelation.clone(); let stationarity_test = test_stationarity(values);
427
428 let noise_level = calculate_noise_level(values);
430
431 let data_quality = assess_data_quality(values);
433
434 Ok(TimeSeriesStatistics {
435 autocorrelation,
436 partial_autocorrelation,
437 stationarity_test,
438 noise_level,
439 data_quality,
440 })
441}
442
/// Ratio of between-season variance to total variance for a candidate
/// `period`; larger values indicate a stronger seasonal pattern.
fn calculate_seasonal_strength(values: &[f64], period: usize) -> f64 {
    if period >= values.len() {
        return 0.0;
    }

    // Accumulate a (sum, count) pair per phase of the candidate period.
    let mut sums = vec![0.0; period];
    let mut counts = vec![0usize; period];
    for (i, &v) in values.iter().enumerate() {
        sums[i % period] += v;
        counts[i % period] += 1;
    }

    let seasonal_means: Vec<f64> = sums
        .iter()
        .zip(&counts)
        .map(|(&s, &c)| if c > 0 { s / c as f64 } else { 0.0 })
        .collect();

    // NOTE(review): the reference mean is the mean of the per-phase means
    // (as in the original), which can differ slightly from the grand mean
    // when the series length is not a multiple of `period`.
    let reference_mean = seasonal_means.iter().sum::<f64>() / period as f64;

    let between_variance: f64 = seasonal_means
        .iter()
        .map(|m| (m - reference_mean).powi(2))
        .sum::<f64>()
        / period as f64;
    let total_variance: f64 = values
        .iter()
        .map(|v| (v - reference_mean).powi(2))
        .sum::<f64>()
        / values.len() as f64;

    if total_variance > 0.0 {
        between_variance / total_variance
    } else {
        0.0
    }
}
485
/// Expands the per-phase mean of `values` (for the given `period`) back to a
/// full-length vector: element i carries the mean of phase `i % period`.
fn calculate_seasonal_components(values: &[f64], period: usize) -> Vec<f64> {
    let mut phase_means = vec![0.0; period];
    let mut phase_counts = vec![0usize; period];

    // First pass: accumulate sums and counts per phase.
    for (i, &v) in values.iter().enumerate() {
        phase_means[i % period] += v;
        phase_counts[i % period] += 1;
    }
    // Second pass: convert sums into means (empty phases stay 0.0).
    for (mean, &count) in phase_means.iter_mut().zip(&phase_counts) {
        if count > 0 {
            *mean /= count as f64;
        }
    }

    // Broadcast the phase means over the original length.
    (0..values.len()).map(|i| phase_means[i % period]).collect()
}
512
/// Sample autocorrelation of `values` at the given `lag`.
/// Returns 0.0 for out-of-range lags or a zero-variance series; lag 0 is 1.0
/// for any non-constant series.
fn calculate_autocorrelation(values: &[f64], lag: usize) -> f64 {
    if lag >= values.len() {
        return 0.0;
    }

    let mean = values.iter().sum::<f64>() / values.len() as f64;
    let overlap = values.len() - lag;

    // Cross-product of mean-centered values with their lag-shifted copy.
    let numerator: f64 = values[..overlap]
        .iter()
        .zip(&values[lag..])
        .map(|(a, b)| (a - mean) * (b - mean))
        .sum();
    let denominator: f64 = values.iter().map(|v| (v - mean).powi(2)).sum();

    if denominator > 0.0 {
        numerator / denominator
    } else {
        0.0
    }
}
533
534fn test_stationarity(values: &[f64]) -> StationarityResult {
535 let n = values.len();
537 if n < 3 {
538 return StationarityResult {
539 is_stationary: false,
540 test_statistic: 0.0,
541 p_value: 1.0,
542 differencing_required: 1,
543 };
544 }
545
546 let diff: Vec<f64> = (1..n).map(|i| values[i] - values[i - 1]).collect();
548
549 let original_var: f64 = {
551 let mean = values.iter().sum::<f64>() / values.len() as f64;
552 values.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / values.len() as f64
553 };
554
555 let diff_var: f64 = {
556 let mean = diff.iter().sum::<f64>() / diff.len() as f64;
557 diff.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / diff.len() as f64
558 };
559
560 let test_statistic = if original_var > 0.0 {
561 diff_var / original_var
562 } else {
563 1.0
564 };
565 let is_stationary = test_statistic < 0.8;
566 let p_value = if is_stationary { 0.01 } else { 0.99 };
567
568 StationarityResult {
569 is_stationary,
570 test_statistic,
571 p_value,
572 differencing_required: if is_stationary { 0 } else { 1 },
573 }
574}
575
/// RMS of the second difference of the series — the second difference
/// removes any linear trend, leaving mostly noise. Returns 0.0 for fewer
/// than 3 points.
fn calculate_noise_level(values: &[f64]) -> f64 {
    if values.len() < 3 {
        return 0.0;
    }

    let squared_sum: f64 = values
        .windows(3)
        .map(|w| {
            let d2 = w[2] - 2.0 * w[1] + w[0];
            d2 * d2
        })
        .sum();

    (squared_sum / (values.len() - 2) as f64).sqrt()
}
589
590fn assess_data_quality(values: &[f64]) -> DataQuality {
591 let n = values.len();
592
593 let completeness = 1.0;
595
596 let changes: Vec<f64> = (1..n)
598 .map(|i| ((values[i] - values[i - 1]) / values[i - 1].abs().max(1e-10)).abs())
599 .collect();
600
601 let consistency = 1.0 - (changes.iter().sum::<f64>() / changes.len() as f64).min(1.0);
602
603 let regularity = 1.0;
605
606 let mean = values.iter().sum::<f64>() / n as f64;
608 let std = {
609 let variance = values.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / n as f64;
610 variance.sqrt()
611 };
612
613 let outlier_count = values
614 .iter()
615 .filter(|&&x| (x - mean).abs() > 3.0 * std)
616 .count();
617
618 let outlier_ratio = outlier_count as f64 / n as f64;
619
620 DataQuality {
621 completeness,
622 consistency,
623 regularity,
624 outlier_ratio,
625 }
626}
627
628pub fn create_timeseries_from_values(values: &[f64]) -> Vec<TimeSeriesPoint> {
630 values
631 .iter()
632 .enumerate()
633 .map(|(i, &value)| TimeSeriesPoint {
634 timestamp: i as f64,
635 value,
636 })
637 .collect()
638}
639
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_create_timeseries_from_values() {
        let values = vec![1.0, 2.0, 3.0, 4.0, 5.0];
        let timeseries = create_timeseries_from_values(&values);

        assert_eq!(timeseries.len(), 5);
        assert_eq!(timeseries[0].value, 1.0);
        assert_eq!(timeseries[0].timestamp, 0.0);
        assert_eq!(timeseries[4].value, 5.0);
        assert_eq!(timeseries[4].timestamp, 4.0);
    }

    #[test]
    fn test_analyze_timeseries_basic() {
        // Perfectly linear, increasing series of exactly 10 points.
        let data: Vec<TimeSeriesPoint> = (0..10)
            .map(|i| TimeSeriesPoint {
                timestamp: i as f64,
                value: (i + 1) as f64,
            })
            .collect();

        let result = analyze_timeseries(&data).unwrap();

        assert!(result.trend.slope > 0.0);
        assert!(result.trend.r_squared > 0.9);
        // BUG FIX: the original called `matches!` without asserting on the
        // result, so the direction was never actually checked.
        assert!(matches!(result.trend.direction, TrendDirection::Increasing));

        assert_eq!(result.forecasts.len(), 5);
        assert!(result.forecasts[0].predicted_value > 10.0);
        assert!(!result.statistics.autocorrelation.is_empty());
    }

    #[test]
    fn test_analyze_trend() {
        let timestamps = vec![0.0, 1.0, 2.0, 3.0, 4.0];
        let values = vec![1.0, 3.0, 5.0, 7.0, 9.0];

        let trend = analyze_trend(&timestamps, &values).unwrap();

        assert!(trend.slope > 1.5);
        assert!(trend.slope < 2.5);
        assert!(trend.r_squared > 0.99);
        // BUG FIX: `matches!` result was previously discarded.
        assert!(matches!(trend.direction, TrendDirection::Increasing));
    }

    #[test]
    fn test_detect_seasonality() {
        // Period-4 square-ish wave repeated three times.
        let values = vec![
            0.0, 1.0, 0.0, -1.0, 0.0, 1.0, 0.0, -1.0, 0.0, 1.0, 0.0, -1.0,
        ];

        let seasonality = detect_seasonality(&values).unwrap();

        if seasonality.detected {
            assert!(seasonality.period.unwrap() >= 2.0);
            assert!(seasonality.strength > 0.0);
        }
    }

    #[test]
    fn test_detect_changepoints() {
        let timestamps = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0];
        // Level shift from ~1.0 to ~10.0 at index 5.
        let values = vec![1.0, 1.1, 0.9, 1.05, 0.95, 10.0, 9.9, 10.1, 9.95, 10.05];

        let changepoints = detect_changepoints(&timestamps, &values).unwrap();

        if !changepoints.is_empty() {
            let major_changepoint = &changepoints[0];
            assert!(major_changepoint.index >= 4);
            assert!(major_changepoint.index <= 6);
            assert!(major_changepoint.significance > 2.0);
        }
    }

    #[test]
    fn test_generate_forecasts() {
        let timestamps = vec![0.0, 1.0, 2.0, 3.0, 4.0];
        let values = vec![1.0, 2.0, 3.0, 4.0, 5.0];

        let forecasts = generate_forecasts(&timestamps, &values, 3).unwrap();

        assert_eq!(forecasts.len(), 3);

        // Forecasts extend the increasing trend monotonically.
        assert!(forecasts[0].predicted_value > 5.0);
        assert!(forecasts[1].predicted_value > forecasts[0].predicted_value);
        assert!(forecasts[2].predicted_value > forecasts[1].predicted_value);

        assert!(forecasts[0].confidence_interval.0 <= forecasts[0].confidence_interval.1);
        assert!(forecasts[0].uncertainty >= 0.0);
    }

    #[test]
    fn test_detect_timeseries_anomalies() {
        let timestamps = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0];
        // Single extreme spike at index 3.
        let values = vec![1.0, 2.0, 3.0, 100.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0];

        let anomalies = detect_timeseries_anomalies(&timestamps, &values).unwrap();

        if !anomalies.is_empty() {
            let anomaly = &anomalies[0];
            assert!(anomaly.value == 100.0 || anomaly.anomaly_score > 3.0);
            // BUG FIX: `matches!` result was previously discarded.
            assert!(matches!(anomaly.anomaly_type, AnomalyType::PointAnomaly));
        }
    }

    #[test]
    fn test_calculate_timeseries_statistics() {
        let values = vec![1.0, 2.0, 3.0, 4.0, 5.0, 4.0, 3.0, 2.0, 1.0, 2.0];

        let stats = calculate_timeseries_statistics(&values).unwrap();

        assert!(!stats.autocorrelation.is_empty());
        assert!(!stats.partial_autocorrelation.is_empty());
        assert!(stats.noise_level >= 0.0);
        assert!(stats.data_quality.completeness > 0.0);
        assert!(stats.data_quality.consistency >= 0.0);
        assert!(stats.data_quality.outlier_ratio >= 0.0);
    }

    #[test]
    fn test_insufficient_data_error() {
        // Only 2 points — below the 10-point minimum.
        let data = vec![
            TimeSeriesPoint {
                timestamp: 0.0,
                value: 1.0,
            },
            TimeSeriesPoint {
                timestamp: 1.0,
                value: 2.0,
            },
        ];

        let result = analyze_timeseries(&data);
        assert!(result.is_err());
    }

    #[test]
    fn test_stable_trend_detection() {
        let timestamps = vec![0.0, 1.0, 2.0, 3.0, 4.0];
        let values = vec![5.0, 5.01, 4.99, 5.005, 4.995];

        let trend = analyze_trend(&timestamps, &values).unwrap();

        // BUG FIX: `matches!` result was previously discarded.
        assert!(matches!(trend.direction, TrendDirection::Stable));
        assert!(trend.slope.abs() < 0.1);
    }
}