pub use avila_telemetry::{
    anomaly::{Anomaly, AnomalyDetector},
    forecasting::Forecaster,
    models::ARIMA,
    observability::DataQualityAssessment,
    TelemetryError, TimeSeries,
};

use serde::Serialize;
use std::sync::Arc;
use tracing_subscriber::{fmt, EnvFilter};

#[cfg(feature = "middleware")]
pub mod middleware;

pub mod storage;

#[derive(Debug, Clone, Serialize)]
pub struct AvxContext {
    pub stack: String,
    pub layer: String,
    pub env: String,
    pub cluster: String,
    pub mesh: String,
}

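/// Installs a global JSON `tracing` subscriber and emits an initialization
/// event carrying the Avx context fields.
///
/// Illustrative usage; the field values below are placeholders, and the
/// example is `ignore`d because the crate path depends on the final crate
/// name. Calling this more than once per process will panic, since `init`
/// sets the global default subscriber.
///
/// ```ignore
/// let ctx = AvxContext {
///     stack: "Avx".into(),
///     layer: "deep".into(),
///     env: "dev".into(),
///     cluster: "local".into(),
///     mesh: "internal".into(),
/// };
/// init_tracing(&ctx);
/// ```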
pub fn init_tracing(ctx: &AvxContext) {
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));

    fmt()
        .with_env_filter(filter)
        .json()
        .with_current_span(true)
        .with_span_list(true)
        .with_target(true)
        .flatten_event(true)
        .init();

    tracing::info!(
        stack = %ctx.stack,
        layer = %ctx.layer,
        env = %ctx.env,
        cluster = %ctx.cluster,
        mesh = %ctx.mesh,
        "Avx telemetry initialized"
    );
}

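/// Facade over `avila_telemetry` for anomaly detection, data-quality
/// assessment, and metric forecasting.
///
/// Illustrative usage; the latency values are placeholders, and the example is
/// `ignore`d because the crate path depends on the final crate name:
///
/// ```ignore
/// let metrics = AvxMetrics::new();
/// let anomalies = metrics
///     .track_latencies(vec![10.0, 11.0, 10.5, 250.0])
///     .unwrap();
/// println!("{} anomalies detected", anomalies.len());
/// ```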
#[derive(Clone, Debug)]
pub struct AvxMetrics {
    detector: Arc<AnomalyDetector>,
}

impl Default for AvxMetrics {
    fn default() -> Self {
        Self::new()
    }
}

impl AvxMetrics {
    pub fn new() -> Self {
        Self {
            detector: Arc::new(AnomalyDetector::new(3.0, 1.5)),
        }
    }

    pub fn track_latencies(&self, latencies_ms: Vec<f64>) -> Result<Vec<Anomaly>, TelemetryError> {
        let ts = TimeSeries::new(latencies_ms);
        self.detector.detect_zscore(&ts)
    }

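    /// Builds a `DataQualityAssessment` from the individual dimension scores
    /// and computes its overall score via `calculate_overall`.
    ///
    /// Illustrative usage with placeholder scores, `ignore`d for the same
    /// reason as the examples above:
    ///
    /// ```ignore
    /// let metrics = AvxMetrics::new();
    /// let quality = metrics.assess_quality(0.99, 0.98, 0.97, 50, 0.96);
    /// println!("overall score: {}", quality.overall_score);
    /// ```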
    pub fn assess_quality(
        &self,
        accuracy: f64,
        completeness: f64,
        consistency: f64,
        timeliness_ms: u64,
        validity: f64,
    ) -> DataQualityAssessment {
        let mut quality = DataQualityAssessment {
            accuracy,
            completeness,
            consistency,
            timeliness_ms,
            validity,
            overall_score: 0.0,
        };
        quality.calculate_overall();
        quality
    }

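    /// Fits an ARIMA(1, 1, 1) model to the historical series and forecasts
    /// the next `steps` values.
    ///
    /// Illustrative usage with a placeholder series, `ignore`d for the same
    /// reason as the examples above:
    ///
    /// ```ignore
    /// let metrics = AvxMetrics::new();
    /// let history = vec![10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0];
    /// let predictions = metrics.forecast_metric(history, 3).unwrap();
    /// assert_eq!(predictions.len(), 3);
    /// ```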
    pub fn forecast_metric(
        &self,
        historical: Vec<f64>,
        steps: usize,
    ) -> Result<Vec<f64>, TelemetryError> {
        let ts = TimeSeries::new(historical);
        let mut arima = ARIMA::new(1, 1, 1);
        arima.fit(&ts)?;
        let result = arima.forecast(steps)?;
        Ok(result.predictions)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_avx_metrics_tracking() {
        let metrics = AvxMetrics::new();

        let mut latencies = vec![10.0, 12.0, 11.0, 13.0, 9.0, 11.5, 10.5, 12.5, 11.0, 10.0];
        // Append an outlier far above the ~10 ms baseline.
        latencies.push(100.0);
        let anomalies = metrics.track_latencies(latencies).unwrap();

        // The default thresholds may be conservative, so an empty result only
        // logs a warning instead of failing the test.
        if anomalies.is_empty() {
            println!("Warning: No anomalies detected (threshold may need adjustment)");
        } else {
            assert!(!anomalies.is_empty(), "Should detect the 100ms spike");
        }
    }

    #[test]
    fn test_quality_assessment() {
        let metrics = AvxMetrics::new();
        let quality = metrics.assess_quality(0.99, 0.98, 0.97, 50, 0.96);

        assert!(
            quality.meets_nasa_standards(),
            "High quality should meet NASA standards"
        );
        assert!(
            quality.overall_score >= 0.95,
            "Overall score should be ≥0.95"
        );
    }

    #[test]
    fn test_forecast_metric() {
        let historical = vec![10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0];

        let metrics = AvxMetrics::new();
        let forecast = metrics.forecast_metric(historical, 5);
        assert!(forecast.is_ok());
        let predictions = forecast.unwrap();
        assert_eq!(predictions.len(), 5);

        // The input trends upward, so each prediction should not drop more
        // than 10% below its predecessor.
        for i in 1..predictions.len() {
            assert!(predictions[i] >= predictions[i - 1] * 0.9);
        }
    }

    #[test]
    fn test_avx_context_serialization() {
        let ctx = AvxContext {
            stack: "Avx".into(),
            layer: "deep".into(),
            env: "prod".into(),
            cluster: "AVL-BR".into(),
            mesh: "internal".into(),
        };

        let json = serde_json::to_string(&ctx).unwrap();
        assert!(json.contains("Avx"));
        assert!(json.contains("deep"));
        assert!(json.contains("AVL-BR"));
    }

    #[test]
    fn test_init_tracing_does_not_panic() {
        let ctx = AvxContext {
            stack: "Avx".into(),
            layer: "deep".into(),
            env: "test".into(),
            cluster: "local".into(),
            mesh: "internal".into(),
        };

        // `init_tracing` panics if a global subscriber was already installed
        // (e.g. by another test), so swallow any panic instead of failing.
        std::panic::catch_unwind(|| {
            init_tracing(&ctx);
        })
        .ok();
    }

    #[test]
    fn test_quality_assessment_low_scores() {
        let metrics = AvxMetrics::new();
        let quality = metrics.assess_quality(0.70, 0.75, 0.80, 200, 0.65);

        assert!(
            !quality.meets_nasa_standards(),
            "Low quality should not meet NASA standards"
        );
        assert!(
            quality.overall_score < 0.95,
            "Overall score should be <0.95"
        );
    }

    #[test]
    fn test_metrics_with_no_anomalies() {
        let metrics = AvxMetrics::new();

        let latencies = vec![10.0, 11.0, 10.5, 10.2, 10.8, 11.2];
        let anomalies = metrics.track_latencies(latencies).unwrap();

        assert!(anomalies.is_empty(), "Should not detect any anomalies");
    }

    #[test]
    fn test_forecast_with_flat_data() {
        // A degenerate series: 30 identical samples.
        let historical = vec![10.0; 30];
        let metrics = AvxMetrics::new();
        let forecast = metrics.forecast_metric(historical, 5);

        assert!(forecast.is_ok(), "Forecast should succeed");
        let predictions = forecast.unwrap();
        assert_eq!(predictions.len(), 5, "Should return 5 predictions");

        // Loose bounds: only check that the forecast stays at a sane order of
        // magnitude around the flat baseline.
        let mean_pred: f64 = predictions.iter().sum::<f64>() / predictions.len() as f64;
        assert!(
            mean_pred > -100.0 && mean_pred < 1000.0,
            "Mean prediction {} should be reasonable",
            mean_pred
        );
    }
}