use crate::pdf::ExecutionReport;
use crate::{ReportingError, Result};
use chrono::{DateTime, Duration, Utc};
use serde::{Deserialize, Serialize};

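/// Direction of a metric's trend, as classified from a least-squares fit over the
/// historical data.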
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum TrendDirection {
    Improving,
    Degrading,
    Stable,
    Volatile,
}

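/// Trend analysis for a single metric: direction, latest and previous values, summary
/// statistics, the underlying series, a short forecast, and any detected anomalies.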
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendReport {
    pub metric_name: String,
    pub trend: TrendDirection,
    pub change_percentage: f64,
    pub current_value: f64,
    pub previous_value: f64,
    pub average_value: f64,
    pub std_deviation: f64,
    pub data_points: Vec<DataPoint>,
    pub forecast: Vec<ForecastPoint>,
    pub anomalies: Vec<AnomalyPoint>,
}

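/// A single observed metric value at a point in time.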
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DataPoint {
    pub timestamp: DateTime<Utc>,
    pub value: f64,
}

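/// A forecasted metric value with a rough confidence interval.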
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ForecastPoint {
    pub timestamp: DateTime<Utc>,
    pub predicted_value: f64,
    pub confidence_interval: (f64, f64),
}

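/// An observation flagged as anomalous, with a coarse severity label ("medium" or "high").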
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnomalyPoint {
    pub timestamp: DateTime<Utc>,
    pub value: f64,
    pub severity: String,
}

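/// Result of a least-squares linear fit: slope, intercept, and coefficient of
/// determination (R²).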
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RegressionResult {
    pub slope: f64,
    pub intercept: f64,
    pub r_squared: f64,
}

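/// Computes trends, anomalies, and forecasts across a series of historical
/// [`ExecutionReport`]s.
///
/// Illustrative usage (not compiled; assumes `ExecutionReport` values named
/// `past_reports` are available from elsewhere):
///
/// ```ignore
/// let mut analyzer = TrendAnalyzer::new();
/// for report in past_reports {
///     analyzer.add_report(report);
/// }
/// let trend = analyzer.analyze_metric("avg_latency")?;
/// println!("{:?} ({:+.1}%)", trend.trend, trend.change_percentage);
/// ```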
pub struct TrendAnalyzer {
    historical_reports: Vec<ExecutionReport>,
}

impl TrendAnalyzer {
    pub fn new() -> Self {
        Self {
            historical_reports: Vec::new(),
        }
    }

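    /// Adds a report to the history, keeping reports sorted by start time.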
    pub fn add_report(&mut self, report: ExecutionReport) {
        self.historical_reports.push(report);
        self.historical_reports.sort_by_key(|r| r.start_time);
    }

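    /// Analyzes one metric by name (see `available_metrics`), returning its trend
    /// direction, summary statistics, detected anomalies, and a five-period forecast.
    /// Errors if there is no historical data or the metric name is unknown.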
    pub fn analyze_metric(&self, metric_name: &str) -> Result<TrendReport> {
        if self.historical_reports.is_empty() {
            return Err(ReportingError::Analysis("No historical data available".to_string()));
        }

        let data_points = self.extract_metric_values(metric_name)?;

        if data_points.is_empty() {
            return Err(ReportingError::Analysis(format!("No data for metric: {}", metric_name)));
        }

        let values: Vec<f64> = data_points.iter().map(|dp| dp.value).collect();
        let average_value = values.iter().sum::<f64>() / values.len() as f64;

        let variance =
            values.iter().map(|v| (v - average_value).powi(2)).sum::<f64>() / values.len() as f64;
        let std_deviation = variance.sqrt();

        let regression = self.linear_regression(&data_points);
        let trend = self.determine_trend(&regression, std_deviation);

        let current_value = data_points.last().unwrap().value;
        let previous_value = if data_points.len() > 1 {
            data_points[data_points.len() - 2].value
        } else {
            current_value
        };

        let change_percentage = if previous_value != 0.0 {
            ((current_value - previous_value) / previous_value) * 100.0
        } else {
            0.0
        };

        let anomalies = self.detect_anomalies(&data_points, average_value, std_deviation);

        let forecast = self.generate_forecast(&regression, &data_points, 5);

        Ok(TrendReport {
            metric_name: metric_name.to_string(),
            trend,
            change_percentage,
            current_value,
            previous_value,
            average_value,
            std_deviation,
            data_points,
            forecast,
            anomalies,
        })
    }

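    /// Builds the time series for a named metric from the stored reports, using each
    /// report's start time as the timestamp.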
    fn extract_metric_values(&self, metric_name: &str) -> Result<Vec<DataPoint>> {
        let mut data_points = Vec::new();

        for report in &self.historical_reports {
            let value = match metric_name {
                "error_rate" => report.metrics.error_rate,
                "avg_latency" => report.metrics.avg_latency_ms,
                "p95_latency" => report.metrics.p95_latency_ms,
                "p99_latency" => report.metrics.p99_latency_ms,
                "total_requests" => report.metrics.total_requests as f64,
                "failed_requests" => report.metrics.failed_requests as f64,
                "success_rate" => {
                    if report.metrics.total_requests > 0 {
                        report.metrics.successful_requests as f64
                            / report.metrics.total_requests as f64
                    } else {
                        0.0
                    }
                }
                _ => {
                    return Err(ReportingError::Analysis(format!(
                        "Unknown metric: {}",
                        metric_name
                    )))
                }
            };

            data_points.push(DataPoint {
                timestamp: report.start_time,
                value,
            });
        }

        Ok(data_points)
    }

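    /// Ordinary least-squares regression of metric value against time in days since
    /// the first data point:
    ///
    /// ```text
    /// slope     = (n·Σxy − Σx·Σy) / (n·Σx² − (Σx)²)
    /// intercept = (Σy − slope·Σx) / n
    /// R²        = 1 − SS_res / SS_tot
    /// ```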
    fn linear_regression(&self, data_points: &[DataPoint]) -> RegressionResult {
        if data_points.len() < 2 {
            return RegressionResult {
                slope: 0.0,
                intercept: 0.0,
                r_squared: 0.0,
            };
        }

        let n = data_points.len() as f64;

        let x_values: Vec<f64> = data_points
            .iter()
            .map(|dp| (dp.timestamp - data_points[0].timestamp).num_seconds() as f64 / 86400.0)
            .collect();

        let y_values: Vec<f64> = data_points.iter().map(|dp| dp.value).collect();

        let sum_x: f64 = x_values.iter().sum();
        let sum_y: f64 = y_values.iter().sum();
        let sum_xy: f64 = x_values.iter().zip(&y_values).map(|(x, y)| x * y).sum();
        let sum_xx: f64 = x_values.iter().map(|x| x * x).sum();

        // Guard against a zero denominator (all points sharing one timestamp).
        let denominator = n * sum_xx - sum_x * sum_x;
        let slope = if denominator != 0.0 {
            (n * sum_xy - sum_x * sum_y) / denominator
        } else {
            0.0
        };
        let intercept = (sum_y - slope * sum_x) / n;

        let mean_y = sum_y / n;
        let ss_tot: f64 = y_values.iter().map(|y| (y - mean_y).powi(2)).sum();
        let ss_res: f64 = x_values
            .iter()
            .zip(&y_values)
            .map(|(x, y)| {
                let predicted = slope * x + intercept;
                (y - predicted).powi(2)
            })
            .sum();

        let r_squared = if ss_tot > 0.0 {
            1.0 - (ss_res / ss_tot)
        } else {
            0.0
        };

        RegressionResult {
            slope,
            intercept,
            r_squared,
        }
    }

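    /// Classifies the fitted trend: a weak fit (R² < 0.5) is `Volatile`, a slope smaller
    /// than 10% of the standard deviation is `Stable`, a positive slope is `Degrading`,
    /// and a negative slope is `Improving` (this assumes larger values are worse, as for
    /// latency or error rate).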
    fn determine_trend(&self, regression: &RegressionResult, std_dev: f64) -> TrendDirection {
        let slope_threshold = std_dev * 0.1;

        if regression.r_squared < 0.5 {
            TrendDirection::Volatile
        } else if regression.slope.abs() < slope_threshold {
            TrendDirection::Stable
        } else if regression.slope > 0.0 {
            TrendDirection::Degrading
        } else {
            TrendDirection::Improving
        }
    }

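    /// Flags points whose absolute z-score, `|value - mean| / std_dev`, exceeds 2
    /// ("medium") or 3 ("high").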
    fn detect_anomalies(
        &self,
        data_points: &[DataPoint],
        mean: f64,
        std_dev: f64,
    ) -> Vec<AnomalyPoint> {
        let mut anomalies = Vec::new();
        let threshold = 2.0;

        for point in data_points {
            let z_score = ((point.value - mean) / std_dev).abs();

            if z_score > threshold {
                let severity = if z_score > 3.0 { "high" } else { "medium" };

                anomalies.push(AnomalyPoint {
                    timestamp: point.timestamp,
                    value: point.value,
                    severity: severity.to_string(),
                });
            }
        }

        anomalies
    }

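    /// Extrapolates the regression line one day at a time for `periods` days beyond the
    /// last observation. The confidence interval is the prediction ± 2 × a fixed
    /// placeholder standard error of 0.1, not an estimate derived from the residuals.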
    fn generate_forecast(
        &self,
        regression: &RegressionResult,
        data_points: &[DataPoint],
        periods: usize,
    ) -> Vec<ForecastPoint> {
        let mut forecast = Vec::new();

        if data_points.is_empty() {
            return forecast;
        }

        let last_timestamp = data_points.last().unwrap().timestamp;
        let first_timestamp = data_points[0].timestamp;

        for i in 1..=periods {
            let future_timestamp = last_timestamp + Duration::days(i as i64);
            let days_from_start =
                (future_timestamp - first_timestamp).num_seconds() as f64 / 86400.0;

            let predicted_value = regression.slope * days_from_start + regression.intercept;

            // Fixed placeholder standard error rather than a residual-based estimate.
            let std_error = 0.1;
            let confidence_interval =
                (predicted_value - 2.0 * std_error, predicted_value + 2.0 * std_error);

            forecast.push(ForecastPoint {
                timestamp: future_timestamp,
                predicted_value,
                confidence_interval,
            });
        }

        forecast
    }

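    /// Metric names accepted by `analyze_metric`.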
    pub fn available_metrics(&self) -> Vec<String> {
        vec![
            "error_rate".to_string(),
            "avg_latency".to_string(),
            "p95_latency".to_string(),
            "p99_latency".to_string(),
            "total_requests".to_string(),
            "failed_requests".to_string(),
            "success_rate".to_string(),
        ]
    }

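    /// Analyzes every available metric, skipping any metric whose analysis fails.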
    pub fn analyze_all_metrics(&self) -> Result<Vec<TrendReport>> {
        let mut reports = Vec::new();

        for metric in self.available_metrics() {
            if let Ok(report) = self.analyze_metric(&metric) {
                reports.push(report);
            }
        }

        Ok(reports)
    }
}

impl Default for TrendAnalyzer {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::pdf::ReportMetrics;

    #[test]
    fn test_trend_analyzer() {
        let mut analyzer = TrendAnalyzer::new();

        for i in 0..10 {
            let report = ExecutionReport {
                orchestration_name: "test".to_string(),
                start_time: Utc::now() - Duration::days(10 - i),
                end_time: Utc::now() - Duration::days(10 - i),
                duration_seconds: 100,
                status: "Completed".to_string(),
                total_steps: 5,
                completed_steps: 5,
                failed_steps: 0,
                metrics: ReportMetrics {
                    total_requests: 1000,
                    successful_requests: 980,
                    failed_requests: 20,
                    avg_latency_ms: 100.0 + i as f64 * 5.0,
                    p95_latency_ms: 200.0,
                    p99_latency_ms: 300.0,
                    error_rate: 0.02,
                },
                failures: vec![],
                recommendations: vec![],
            };

            analyzer.add_report(report);
        }

        let trend = analyzer.analyze_metric("avg_latency").unwrap();
        assert_eq!(trend.metric_name, "avg_latency");
        assert!(trend.data_points.len() >= 10);
    }
}