1use axum::{
10 extract::{Query, State},
11 http::StatusCode,
12 response::Json,
13};
14use serde::{Deserialize, Serialize};
15use std::sync::Arc;
16
17#[cfg(feature = "database")]
18use chrono::{DateTime, Utc};
19
/// Shared application state for the contract-health API handlers.
#[derive(Clone)]
pub struct ContractHealthState {
    /// Source of structural drift incidents (queried in-process).
    pub incident_manager: Arc<mockforge_core::incidents::IncidentManager>,
    /// Source of semantic drift incidents (queried in-process).
    pub semantic_manager: Arc<mockforge_core::incidents::SemanticIncidentManager>,
    /// Optional database handle; threat assessments and change forecasts are
    /// only read when this is `Some` and the `database` feature is enabled.
    pub database: Option<crate::database::Database>,
}
30
/// Query-string parameters accepted by the timeline endpoint.
#[derive(Debug, Deserialize)]
pub struct TimelineQuery {
    /// Restrict results to one workspace.
    pub workspace_id: Option<String>,
    /// Restrict results to one endpoint path.
    pub endpoint: Option<String>,
    /// Restrict results to one HTTP method.
    pub method: Option<String>,
    /// Start of a date range, as a string.
    /// NOTE(review): declared but not currently applied by `get_timeline` —
    /// confirm intended format (RFC 3339?) before wiring up filtering.
    pub start_date: Option<String>,
    /// End of a date range, as a string.
    /// NOTE(review): declared but not currently applied by `get_timeline`.
    pub end_date: Option<String>,
    /// Event-source filter, e.g. "all" (the default when absent),
    /// "structural", or "semantic".
    pub event_type: Option<String>,
    /// Maximum number of events to return; also forwarded to the semantic
    /// incident query.
    pub limit: Option<usize>,
}
49
/// A single event on the contract-health timeline.
///
/// Serialized with an internal `"type"` tag whose value is the renamed
/// variant name (e.g. `{"type": "structural_drift", ...}`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum TimelineEvent {
    /// A structural drift incident (shape/schema-level change).
    #[serde(rename = "structural_drift")]
    StructuralDrift {
        id: String,
        endpoint: String,
        method: String,
        /// Debug-formatted incident type name.
        incident_type: String,
        /// Debug-formatted severity name.
        severity: String,
        /// Debug-formatted status name.
        status: String,
        /// Detection time as an i64 — presumably a Unix timestamp; used as
        /// the sort key for the timeline.
        detected_at: i64,
        /// Free-form incident payload, passed through verbatim.
        details: serde_json::Value,
    },
    /// A semantic drift incident (meaning-level change).
    #[serde(rename = "semantic_drift")]
    SemanticDrift {
        id: String,
        endpoint: String,
        method: String,
        /// Debug-formatted semantic change type name.
        change_type: String,
        severity: String,
        status: String,
        /// Confidence that the detected change is semantic.
        semantic_confidence: f64,
        /// Score for how likely the change is a "soft" break.
        soft_breaking_score: f64,
        /// Detection time as an i64 — presumably a Unix timestamp.
        detected_at: i64,
        details: serde_json::Value,
    },
    /// A threat assessment row loaded from the database.
    #[serde(rename = "threat_assessment")]
    ThreatAssessment {
        id: String,
        /// May be absent for service-level (non-endpoint) assessments.
        endpoint: Option<String>,
        method: Option<String>,
        /// Debug-formatted threat level name.
        threat_level: String,
        threat_score: f64,
        /// Unix timestamp (epoch seconds) of the assessment.
        assessed_at: i64,
        /// Number of entries in the assessment's `findings` JSON array.
        findings_count: usize,
    },
    /// An API change forecast loaded from the database.
    #[serde(rename = "forecast")]
    Forecast {
        id: String,
        endpoint: String,
        method: String,
        /// Forecast horizon in days.
        window_days: u32,
        /// Predicted probability of any change within the window.
        change_probability: f64,
        /// Predicted probability of a breaking change within the window.
        break_probability: f64,
        /// Unix timestamp of the next expected change, if predicted.
        next_expected_change: Option<i64>,
        confidence: f64,
        /// Unix timestamp (epoch seconds) when the forecast was made.
        predicted_at: i64,
    },
}
139
/// Response payload for the timeline endpoint.
#[derive(Debug, Serialize)]
pub struct TimelineResponse {
    /// Merged events, newest first, truncated to the requested `limit`.
    pub events: Vec<TimelineEvent>,
    /// Total number of matching events before `limit` truncation.
    pub total: usize,
}
148
149pub async fn get_timeline(
153 State(state): State<ContractHealthState>,
154 Query(params): Query<TimelineQuery>,
155) -> Result<Json<TimelineResponse>, StatusCode> {
156 let mut events = Vec::new();
157
158 let event_type_filter = params.event_type.as_deref().unwrap_or("all");
159
160 if event_type_filter == "all" || event_type_filter == "structural" {
162 let mut query = mockforge_core::incidents::types::IncidentQuery::default();
163 query.workspace_id = params.workspace_id.clone();
164 query.endpoint = params.endpoint.clone();
165 query.method = params.method.clone();
166
167 let incidents = state.incident_manager.query_incidents(query).await;
168 for incident in incidents {
169 events.push(TimelineEvent::StructuralDrift {
170 id: incident.id,
171 endpoint: incident.endpoint,
172 method: incident.method,
173 incident_type: format!("{:?}", incident.incident_type),
174 severity: format!("{:?}", incident.severity),
175 status: format!("{:?}", incident.status),
176 detected_at: incident.detected_at,
177 details: incident.details,
178 });
179 }
180 }
181
182 if event_type_filter == "all" || event_type_filter == "semantic" {
184 let status = None; let semantic_incidents = state
186 .semantic_manager
187 .list_incidents(
188 params.workspace_id.as_deref(),
189 params.endpoint.as_deref(),
190 params.method.as_deref(),
191 status,
192 params.limit,
193 )
194 .await;
195
196 for incident in semantic_incidents {
197 events.push(TimelineEvent::SemanticDrift {
198 id: incident.id,
199 endpoint: incident.endpoint,
200 method: incident.method,
201 change_type: format!("{:?}", incident.semantic_change_type),
202 severity: format!("{:?}", incident.severity),
203 status: format!("{:?}", incident.status),
204 semantic_confidence: incident.semantic_confidence,
205 soft_breaking_score: incident.soft_breaking_score,
206 detected_at: incident.detected_at,
207 details: incident.details,
208 });
209 }
210 }
211
212 #[cfg(feature = "database")]
214 {
215 use sqlx::Row;
216 if let Some(pool) = state.database.as_ref().and_then(|db| db.pool()) {
217 if let Ok(ta_rows) = sqlx::query(
219 "SELECT id, workspace_id, service_id, service_name, endpoint, method, aggregation_level,
220 threat_level, threat_score, threat_categories, findings, remediation_suggestions, assessed_at
221 FROM contract_threat_assessments
222 WHERE workspace_id = $1 OR workspace_id IS NULL
223 ORDER BY assessed_at DESC LIMIT 50"
224 )
225 .bind(params.workspace_id.as_deref())
226 .fetch_all(pool)
227 .await
228 {
229 use mockforge_core::contract_drift::threat_modeling::{ThreatLevel, AggregationLevel};
230 for row in ta_rows {
231 let id: uuid::Uuid = match row.try_get("id") {
232 Ok(id) => id,
233 Err(_) => continue,
234 };
235 let threat_level_str: String = match row.try_get("threat_level") {
236 Ok(s) => s,
237 Err(_) => continue,
238 };
239 let threat_score: f64 = match row.try_get("threat_score") {
240 Ok(s) => s,
241 Err(_) => continue,
242 };
243 let assessed_at: DateTime<Utc> = match row.try_get("assessed_at") {
244 Ok(dt) => dt,
245 Err(_) => continue,
246 };
247 let endpoint: Option<String> = row.try_get("endpoint").ok();
248 let method: Option<String> = row.try_get("method").ok();
249
250 let threat_level = match threat_level_str.as_str() {
251 "low" => ThreatLevel::Low,
252 "medium" => ThreatLevel::Medium,
253 "high" => ThreatLevel::High,
254 "critical" => ThreatLevel::Critical,
255 _ => continue,
256 };
257
258 let findings_count = row.try_get::<serde_json::Value, _>("findings")
260 .ok()
261 .and_then(|v| v.as_array().map(|arr| arr.len()))
262 .unwrap_or(0);
263
264 events.push(TimelineEvent::ThreatAssessment {
265 id: id.to_string(),
266 endpoint,
267 method,
268 threat_level: format!("{:?}", threat_level),
269 threat_score,
270 assessed_at: assessed_at.timestamp(),
271 findings_count,
272 });
273 }
274 }
275
276 if let Ok(forecast_rows) = sqlx::query(
278 "SELECT id, service_id, service_name, endpoint, method, forecast_window_days,
279 predicted_change_probability, predicted_break_probability, next_expected_change_date,
280 confidence, predicted_at
281 FROM api_change_forecasts
282 WHERE workspace_id = $1 OR workspace_id IS NULL
283 ORDER BY predicted_at DESC LIMIT 50",
284 )
285 .bind(params.workspace_id.as_deref())
286 .fetch_all(pool)
287 .await
288 {
289 use sqlx::Row;
290 for row in forecast_rows {
291 let id: uuid::Uuid = match row.try_get("id") {
292 Ok(id) => id,
293 Err(_) => continue,
294 };
295 let endpoint: String = match row.try_get("endpoint") {
296 Ok(e) => e,
297 Err(_) => continue,
298 };
299 let method: String = match row.try_get("method") {
300 Ok(m) => m,
301 Err(_) => continue,
302 };
303 let forecast_window_days: i32 = match row.try_get("forecast_window_days") {
304 Ok(d) => d,
305 Err(_) => continue,
306 };
307 let predicted_change_probability: f64 =
308 match row.try_get("predicted_change_probability") {
309 Ok(p) => p,
310 Err(_) => continue,
311 };
312 let predicted_break_probability: f64 =
313 match row.try_get("predicted_break_probability") {
314 Ok(p) => p,
315 Err(_) => continue,
316 };
317 let next_expected_change_date: Option<DateTime<Utc>> =
318 row.try_get("next_expected_change_date").ok();
319 let predicted_at: DateTime<Utc> = match row.try_get("predicted_at") {
320 Ok(dt) => dt,
321 Err(_) => continue,
322 };
323 let confidence: f64 = match row.try_get("confidence") {
324 Ok(c) => c,
325 Err(_) => continue,
326 };
327
328 events.push(TimelineEvent::Forecast {
329 id: id.to_string(),
330 endpoint,
331 method,
332 window_days: forecast_window_days as u32,
333 change_probability: predicted_change_probability,
334 break_probability: predicted_break_probability,
335 next_expected_change: next_expected_change_date.map(|d| d.timestamp()),
336 confidence,
337 predicted_at: predicted_at.timestamp(),
338 });
339 }
340 }
341 }
342 }
343
344 events.sort_by_key(|e| {
346 std::cmp::Reverse(match e {
347 TimelineEvent::StructuralDrift { detected_at, .. } => *detected_at,
348 TimelineEvent::SemanticDrift { detected_at, .. } => *detected_at,
349 TimelineEvent::ThreatAssessment { assessed_at, .. } => *assessed_at,
350 TimelineEvent::Forecast { predicted_at, .. } => *predicted_at,
351 })
352 });
353
354 let total = events.len();
356 if let Some(limit) = params.limit {
357 events.truncate(limit);
358 }
359
360 Ok(Json(TimelineResponse { events, total }))
361}
362
363pub fn contract_health_router(state: ContractHealthState) -> axum::Router {
365 use axum::routing::get;
366 use axum::Router;
367
368 Router::new()
369 .route("/api/v1/contract-health/timeline", get(get_timeline))
370 .with_state(state)
371}