1use axum::{
10 extract::{Query, State},
11 http::StatusCode,
12 response::Json,
13};
14use serde::{Deserialize, Serialize};
15use std::sync::Arc;
16
17#[cfg(feature = "database")]
18use chrono::{DateTime, Utc};
19
/// Shared state injected into the contract-health HTTP handlers.
#[derive(Clone)]
pub struct ContractHealthState {
    /// Source of structural drift incidents (surfaced as `TimelineEvent::StructuralDrift`).
    pub incident_manager: Arc<mockforge_core::incidents::IncidentManager>,
    /// Source of semantic drift incidents (surfaced as `TimelineEvent::SemanticDrift`).
    pub semantic_manager: Arc<mockforge_core::incidents::SemanticIncidentManager>,
    /// Optional database handle; when present (and the `database` feature is
    /// enabled) threat assessments and change forecasts are read from it.
    pub database: Option<crate::database::Database>,
}
30
/// Query-string parameters accepted by [`get_timeline`].
#[derive(Debug, Deserialize)]
pub struct TimelineQuery {
    /// Restrict results to a single workspace.
    pub workspace_id: Option<String>,
    /// Restrict results to a single endpoint path.
    pub endpoint: Option<String>,
    /// Restrict results to a single HTTP method.
    pub method: Option<String>,
    /// Start of a date window. NOTE(review): currently accepted but never
    /// applied by the handler — confirm whether date filtering was intended.
    pub start_date: Option<String>,
    /// End of a date window. NOTE(review): currently accepted but never
    /// applied by the handler — confirm whether date filtering was intended.
    pub end_date: Option<String>,
    /// Event category filter: "structural", "semantic", or "all" (the default).
    pub event_type: Option<String>,
    /// Maximum number of events to return (applied after sorting).
    pub limit: Option<usize>,
}
49
/// A single entry in the contract-health timeline.
///
/// Serialized as internally-tagged JSON: the `type` field carries the
/// snake_case variant name (e.g. `"structural_drift"`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum TimelineEvent {
    /// A structural drift incident reported by the incident manager.
    #[serde(rename = "structural_drift")]
    StructuralDrift {
        id: String,
        endpoint: String,
        method: String,
        // Debug-formatted renderings of the incident's enum fields.
        incident_type: String,
        severity: String,
        status: String,
        // Unix timestamp; used as the sort key in the timeline.
        detected_at: i64,
        details: serde_json::Value,
    },
    /// A semantic drift incident reported by the semantic incident manager.
    #[serde(rename = "semantic_drift")]
    SemanticDrift {
        id: String,
        endpoint: String,
        method: String,
        // Debug-formatted renderings of the incident's enum fields.
        change_type: String,
        severity: String,
        status: String,
        semantic_confidence: f64,
        soft_breaking_score: f64,
        // Unix timestamp; used as the sort key in the timeline.
        detected_at: i64,
        details: serde_json::Value,
    },
    /// A threat assessment row loaded from `contract_threat_assessments`
    /// (only produced when the `database` feature is enabled).
    #[serde(rename = "threat_assessment")]
    ThreatAssessment {
        id: String,
        // Endpoint/method may be NULL for service-level assessments.
        endpoint: Option<String>,
        method: Option<String>,
        threat_level: String,
        threat_score: f64,
        // Unix timestamp; used as the sort key in the timeline.
        assessed_at: i64,
        findings_count: usize,
    },
    /// An API change forecast row loaded from `api_change_forecasts`
    /// (only produced when the `database` feature is enabled).
    #[serde(rename = "forecast")]
    Forecast {
        id: String,
        endpoint: String,
        method: String,
        window_days: u32,
        change_probability: f64,
        break_probability: f64,
        // Unix timestamp of the next expected change, if predicted.
        next_expected_change: Option<i64>,
        confidence: f64,
        // Unix timestamp; used as the sort key in the timeline.
        predicted_at: i64,
    },
}
139
/// JSON payload returned by [`get_timeline`].
#[derive(Debug, Serialize)]
pub struct TimelineResponse {
    /// Events sorted newest-first, truncated to the requested `limit`.
    pub events: Vec<TimelineEvent>,
    /// Number of matching events before `limit` truncation.
    pub total: usize,
}
148
149pub async fn get_timeline(
153 State(state): State<ContractHealthState>,
154 Query(params): Query<TimelineQuery>,
155) -> Result<Json<TimelineResponse>, StatusCode> {
156 let mut events = Vec::new();
157
158 let event_type_filter = params.event_type.as_deref().unwrap_or("all");
159
160 if event_type_filter == "all" || event_type_filter == "structural" {
162 let query = mockforge_core::incidents::types::IncidentQuery {
163 workspace_id: params.workspace_id.clone(),
164 endpoint: params.endpoint.clone(),
165 method: params.method.clone(),
166 ..mockforge_core::incidents::types::IncidentQuery::default()
167 };
168
169 let incidents = state.incident_manager.query_incidents(query).await;
170 for incident in incidents {
171 events.push(TimelineEvent::StructuralDrift {
172 id: incident.id,
173 endpoint: incident.endpoint,
174 method: incident.method,
175 incident_type: format!("{:?}", incident.incident_type),
176 severity: format!("{:?}", incident.severity),
177 status: format!("{:?}", incident.status),
178 detected_at: incident.detected_at,
179 details: incident.details,
180 });
181 }
182 }
183
184 if event_type_filter == "all" || event_type_filter == "semantic" {
186 let status = None; let semantic_incidents = state
188 .semantic_manager
189 .list_incidents(
190 params.workspace_id.as_deref(),
191 params.endpoint.as_deref(),
192 params.method.as_deref(),
193 status,
194 params.limit,
195 )
196 .await;
197
198 for incident in semantic_incidents {
199 events.push(TimelineEvent::SemanticDrift {
200 id: incident.id,
201 endpoint: incident.endpoint,
202 method: incident.method,
203 change_type: format!("{:?}", incident.semantic_change_type),
204 severity: format!("{:?}", incident.severity),
205 status: format!("{:?}", incident.status),
206 semantic_confidence: incident.semantic_confidence,
207 soft_breaking_score: incident.soft_breaking_score,
208 detected_at: incident.detected_at,
209 details: incident.details,
210 });
211 }
212 }
213
214 #[cfg(feature = "database")]
216 {
217 use sqlx::Row;
218 if let Some(pool) = state.database.as_ref().and_then(|db| db.pool()) {
219 if let Ok(ta_rows) = sqlx::query(
221 "SELECT id, workspace_id, service_id, service_name, endpoint, method, aggregation_level,
222 threat_level, threat_score, threat_categories, findings, remediation_suggestions, assessed_at
223 FROM contract_threat_assessments
224 WHERE workspace_id = $1 OR workspace_id IS NULL
225 ORDER BY assessed_at DESC LIMIT 50"
226 )
227 .bind(params.workspace_id.as_deref())
228 .fetch_all(pool)
229 .await
230 {
231 use mockforge_core::contract_drift::threat_modeling::ThreatLevel;
232 for row in ta_rows {
233 let id: uuid::Uuid = match row.try_get("id") {
234 Ok(id) => id,
235 Err(_) => continue,
236 };
237 let threat_level_str: String = match row.try_get("threat_level") {
238 Ok(s) => s,
239 Err(_) => continue,
240 };
241 let threat_score: f64 = match row.try_get("threat_score") {
242 Ok(s) => s,
243 Err(_) => continue,
244 };
245 let assessed_at: DateTime<Utc> = match row.try_get("assessed_at") {
246 Ok(dt) => dt,
247 Err(_) => continue,
248 };
249 let endpoint: Option<String> = row.try_get("endpoint").ok();
250 let method: Option<String> = row.try_get("method").ok();
251
252 let threat_level = match threat_level_str.as_str() {
253 "low" => ThreatLevel::Low,
254 "medium" => ThreatLevel::Medium,
255 "high" => ThreatLevel::High,
256 "critical" => ThreatLevel::Critical,
257 _ => continue,
258 };
259
260 let findings_count = row.try_get::<serde_json::Value, _>("findings")
262 .ok()
263 .and_then(|v| v.as_array().map(|arr| arr.len()))
264 .unwrap_or(0);
265
266 events.push(TimelineEvent::ThreatAssessment {
267 id: id.to_string(),
268 endpoint,
269 method,
270 threat_level: format!("{:?}", threat_level),
271 threat_score,
272 assessed_at: assessed_at.timestamp(),
273 findings_count,
274 });
275 }
276 }
277
278 if let Ok(forecast_rows) = sqlx::query(
280 "SELECT id, service_id, service_name, endpoint, method, forecast_window_days,
281 predicted_change_probability, predicted_break_probability, next_expected_change_date,
282 confidence, predicted_at
283 FROM api_change_forecasts
284 WHERE workspace_id = $1 OR workspace_id IS NULL
285 ORDER BY predicted_at DESC LIMIT 50",
286 )
287 .bind(params.workspace_id.as_deref())
288 .fetch_all(pool)
289 .await
290 {
291 use sqlx::Row;
292 for row in forecast_rows {
293 let id: uuid::Uuid = match row.try_get("id") {
294 Ok(id) => id,
295 Err(_) => continue,
296 };
297 let endpoint: String = match row.try_get("endpoint") {
298 Ok(e) => e,
299 Err(_) => continue,
300 };
301 let method: String = match row.try_get("method") {
302 Ok(m) => m,
303 Err(_) => continue,
304 };
305 let forecast_window_days: i32 = match row.try_get("forecast_window_days") {
306 Ok(d) => d,
307 Err(_) => continue,
308 };
309 let predicted_change_probability: f64 =
310 match row.try_get("predicted_change_probability") {
311 Ok(p) => p,
312 Err(_) => continue,
313 };
314 let predicted_break_probability: f64 =
315 match row.try_get("predicted_break_probability") {
316 Ok(p) => p,
317 Err(_) => continue,
318 };
319 let next_expected_change_date: Option<DateTime<Utc>> =
320 row.try_get("next_expected_change_date").ok();
321 let predicted_at: DateTime<Utc> = match row.try_get("predicted_at") {
322 Ok(dt) => dt,
323 Err(_) => continue,
324 };
325 let confidence: f64 = match row.try_get("confidence") {
326 Ok(c) => c,
327 Err(_) => continue,
328 };
329
330 events.push(TimelineEvent::Forecast {
331 id: id.to_string(),
332 endpoint,
333 method,
334 window_days: forecast_window_days as u32,
335 change_probability: predicted_change_probability,
336 break_probability: predicted_break_probability,
337 next_expected_change: next_expected_change_date.map(|d| d.timestamp()),
338 confidence,
339 predicted_at: predicted_at.timestamp(),
340 });
341 }
342 }
343 }
344 }
345
346 events.sort_by_key(|e| {
348 std::cmp::Reverse(match e {
349 TimelineEvent::StructuralDrift { detected_at, .. } => *detected_at,
350 TimelineEvent::SemanticDrift { detected_at, .. } => *detected_at,
351 TimelineEvent::ThreatAssessment { assessed_at, .. } => *assessed_at,
352 TimelineEvent::Forecast { predicted_at, .. } => *predicted_at,
353 })
354 });
355
356 let total = events.len();
358 if let Some(limit) = params.limit {
359 events.truncate(limit);
360 }
361
362 Ok(Json(TimelineResponse { events, total }))
363}
364
365pub fn contract_health_router(state: ContractHealthState) -> axum::Router {
367 use axum::routing::get;
368 use axum::Router;
369
370 Router::new()
371 .route("/api/v1/contract-health/timeline", get(get_timeline))
372 .with_state(state)
373}