Skip to main content

mockforge_http/handlers/
contract_health.rs

1//! Unified contract health timeline
2//!
3//! This module provides a unified endpoint that combines:
4//! - Structural drift incidents
5//! - Semantic drift incidents
6//! - Threat assessments
7//! - Forecast predictions
8
9use axum::{
10    extract::{Query, State},
11    http::StatusCode,
12    response::Json,
13};
14use serde::{Deserialize, Serialize};
15use std::sync::Arc;
16
17#[cfg(feature = "database")]
18use chrono::{DateTime, Utc};
19
/// Shared state for the contract health handlers.
///
/// Cloned per request by axum; the managers are behind `Arc`, so clones are
/// cheap reference-count bumps.
#[derive(Clone)]
pub struct ContractHealthState {
    /// Manager used to query structural drift incidents.
    pub incident_manager: Arc<mockforge_core::incidents::IncidentManager>,
    /// Manager used to query semantic drift incidents.
    pub semantic_manager: Arc<mockforge_core::incidents::SemanticIncidentManager>,
    /// Optional database handle; when present (and the `database` feature is
    /// compiled in), threat assessments and forecasts are read from it.
    pub database: Option<crate::database::Database>,
}
30
/// Query parameters accepted by `GET /api/v1/contract-health/timeline`.
#[derive(Debug, Deserialize)]
pub struct TimelineQuery {
    /// Restrict results to a single workspace.
    pub workspace_id: Option<String>,
    /// Restrict results to a single endpoint path.
    pub endpoint: Option<String>,
    /// Restrict results to a single HTTP method.
    pub method: Option<String>,
    /// Start date (ISO 8601).
    /// NOTE(review): accepted but not currently applied by the timeline
    /// handler — `get_timeline` never reads this field.
    pub start_date: Option<String>,
    /// End date (ISO 8601).
    /// NOTE(review): accepted but not currently applied by the timeline
    /// handler — `get_timeline` never reads this field.
    pub end_date: Option<String>,
    /// Filter by type: "structural", "semantic", "threat", "forecast", or
    /// "all". Any other value (or absence) is treated as "all".
    pub event_type: Option<String>,
    /// Maximum number of events to return after merging and sorting.
    /// Also forwarded as a per-source limit to the semantic incident query.
    pub limit: Option<usize>,
}
49
/// A single event on the unified contract health timeline.
///
/// Serialized with an internal `"type"` tag (e.g. `{"type":
/// "structural_drift", ...}`) so clients can discriminate variants.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum TimelineEvent {
    /// Structural drift incident, sourced from the incident manager.
    #[serde(rename = "structural_drift")]
    StructuralDrift {
        /// Incident ID
        id: String,
        /// Endpoint path
        endpoint: String,
        /// HTTP method
        method: String,
        /// Type of incident (Debug rendering of the incident-type enum)
        incident_type: String,
        /// Severity level (Debug rendering of the severity enum)
        severity: String,
        /// Current status (Debug rendering of the status enum)
        status: String,
        /// Detection timestamp — presumably Unix epoch seconds, taken
        /// verbatim from the incident manager (TODO confirm units there)
        detected_at: i64,
        /// Additional free-form details
        details: serde_json::Value,
    },
    /// Semantic drift incident, sourced from the semantic incident manager.
    #[serde(rename = "semantic_drift")]
    SemanticDrift {
        /// Incident ID
        id: String,
        /// Endpoint path
        endpoint: String,
        /// HTTP method
        method: String,
        /// Type of semantic change (Debug rendering)
        change_type: String,
        /// Severity level (Debug rendering)
        severity: String,
        /// Current status (Debug rendering)
        status: String,
        /// Semantic confidence score
        semantic_confidence: f64,
        /// Soft-breaking score
        soft_breaking_score: f64,
        /// Detection timestamp — presumably Unix epoch seconds, taken
        /// verbatim from the incident manager (TODO confirm units there)
        detected_at: i64,
        /// Additional free-form details
        details: serde_json::Value,
    },
    /// Threat assessment, read from the database (requires the `database`
    /// feature).
    #[serde(rename = "threat_assessment")]
    ThreatAssessment {
        /// Assessment ID
        id: String,
        /// Endpoint path (None for workspace-level assessments)
        endpoint: Option<String>,
        /// HTTP method (None for workspace-level assessments)
        method: Option<String>,
        /// Threat level: Debug rendering of one of Low/Medium/High/Critical
        threat_level: String,
        /// Threat score (0.0-1.0)
        threat_score: f64,
        /// Assessment timestamp, Unix epoch seconds
        assessed_at: i64,
        /// Number of entries in the stored `findings` JSON array
        findings_count: usize,
    },
    /// Forecast prediction, read from the database (requires the `database`
    /// feature).
    #[serde(rename = "forecast")]
    Forecast {
        /// Forecast ID
        id: String,
        /// Endpoint path
        endpoint: String,
        /// HTTP method
        method: String,
        /// Forecast window in days
        window_days: u32,
        /// Change probability (0.0-1.0)
        change_probability: f64,
        /// Break probability (0.0-1.0)
        break_probability: f64,
        /// Next expected change timestamp, Unix epoch seconds (optional)
        next_expected_change: Option<i64>,
        /// Confidence score (0.0-1.0)
        confidence: f64,
        /// Prediction timestamp, Unix epoch seconds
        predicted_at: i64,
    },
}
139
/// Response payload for the timeline endpoint.
#[derive(Debug, Serialize)]
pub struct TimelineResponse {
    /// Merged events, sorted most-recent-first and truncated to the
    /// requested `limit` (if any).
    pub events: Vec<TimelineEvent>,
    /// Count of merged events *before* `limit` truncation — may exceed
    /// `events.len()`.
    pub total: usize,
}
148
149/// Get unified contract health timeline
150///
151/// GET /api/v1/contract-health/timeline
152pub async fn get_timeline(
153    State(state): State<ContractHealthState>,
154    Query(params): Query<TimelineQuery>,
155) -> Result<Json<TimelineResponse>, StatusCode> {
156    let mut events = Vec::new();
157
158    let event_type_filter = params.event_type.as_deref().unwrap_or("all");
159
160    // Get structural drift incidents
161    if event_type_filter == "all" || event_type_filter == "structural" {
162        let mut query = mockforge_core::incidents::types::IncidentQuery::default();
163        query.workspace_id = params.workspace_id.clone();
164        query.endpoint = params.endpoint.clone();
165        query.method = params.method.clone();
166
167        let incidents = state.incident_manager.query_incidents(query).await;
168        for incident in incidents {
169            events.push(TimelineEvent::StructuralDrift {
170                id: incident.id,
171                endpoint: incident.endpoint,
172                method: incident.method,
173                incident_type: format!("{:?}", incident.incident_type),
174                severity: format!("{:?}", incident.severity),
175                status: format!("{:?}", incident.status),
176                detected_at: incident.detected_at,
177                details: incident.details,
178            });
179        }
180    }
181
182    // Get semantic drift incidents
183    if event_type_filter == "all" || event_type_filter == "semantic" {
184        let status = None; // Get all statuses for timeline
185        let semantic_incidents = state
186            .semantic_manager
187            .list_incidents(
188                params.workspace_id.as_deref(),
189                params.endpoint.as_deref(),
190                params.method.as_deref(),
191                status,
192                params.limit,
193            )
194            .await;
195
196        for incident in semantic_incidents {
197            events.push(TimelineEvent::SemanticDrift {
198                id: incident.id,
199                endpoint: incident.endpoint,
200                method: incident.method,
201                change_type: format!("{:?}", incident.semantic_change_type),
202                severity: format!("{:?}", incident.severity),
203                status: format!("{:?}", incident.status),
204                semantic_confidence: incident.semantic_confidence,
205                soft_breaking_score: incident.soft_breaking_score,
206                detected_at: incident.detected_at,
207                details: incident.details,
208            });
209        }
210    }
211
212    // Add threat assessments and forecasts from database
213    #[cfg(feature = "database")]
214    {
215        use sqlx::Row;
216        if let Some(pool) = state.database.as_ref().and_then(|db| db.pool()) {
217            // Query threat assessments
218            if let Ok(ta_rows) = sqlx::query(
219            "SELECT id, workspace_id, service_id, service_name, endpoint, method, aggregation_level,
220             threat_level, threat_score, threat_categories, findings, remediation_suggestions, assessed_at
221             FROM contract_threat_assessments
222             WHERE workspace_id = $1 OR workspace_id IS NULL
223             ORDER BY assessed_at DESC LIMIT 50"
224        )
225        .bind(params.workspace_id.as_deref())
226        .fetch_all(pool)
227        .await
228        {
229            use mockforge_core::contract_drift::threat_modeling::{ThreatLevel, AggregationLevel};
230            for row in ta_rows {
231                let id: uuid::Uuid = match row.try_get("id") {
232                    Ok(id) => id,
233                    Err(_) => continue,
234                };
235                let threat_level_str: String = match row.try_get("threat_level") {
236                    Ok(s) => s,
237                    Err(_) => continue,
238                };
239                let threat_score: f64 = match row.try_get("threat_score") {
240                    Ok(s) => s,
241                    Err(_) => continue,
242                };
243                let assessed_at: DateTime<Utc> = match row.try_get("assessed_at") {
244                    Ok(dt) => dt,
245                    Err(_) => continue,
246                };
247                let endpoint: Option<String> = row.try_get("endpoint").ok();
248                let method: Option<String> = row.try_get("method").ok();
249
250                let threat_level = match threat_level_str.as_str() {
251                    "low" => ThreatLevel::Low,
252                    "medium" => ThreatLevel::Medium,
253                    "high" => ThreatLevel::High,
254                    "critical" => ThreatLevel::Critical,
255                    _ => continue,
256                };
257
258                // Count findings from the findings JSON field
259                let findings_count = row.try_get::<serde_json::Value, _>("findings")
260                    .ok()
261                    .and_then(|v| v.as_array().map(|arr| arr.len()))
262                    .unwrap_or(0);
263
264                events.push(TimelineEvent::ThreatAssessment {
265                    id: id.to_string(),
266                    endpoint,
267                    method,
268                    threat_level: format!("{:?}", threat_level),
269                    threat_score,
270                    assessed_at: assessed_at.timestamp(),
271                    findings_count,
272                });
273            }
274        }
275
276            // Query forecasts
277            if let Ok(forecast_rows) = sqlx::query(
278                "SELECT id, service_id, service_name, endpoint, method, forecast_window_days,
279             predicted_change_probability, predicted_break_probability, next_expected_change_date,
280             confidence, predicted_at
281             FROM api_change_forecasts
282             WHERE workspace_id = $1 OR workspace_id IS NULL
283             ORDER BY predicted_at DESC LIMIT 50",
284            )
285            .bind(params.workspace_id.as_deref())
286            .fetch_all(pool)
287            .await
288            {
289                use sqlx::Row;
290                for row in forecast_rows {
291                    let id: uuid::Uuid = match row.try_get("id") {
292                        Ok(id) => id,
293                        Err(_) => continue,
294                    };
295                    let endpoint: String = match row.try_get("endpoint") {
296                        Ok(e) => e,
297                        Err(_) => continue,
298                    };
299                    let method: String = match row.try_get("method") {
300                        Ok(m) => m,
301                        Err(_) => continue,
302                    };
303                    let forecast_window_days: i32 = match row.try_get("forecast_window_days") {
304                        Ok(d) => d,
305                        Err(_) => continue,
306                    };
307                    let predicted_change_probability: f64 =
308                        match row.try_get("predicted_change_probability") {
309                            Ok(p) => p,
310                            Err(_) => continue,
311                        };
312                    let predicted_break_probability: f64 =
313                        match row.try_get("predicted_break_probability") {
314                            Ok(p) => p,
315                            Err(_) => continue,
316                        };
317                    let next_expected_change_date: Option<DateTime<Utc>> =
318                        row.try_get("next_expected_change_date").ok();
319                    let predicted_at: DateTime<Utc> = match row.try_get("predicted_at") {
320                        Ok(dt) => dt,
321                        Err(_) => continue,
322                    };
323                    let confidence: f64 = match row.try_get("confidence") {
324                        Ok(c) => c,
325                        Err(_) => continue,
326                    };
327
328                    events.push(TimelineEvent::Forecast {
329                        id: id.to_string(),
330                        endpoint,
331                        method,
332                        window_days: forecast_window_days as u32,
333                        change_probability: predicted_change_probability,
334                        break_probability: predicted_break_probability,
335                        next_expected_change: next_expected_change_date.map(|d| d.timestamp()),
336                        confidence,
337                        predicted_at: predicted_at.timestamp(),
338                    });
339                }
340            }
341        }
342    }
343
344    // Sort by timestamp (most recent first)
345    events.sort_by_key(|e| {
346        std::cmp::Reverse(match e {
347            TimelineEvent::StructuralDrift { detected_at, .. } => *detected_at,
348            TimelineEvent::SemanticDrift { detected_at, .. } => *detected_at,
349            TimelineEvent::ThreatAssessment { assessed_at, .. } => *assessed_at,
350            TimelineEvent::Forecast { predicted_at, .. } => *predicted_at,
351        })
352    });
353
354    // Apply limit
355    let total = events.len();
356    if let Some(limit) = params.limit {
357        events.truncate(limit);
358    }
359
360    Ok(Json(TimelineResponse { events, total }))
361}
362
363/// Create router for contract health endpoints
364pub fn contract_health_router(state: ContractHealthState) -> axum::Router {
365    use axum::routing::get;
366    use axum::Router;
367
368    Router::new()
369        .route("/api/v1/contract-health/timeline", get(get_timeline))
370        .with_state(state)
371}