use axum::{
extract::{Query, State},
http::StatusCode,
response::Json,
};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
#[cfg(feature = "database")]
use chrono::{DateTime, Utc};
/// Shared application state for the contract-health HTTP handlers.
///
/// Cloned per-request by axum; all members are cheap to clone
/// (`Arc` handles and an `Option`al database handle).
#[derive(Clone)]
pub struct ContractHealthState {
    /// Source of structural drift incidents queried by the timeline handler.
    pub incident_manager: Arc<mockforge_core::incidents::IncidentManager>,
    /// Source of semantic drift incidents queried by the timeline handler.
    pub semantic_manager: Arc<mockforge_core::incidents::SemanticIncidentManager>,
    /// Optional database handle. Threat assessments and change forecasts are
    /// only included in the timeline when this is `Some` AND the crate is
    /// built with the `database` feature.
    pub database: Option<crate::database::Database>,
}
/// Query-string parameters accepted by the timeline endpoint.
///
/// All fields are optional; omitted filters match everything.
#[derive(Debug, Deserialize)]
pub struct TimelineQuery {
    /// Restrict results to a single workspace.
    pub workspace_id: Option<String>,
    /// Restrict results to a single endpoint path.
    pub endpoint: Option<String>,
    /// Restrict results to a single HTTP method.
    pub method: Option<String>,
    /// NOTE(review): deserialized but never applied by `get_timeline` —
    /// confirm whether date filtering was intended.
    pub start_date: Option<String>,
    /// NOTE(review): deserialized but never applied by `get_timeline`.
    pub end_date: Option<String>,
    /// Event-kind filter: "all" (default when absent), "structural",
    /// or "semantic". Database-backed events are appended regardless.
    pub event_type: Option<String>,
    /// Maximum number of events returned after merging and sorting; also
    /// forwarded to the semantic incident listing as its page size.
    pub limit: Option<usize>,
}
/// A single entry in the merged contract-health timeline.
///
/// Serialized with an internal `"type"` tag so clients can discriminate
/// variants; the rename attributes fix the wire-level tag values.
/// All `*_at` timestamps are Unix epoch seconds (`i64`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum TimelineEvent {
    /// Structural (schema-shape) drift incident from the `IncidentManager`.
    #[serde(rename = "structural_drift")]
    StructuralDrift {
        id: String,
        endpoint: String,
        method: String,
        // Stringified via `{:?}` from the core enum types.
        incident_type: String,
        severity: String,
        status: String,
        detected_at: i64,
        details: serde_json::Value,
    },
    /// Semantic drift incident from the `SemanticIncidentManager`.
    #[serde(rename = "semantic_drift")]
    SemanticDrift {
        id: String,
        endpoint: String,
        method: String,
        change_type: String,
        severity: String,
        status: String,
        semantic_confidence: f64,
        soft_breaking_score: f64,
        detected_at: i64,
        details: serde_json::Value,
    },
    /// Threat assessment row loaded from the database (feature-gated).
    /// Endpoint/method may be absent for service-level assessments.
    #[serde(rename = "threat_assessment")]
    ThreatAssessment {
        id: String,
        endpoint: Option<String>,
        method: Option<String>,
        threat_level: String,
        threat_score: f64,
        assessed_at: i64,
        findings_count: usize,
    },
    /// API change forecast row loaded from the database (feature-gated).
    #[serde(rename = "forecast")]
    Forecast {
        id: String,
        endpoint: String,
        method: String,
        window_days: u32,
        change_probability: f64,
        break_probability: f64,
        /// Epoch seconds of the next predicted change, if forecast.
        next_expected_change: Option<i64>,
        confidence: f64,
        predicted_at: i64,
    },
}
/// Response payload for the timeline endpoint.
#[derive(Debug, Serialize)]
pub struct TimelineResponse {
    /// Merged events, newest first, truncated to the requested `limit`.
    pub events: Vec<TimelineEvent>,
    /// Count of matching events BEFORE `limit` truncation, so clients can
    /// tell whether more events exist than were returned.
    pub total: usize,
}
/// Handles `GET /api/v1/contract-health/timeline`.
///
/// Builds a merged, reverse-chronological timeline from up to four sources:
///   * structural drift incidents (in-memory `IncidentManager`),
///   * semantic drift incidents (in-memory `SemanticIncidentManager`),
///   * threat assessments (database, `database` feature only),
///   * API change forecasts (database, `database` feature only).
///
/// `params.event_type` selects which in-memory sources are consulted
/// ("all" by default, or "structural" / "semantic"); database-backed events
/// are appended whenever a pool is available. Database query failures are
/// skipped silently, so this handler never returns an error status.
///
/// NOTE(review): `params.start_date` / `params.end_date` are accepted but
/// never applied as filters — confirm whether that is intentional.
///
/// Returns the sorted events (truncated to `params.limit` if given) together
/// with the total count before truncation.
pub async fn get_timeline(
    State(state): State<ContractHealthState>,
    Query(params): Query<TimelineQuery>,
) -> Result<Json<TimelineResponse>, StatusCode> {
    let mut events = Vec::new();
    // A missing or unrecognized event_type behaves like "all".
    let event_type_filter = params.event_type.as_deref().unwrap_or("all");

    // --- Structural drift incidents (in-memory) ------------------------
    if event_type_filter == "all" || event_type_filter == "structural" {
        let query = mockforge_core::incidents::types::IncidentQuery {
            workspace_id: params.workspace_id.clone(),
            endpoint: params.endpoint.clone(),
            method: params.method.clone(),
            ..mockforge_core::incidents::types::IncidentQuery::default()
        };
        let incidents = state.incident_manager.query_incidents(query).await;
        for incident in incidents {
            events.push(TimelineEvent::StructuralDrift {
                id: incident.id,
                endpoint: incident.endpoint,
                method: incident.method,
                // Debug formatting mirrors the core enum variant names.
                incident_type: format!("{:?}", incident.incident_type),
                severity: format!("{:?}", incident.severity),
                status: format!("{:?}", incident.status),
                detected_at: incident.detected_at,
                details: incident.details,
            });
        }
    }

    // --- Semantic drift incidents (in-memory) ---------------------------
    if event_type_filter == "all" || event_type_filter == "semantic" {
        // No status filter: include incidents in every lifecycle state.
        let status = None;
        let semantic_incidents = state
            .semantic_manager
            .list_incidents(
                params.workspace_id.as_deref(),
                params.endpoint.as_deref(),
                params.method.as_deref(),
                status,
                params.limit,
            )
            .await;
        for incident in semantic_incidents {
            events.push(TimelineEvent::SemanticDrift {
                id: incident.id,
                endpoint: incident.endpoint,
                method: incident.method,
                change_type: format!("{:?}", incident.semantic_change_type),
                severity: format!("{:?}", incident.severity),
                status: format!("{:?}", incident.status),
                semantic_confidence: incident.semantic_confidence,
                soft_breaking_score: incident.soft_breaking_score,
                detected_at: incident.detected_at,
                details: incident.details,
            });
        }
    }

    // --- Database-backed events (threat assessments + forecasts) --------
    #[cfg(feature = "database")]
    {
        use sqlx::Row;
        if let Some(pool) = state.database.as_ref().and_then(|db| db.pool()) {
            // Threat assessments: most recent 50 for the workspace (or global
            // rows with a NULL workspace). Rows with missing/invalid columns
            // are skipped rather than failing the whole request.
            if let Ok(ta_rows) = sqlx::query(
                "SELECT id, workspace_id, service_id, service_name, endpoint, method, aggregation_level,
                 threat_level, threat_score, threat_categories, findings, remediation_suggestions, assessed_at
                 FROM contract_threat_assessments
                 WHERE workspace_id = $1 OR workspace_id IS NULL
                 ORDER BY assessed_at DESC LIMIT 50"
            )
            .bind(params.workspace_id.as_deref())
            .fetch_all(pool)
            .await
            {
                use mockforge_core::contract_drift::threat_modeling::ThreatLevel;
                for row in ta_rows {
                    let id: uuid::Uuid = match row.try_get("id") {
                        Ok(id) => id,
                        Err(_) => continue,
                    };
                    let threat_level_str: String = match row.try_get("threat_level") {
                        Ok(s) => s,
                        Err(_) => continue,
                    };
                    let threat_score: f64 = match row.try_get("threat_score") {
                        Ok(s) => s,
                        Err(_) => continue,
                    };
                    let assessed_at: DateTime<Utc> = match row.try_get("assessed_at") {
                        Ok(dt) => dt,
                        Err(_) => continue,
                    };
                    let endpoint: Option<String> = row.try_get("endpoint").ok();
                    let method: Option<String> = row.try_get("method").ok();
                    // Re-parse the stored lowercase level into the typed enum so
                    // the serialized value matches the enum's Debug names.
                    let threat_level = match threat_level_str.as_str() {
                        "low" => ThreatLevel::Low,
                        "medium" => ThreatLevel::Medium,
                        "high" => ThreatLevel::High,
                        "critical" => ThreatLevel::Critical,
                        _ => continue,
                    };
                    // `findings` is a JSON array; a missing or non-array value
                    // counts as zero findings.
                    let findings_count = row.try_get::<serde_json::Value, _>("findings")
                        .ok()
                        .and_then(|v| v.as_array().map(|arr| arr.len()))
                        .unwrap_or(0);
                    events.push(TimelineEvent::ThreatAssessment {
                        id: id.to_string(),
                        endpoint,
                        method,
                        threat_level: format!("{:?}", threat_level),
                        threat_score,
                        assessed_at: assessed_at.timestamp(),
                        findings_count,
                    });
                }
            }
            // Change forecasts: same workspace scoping and 50-row cap.
            if let Ok(forecast_rows) = sqlx::query(
                "SELECT id, service_id, service_name, endpoint, method, forecast_window_days,
                 predicted_change_probability, predicted_break_probability, next_expected_change_date,
                 confidence, predicted_at
                 FROM api_change_forecasts
                 WHERE workspace_id = $1 OR workspace_id IS NULL
                 ORDER BY predicted_at DESC LIMIT 50",
            )
            .bind(params.workspace_id.as_deref())
            .fetch_all(pool)
            .await
            {
                // (fixed) `use sqlx::Row;` was redundantly re-imported here;
                // the import at the top of this cfg block already covers it.
                for row in forecast_rows {
                    let id: uuid::Uuid = match row.try_get("id") {
                        Ok(id) => id,
                        Err(_) => continue,
                    };
                    let endpoint: String = match row.try_get("endpoint") {
                        Ok(e) => e,
                        Err(_) => continue,
                    };
                    let method: String = match row.try_get("method") {
                        Ok(m) => m,
                        Err(_) => continue,
                    };
                    let forecast_window_days: i32 = match row.try_get("forecast_window_days") {
                        Ok(d) => d,
                        Err(_) => continue,
                    };
                    let predicted_change_probability: f64 =
                        match row.try_get("predicted_change_probability") {
                            Ok(p) => p,
                            Err(_) => continue,
                        };
                    let predicted_break_probability: f64 =
                        match row.try_get("predicted_break_probability") {
                            Ok(p) => p,
                            Err(_) => continue,
                        };
                    let next_expected_change_date: Option<DateTime<Utc>> =
                        row.try_get("next_expected_change_date").ok();
                    let predicted_at: DateTime<Utc> = match row.try_get("predicted_at") {
                        Ok(dt) => dt,
                        Err(_) => continue,
                    };
                    let confidence: f64 = match row.try_get("confidence") {
                        Ok(c) => c,
                        Err(_) => continue,
                    };
                    events.push(TimelineEvent::Forecast {
                        id: id.to_string(),
                        endpoint,
                        method,
                        window_days: forecast_window_days as u32,
                        change_probability: predicted_change_probability,
                        break_probability: predicted_break_probability,
                        next_expected_change: next_expected_change_date.map(|d| d.timestamp()),
                        confidence,
                        predicted_at: predicted_at.timestamp(),
                    });
                }
            }
        }
    }

    // Merge all sources newest-first by each variant's timestamp field.
    events.sort_by_key(|e| {
        std::cmp::Reverse(match e {
            TimelineEvent::StructuralDrift { detected_at, .. } => *detected_at,
            TimelineEvent::SemanticDrift { detected_at, .. } => *detected_at,
            TimelineEvent::ThreatAssessment { assessed_at, .. } => *assessed_at,
            TimelineEvent::Forecast { predicted_at, .. } => *predicted_at,
        })
    });
    // `total` is captured before truncation so clients can detect overflow.
    let total = events.len();
    if let Some(limit) = params.limit {
        events.truncate(limit);
    }
    Ok(Json(TimelineResponse { events, total }))
}
/// Builds the axum router exposing the contract-health API, wiring the
/// timeline endpoint to [`get_timeline`] with the given shared `state`.
pub fn contract_health_router(state: ContractHealthState) -> axum::Router {
    axum::Router::new()
        .route(
            "/api/v1/contract-health/timeline",
            axum::routing::get(get_timeline),
        )
        .with_state(state)
}