use axum::{
extract::{Json, Query, State},
http::StatusCode,
response::Json as ResponseJson,
};
use json_patch::{patch, Patch};
use jsonptr::PointerBuf;
use mockforge_core::ai_studio::{
get_conversation_store, initialize_conversation_store, ArtifactFreezer, BudgetConfig,
BudgetManager, ChatContext, ChatMessage, ChatOrchestrator, ChatRequest, ChatResponse,
ContractDiffHandler, ContractDiffQueryResult, DebugAnalyzer, DebugContextIntegrator,
DebugRequest, DebugResponse, FreezeRequest, FrozenArtifact, MockGenerator, OrgAiControlsConfig,
OrgControls, PersonaGenerationRequest, PersonaGenerationResponse, PersonaGenerator, UsageStats,
};
use mockforge_core::intelligent_behavior::IntelligentBehaviorConfig;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
use crate::handlers::AdminState;
use crate::models::ApiResponse;
use mockforge_core::ai_studio::config::DeterministicModeConfig;
/// Request body for the `chat` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatRequestPayload {
    // The user's chat message; rejected with 400 when blank.
    pub message: String,
    // Existing conversation to continue; a new conversation is created when absent.
    pub conversation_id: Option<String>,
    // Workspace the conversation is scoped to, if any.
    pub workspace_id: Option<String>,
}
pub async fn chat(
Json(request): Json<ChatRequestPayload>,
) -> Result<ResponseJson<ApiResponse<ChatResponse>>, StatusCode> {
if request.message.trim().is_empty() {
return Err(StatusCode::BAD_REQUEST);
}
let config = IntelligentBehaviorConfig::default();
let orchestrator = ChatOrchestrator::new(config);
let _ = initialize_conversation_store().await;
let context = if let Some(conv_id) = &request.conversation_id {
let store = get_conversation_store();
match store.get_context(conv_id).await {
Ok(Some(ctx)) => Some(ctx),
Ok(None) => {
Some(ChatContext {
history: vec![],
workspace_id: request.workspace_id.clone(),
})
}
Err(_) => {
Some(ChatContext {
history: vec![],
workspace_id: request.workspace_id.clone(),
})
}
}
} else {
None
};
let chat_request = ChatRequest {
message: request.message.clone(),
context,
workspace_id: request.workspace_id.clone(),
org_id: None,
user_id: None,
};
let response_result = orchestrator.process(&chat_request).await;
if let Some(conv_id) = &request.conversation_id {
let store = get_conversation_store();
let user_message = ChatMessage {
role: "user".to_string(),
content: request.message.clone(),
};
let _ = store.add_message(conv_id, user_message).await;
if let Ok(ref response) = response_result {
let assistant_message = ChatMessage {
role: "assistant".to_string(),
content: response.message.clone(),
};
let _ = store.add_message(conv_id, assistant_message).await;
}
} else {
let store = get_conversation_store();
if let Ok(conv_id) = store.create_conversation(request.workspace_id.clone()).await {
let user_message = ChatMessage {
role: "user".to_string(),
content: request.message.clone(),
};
let _ = store.add_message(&conv_id, user_message).await;
if let Ok(ref response) = response_result {
let assistant_message = ChatMessage {
role: "assistant".to_string(),
content: response.message.clone(),
};
let _ = store.add_message(&conv_id, assistant_message).await;
}
}
}
match response_result {
Ok(response) => Ok(ResponseJson(ApiResponse::success(response))),
Err(e) => Ok(ResponseJson(ApiResponse::error(format!("Failed to process chat: {}", e)))),
}
}
/// Request body for the `generate_mock` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GenerateMockRequest {
    // Natural-language description of the mock to generate; rejected with 400 when blank.
    pub description: String,
    // Workspace whose config (AI mode, deterministic mode) should influence generation.
    pub workspace_id: Option<String>,
}
/// Response body for the `generate_mock` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GenerateMockResponse {
    // Generated mock specification, when the generator produced one.
    pub spec: Option<Value>,
    // Human-readable status/result message from the generator.
    pub message: String,
}
/// Generates a mock specification from a natural-language description.
///
/// Returns `400 Bad Request` when the description is blank. When a workspace
/// id is supplied and the workspace loads, its config provides the AI-mode
/// override and a default deterministic-mode config is enabled; otherwise both
/// fall back to `None`. Generator failures are reported inside the
/// `ApiResponse` envelope.
pub async fn generate_mock(
    State(state): State<AdminState>,
    Json(request): Json<GenerateMockRequest>,
) -> Result<ResponseJson<ApiResponse<GenerateMockResponse>>, StatusCode> {
    if request.description.trim().is_empty() {
        return Err(StatusCode::BAD_REQUEST);
    }
    let generator = MockGenerator::new();
    // Load the workspace ONCE to derive both settings; the original issued two
    // identical load_workspace calls (one per setting).
    let (ai_mode, deterministic_config) = match &request.workspace_id {
        Some(workspace_id) => match state.workspace_persistence.load_workspace(workspace_id).await {
            Ok(workspace) => (workspace.config.ai_mode, Some(DeterministicModeConfig::default())),
            Err(_) => (None, None),
        },
        None => (None, None),
    };
    match generator
        .generate(
            &request.description,
            request.workspace_id.as_deref(),
            ai_mode,
            deterministic_config.as_ref(),
        )
        .await
    {
        Ok(result) => {
            let response = GenerateMockResponse {
                spec: result.spec,
                message: result.message,
            };
            Ok(ResponseJson(ApiResponse::success(response)))
        }
        Err(e) => Ok(ResponseJson(ApiResponse::error(format!("Failed to generate mock: {}", e)))),
    }
}
/// Request body for the `debug_test` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DebugTestRequest {
    // Raw logs from the failing test; rejected with 400 when blank.
    pub test_logs: String,
    // Name of the failing test, if known.
    pub test_name: Option<String>,
    // Workspace the test belongs to, if any.
    pub workspace_id: Option<String>,
}
pub async fn debug_test(
Json(request): Json<DebugTestRequest>,
) -> Result<ResponseJson<ApiResponse<DebugResponse>>, StatusCode> {
if request.test_logs.trim().is_empty() {
return Err(StatusCode::BAD_REQUEST);
}
let analyzer = DebugAnalyzer::new();
let debug_request = DebugRequest {
test_logs: request.test_logs,
test_name: request.test_name,
workspace_id: request.workspace_id,
};
match analyzer.analyze(&debug_request).await {
Ok(response) => Ok(ResponseJson(ApiResponse::success(response))),
Err(e) => Ok(ResponseJson(ApiResponse::error(format!(
"Failed to analyze test failure: {}",
e
)))),
}
}
/// Request body for the `debug_analyze_with_context` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DebugWithContextRequest {
    // Raw logs from the failing test; rejected with 400 when blank.
    pub test_logs: String,
    // Name of the failing test, if known.
    pub test_name: Option<String>,
    // Workspace the test belongs to, if any.
    pub workspace_id: Option<String>,
    // Organization id; NOTE(review): currently unused by the handler.
    pub org_id: Option<String>,
}
/// Analyzes failing-test logs, intended to enrich the analysis with debug
/// context in the future.
///
/// Returns `400 Bad Request` when `test_logs` is blank.
///
/// NOTE(review): context integration is not wired up yet — `_integrator` is a
/// placeholder that is always `None`, and the request's `org_id` is ignored,
/// so this currently behaves identically to `debug_test`. Confirm whether the
/// integrator should be constructed and consulted here.
pub async fn debug_analyze_with_context(
    Json(request): Json<DebugWithContextRequest>,
) -> Result<ResponseJson<ApiResponse<DebugResponse>>, StatusCode> {
    if request.test_logs.trim().is_empty() {
        return Err(StatusCode::BAD_REQUEST);
    }
    let analyzer = DebugAnalyzer::new();
    // Placeholder for future debug-context integration; never populated today.
    let _integrator: Option<&DebugContextIntegrator> = None;
    let debug_request = DebugRequest {
        test_logs: request.test_logs,
        test_name: request.test_name,
        workspace_id: request.workspace_id,
    };
    match analyzer.analyze(&debug_request).await {
        Ok(response) => Ok(ResponseJson(ApiResponse::success(response))),
        Err(e) => Ok(ResponseJson(ApiResponse::error(format!(
            "Failed to analyze test failure with context: {}",
            e
        )))),
    }
}
/// Request body for the `generate_persona` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GeneratePersonaRequest {
    // Natural-language description of the persona; rejected with 400 when blank.
    pub description: String,
    // Existing persona to use as a starting point, if any.
    pub base_persona_id: Option<String>,
    // Workspace whose config should influence generation, if any.
    pub workspace_id: Option<String>,
}
/// Generates a persona from a natural-language description.
///
/// Returns `400 Bad Request` when the description is blank. When a workspace
/// id is supplied and the workspace loads, its config provides the AI-mode
/// override and a default deterministic-mode config is enabled; otherwise both
/// fall back to `None`. Generator failures are reported inside the
/// `ApiResponse` envelope.
pub async fn generate_persona(
    State(state): State<AdminState>,
    Json(request): Json<GeneratePersonaRequest>,
) -> Result<ResponseJson<ApiResponse<PersonaGenerationResponse>>, StatusCode> {
    if request.description.trim().is_empty() {
        return Err(StatusCode::BAD_REQUEST);
    }
    let generator = PersonaGenerator::new();
    let persona_request = PersonaGenerationRequest {
        description: request.description.clone(),
        base_persona_id: request.base_persona_id,
        workspace_id: request.workspace_id.clone(),
    };
    // Load the workspace ONCE to derive both settings; the original issued two
    // identical load_workspace calls (one per setting).
    let (ai_mode, deterministic_config) = match &request.workspace_id {
        Some(workspace_id) => match state.workspace_persistence.load_workspace(workspace_id).await {
            Ok(workspace) => (workspace.config.ai_mode, Some(DeterministicModeConfig::default())),
            Err(_) => (None, None),
        },
        None => (None, None),
    };
    match generator.generate(&persona_request, ai_mode, deterministic_config.as_ref()).await {
        Ok(response) => Ok(ResponseJson(ApiResponse::success(response))),
        Err(e) => {
            Ok(ResponseJson(ApiResponse::error(format!("Failed to generate persona: {}", e))))
        }
    }
}
/// Freeze request without provenance metadata.
///
/// NOTE(review): no handler in this file references this type — it may be used
/// elsewhere (e.g. route registration or clients) or be superseded by
/// `FreezeArtifactRequestWithMetadata`; confirm before removing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FreezeArtifactRequest {
    // Category of the artifact (e.g. used as the file-name prefix when listing).
    pub artifact_type: String,
    // The artifact content to freeze.
    pub content: Value,
    // Serialization format of the artifact (e.g. "json" or "yaml").
    pub format: String,
    // Optional explicit output path for the frozen file.
    pub path: Option<String>,
}
/// Request body for the `freeze_artifact` endpoint, including optional LLM
/// provenance metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FreezeArtifactRequestWithMetadata {
    // Category of the artifact (e.g. used as the file-name prefix when listing).
    pub artifact_type: String,
    // The artifact content to freeze.
    pub content: Value,
    // Serialization format of the artifact (e.g. "json" or "yaml").
    pub format: String,
    // Optional explicit output path for the frozen file.
    pub path: Option<String>,
    // Optional provenance describing the LLM that produced the artifact.
    pub metadata: Option<FreezeMetadataPayload>,
}
/// Wire-format LLM provenance metadata attached to a freeze request; mapped
/// into `mockforge_core::ai_studio::FreezeMetadata` by the handler.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FreezeMetadataPayload {
    // Provider of the LLM that generated the artifact (e.g. vendor name).
    pub llm_provider: Option<String>,
    // Model identifier used for generation.
    pub llm_model: Option<String>,
    // Model version, if known.
    pub llm_version: Option<String>,
    // Hash of the prompt that produced the artifact.
    pub prompt_hash: Option<String>,
    // The full original prompt text, if the caller chooses to record it.
    pub original_prompt: Option<String>,
}
pub async fn freeze_artifact(
Json(request): Json<FreezeArtifactRequestWithMetadata>,
) -> Result<ResponseJson<ApiResponse<FrozenArtifact>>, StatusCode> {
let freezer = ArtifactFreezer::new();
let metadata = request.metadata.map(|m| {
use mockforge_core::ai_studio::FreezeMetadata;
FreezeMetadata {
llm_provider: m.llm_provider,
llm_model: m.llm_model,
llm_version: m.llm_version,
prompt_hash: m.prompt_hash,
output_hash: None, original_prompt: m.original_prompt,
}
});
let freeze_request = FreezeRequest {
artifact_type: request.artifact_type,
content: request.content,
format: request.format,
path: request.path,
metadata,
};
match freezer.freeze(&freeze_request).await {
Ok(artifact) => Ok(ResponseJson(ApiResponse::success(artifact))),
Err(e) => Ok(ResponseJson(ApiResponse::error(format!("Failed to freeze artifact: {}", e)))),
}
}
/// Query parameters for the `list_frozen` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ListFrozenQuery {
    // Restrict results to files whose name starts with "<artifact_type>_".
    pub artifact_type: Option<String>,
    // NOTE(review): accepted but not used for filtering by the handler.
    pub workspace_id: Option<String>,
}
/// Lists frozen artifacts found in the freezer's base directory.
///
/// Results can be filtered by `artifact_type` (matched against the file-name
/// prefix `"<type>_"`). Unreadable or unparsable files are skipped silently,
/// and the list is sorted by path, descending.
///
/// NOTE(review): `workspace_id` is accepted in the query but never used for
/// filtering — confirm whether that is intentional.
pub async fn list_frozen(
    Query(params): Query<ListFrozenQuery>,
) -> Result<ResponseJson<ApiResponse<Vec<FrozenArtifact>>>, StatusCode> {
    let freezer = ArtifactFreezer::new();
    let base_dir = freezer.base_dir().to_path_buf();
    let mut artifacts = Vec::new();
    if let Ok(mut entries) = tokio::fs::read_dir(&base_dir).await {
        while let Ok(Some(entry)) = entries.next_entry().await {
            let path = entry.path();
            if !path.is_file() {
                continue;
            }
            let file_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
            // Frozen files are named "<artifact_type>_...", so a prefix match filters by type.
            if let Some(ref artifact_type) = params.artifact_type {
                if !file_name.starts_with(&format!("{}_", artifact_type)) {
                    continue;
                }
            }
            let content = match tokio::fs::read_to_string(&path).await {
                Ok(c) => c,
                Err(_) => continue,
            };
            // Decide the on-disk format ONCE from the extension; the original
            // re-inspected the extension four times.
            let is_yaml = matches!(path.extension().and_then(|e| e.to_str()), Some("yaml" | "yml"));
            let parsed: Option<Value> = if is_yaml {
                serde_yaml::from_str(&content).ok()
            } else {
                serde_json::from_str(&content).ok()
            };
            let content_value = match parsed {
                Some(v) => v,
                None => continue,
            };
            // All provenance fields live under the embedded "_frozen_metadata"
            // object; look it up once instead of three times.
            let frozen_meta = content_value.get("_frozen_metadata");
            let metadata = frozen_meta.and_then(|m| serde_json::from_value(m.clone()).ok());
            let output_hash = frozen_meta
                .and_then(|m| m.get("output_hash"))
                .and_then(|h| h.as_str())
                .map(str::to_string);
            let artifact_type = frozen_meta
                .and_then(|m| m.get("artifact_type"))
                .and_then(|t| t.as_str())
                .map(str::to_string)
                // Fall back to the file-name prefix when metadata is absent.
                .unwrap_or_else(|| file_name.split('_').next().unwrap_or("unknown").to_string());
            artifacts.push(FrozenArtifact {
                artifact_type,
                content: content_value,
                format: if is_yaml { "yaml".to_string() } else { "json".to_string() },
                path: path.to_string_lossy().to_string(),
                metadata,
                output_hash,
            });
        }
    }
    // Lexicographic descending on full path (newest-first for timestamped names).
    artifacts.sort_by(|a, b| b.path.cmp(&a.path));
    Ok(ResponseJson(ApiResponse::success(artifacts)))
}
/// Returns AI usage statistics for the workspace named by the `workspace_id`
/// query parameter. A missing parameter degrades to the empty string rather
/// than erroring; budget-manager failures are reported inside the envelope.
pub async fn get_usage(
    Query(params): Query<HashMap<String, String>>,
) -> Result<ResponseJson<ApiResponse<UsageStats>>, StatusCode> {
    let workspace_id = params.get("workspace_id").map(String::as_str).unwrap_or("");
    let manager = BudgetManager::new(BudgetConfig::default());
    match manager.get_usage(workspace_id).await {
        Ok(stats) => Ok(ResponseJson(ApiResponse::success(stats))),
        Err(e) => Ok(ResponseJson(ApiResponse::error(format!("Failed to get usage stats: {}", e)))),
    }
}
/// Request body for the `apply_patch` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApplyPatchRequest {
    // Either {"operations": [...]} or a single RFC 6902 operation object.
    pub patch: Value,
    // Config file to patch; defaults to "mockforge.yaml" when absent.
    pub config_path: Option<String>,
}
pub async fn apply_patch(
State(_state): State<AdminState>,
Json(request): Json<ApplyPatchRequest>,
) -> Result<ResponseJson<ApiResponse<Value>>, StatusCode> {
let config_path = request.config_path.unwrap_or_else(|| "mockforge.yaml".to_string());
let config_content = match tokio::fs::read_to_string(&config_path).await {
Ok(content) => content,
Err(e) => {
return Ok(ResponseJson(ApiResponse::error(format!(
"Failed to read config file {}: {}",
config_path, e
))));
}
};
let mut config_value: Value = if config_path.ends_with(".yaml") || config_path.ends_with(".yml")
{
serde_yaml::from_str(&config_content).map_err(|_e| StatusCode::BAD_REQUEST)?
} else {
serde_json::from_str(&config_content).map_err(|_e| StatusCode::BAD_REQUEST)?
};
let patch_ops: Patch =
if let Some(ops_array) = request.patch.get("operations").and_then(|v| v.as_array()) {
Patch(ops_array.iter().filter_map(|op| parse_patch_operation(op).ok()).collect())
} else {
Patch(vec![parse_patch_operation(&request.patch)?])
};
patch(&mut config_value, &patch_ops).map_err(|_e| StatusCode::BAD_REQUEST)?;
let updated_content = if config_path.ends_with(".yaml") || config_path.ends_with(".yml") {
serde_yaml::to_string(&config_value).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
} else {
serde_json::to_string_pretty(&config_value)
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
};
tokio::fs::write(&config_path, updated_content)
.await
.map_err(|_e| StatusCode::INTERNAL_SERVER_ERROR)?;
Ok(ResponseJson(ApiResponse::success(serde_json::json!({
"message": "Patch applied successfully",
"config_path": config_path,
"updated_config": config_value
}))))
}
fn parse_patch_operation(op: &Value) -> Result<json_patch::PatchOperation, StatusCode> {
use json_patch::{AddOperation, PatchOperation, RemoveOperation, ReplaceOperation};
let op_type = op.get("op").and_then(|v| v.as_str()).ok_or(StatusCode::BAD_REQUEST)?;
let path_str = op.get("path").and_then(|v| v.as_str()).ok_or(StatusCode::BAD_REQUEST)?;
let path: PointerBuf = path_str.parse().map_err(|_| StatusCode::BAD_REQUEST)?;
match op_type {
"add" => {
let value = op.get("value").ok_or(StatusCode::BAD_REQUEST)?;
Ok(PatchOperation::Add(AddOperation {
path,
value: value.clone(),
}))
}
"remove" => Ok(PatchOperation::Remove(RemoveOperation { path })),
"replace" => {
let value = op.get("value").ok_or(StatusCode::BAD_REQUEST)?;
Ok(PatchOperation::Replace(ReplaceOperation {
path,
value: value.clone(),
}))
}
"copy" => {
let from = op.get("from").and_then(|v| v.as_str()).ok_or(StatusCode::BAD_REQUEST)?;
let from_path: PointerBuf = from.parse().map_err(|_| StatusCode::BAD_REQUEST)?;
Ok(PatchOperation::Copy(json_patch::CopyOperation {
path,
from: from_path,
}))
}
"move" => {
let from = op.get("from").and_then(|v| v.as_str()).ok_or(StatusCode::BAD_REQUEST)?;
let from_path: PointerBuf = from.parse().map_err(|_| StatusCode::BAD_REQUEST)?;
Ok(PatchOperation::Move(json_patch::MoveOperation {
path,
from: from_path,
}))
}
"test" => {
let value = op.get("value").ok_or(StatusCode::BAD_REQUEST)?;
Ok(PatchOperation::Test(json_patch::TestOperation {
path,
value: value.clone(),
}))
}
_ => Err(StatusCode::BAD_REQUEST),
}
}
/// Loads organization-level AI controls.
///
/// `org_id` falls back to "default" when absent; `workspace_id` is optional.
/// Load failures are reported inside the `ApiResponse` envelope.
pub async fn get_org_controls(
    Query(params): Query<HashMap<String, String>>,
) -> Result<ResponseJson<ApiResponse<OrgAiControlsConfig>>, StatusCode> {
    let org_id = params.get("org_id").map(String::as_str).unwrap_or("default");
    let workspace_id = params.get("workspace_id").map(String::as_str);
    let org_controls = OrgControls::new(OrgAiControlsConfig::default());
    match org_controls.load_org_config(org_id, workspace_id).await {
        Ok(controls) => Ok(ResponseJson(ApiResponse::success(controls))),
        Err(e) => {
            Ok(ResponseJson(ApiResponse::error(format!("Failed to get org controls: {}", e))))
        }
    }
}
/// Accepts updated org AI controls and echoes them back.
///
/// NOTE(review): persistence is not implemented — the `org_id`/`workspace_id`
/// query parameters are accepted but unused and the payload is returned
/// unchanged. Confirm whether this endpoint should write to a store.
pub async fn update_org_controls(
    Query(params): Query<HashMap<String, String>>,
    Json(controls): Json<OrgAiControlsConfig>,
) -> Result<ResponseJson<ApiResponse<OrgAiControlsConfig>>, StatusCode> {
    // Read-and-discard keeps the intended parameters visible without warnings.
    let _ = (params.get("org_id"), params.get("workspace_id"));
    Ok(ResponseJson(ApiResponse::success(controls)))
}
/// Returns a placeholder org usage report for the requested period.
///
/// `period_start`/`period_end` are RFC 3339 query parameters; `period_start`
/// defaults to the first instant of the current UTC month and `period_end` to
/// now. All figures are zero until a registry database connection is wired in
/// (see the embedded message).
pub async fn get_org_usage(
    Query(params): Query<HashMap<String, String>>,
) -> Result<ResponseJson<ApiResponse<Value>>, StatusCode> {
    use chrono::{Datelike, TimeZone, Utc};
    // Shared RFC 3339 parser for both period bounds; invalid input is treated as absent.
    let parse_ts = |key: &str| {
        params
            .get(key)
            .and_then(|s| chrono::DateTime::parse_from_rfc3339(s).ok())
            .map(|dt| dt.with_timezone(&Utc))
    };
    let period_start = parse_ts("period_start").unwrap_or_else(|| {
        let now = Utc::now();
        // Day 1 of the current month at midnight always exists, so this cannot fail.
        let month_start = chrono::NaiveDate::from_ymd_opt(now.year(), now.month(), 1)
            .and_then(|d| d.and_hms_opt(0, 0, 0))
            .unwrap();
        Utc.from_utc_datetime(&month_start)
    });
    let period_end = parse_ts("period_end").unwrap_or_else(Utc::now);
    let org_id = params.get("org_id").cloned();
    let workspace_id = params.get("workspace_id").cloned();
    Ok(ResponseJson(ApiResponse::success(serde_json::json!({
        "org_id": org_id,
        "workspace_id": workspace_id,
        "period_start": period_start.to_rfc3339(),
        "period_end": period_end.to_rfc3339(),
        "total_tokens": 0,
        "total_cost": 0.0,
        "total_calls": 0,
        "feature_breakdown": {},
        "message": "Usage stats require database connection. Connect to registry server database to enable."
    }))))
}
/// Request body for the `contract_diff_query` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContractDiffQueryRequest {
    // Natural-language contract-diff question; rejected with 400 when blank.
    pub query: String,
    // NOTE(review): accepted but not forwarded to the handler today.
    pub workspace_id: Option<String>,
    // NOTE(review): accepted but not forwarded to the handler today.
    pub org_id: Option<String>,
}
/// Answers a natural-language contract-diff question.
///
/// Returns `400 Bad Request` for a blank query and `500` when the handler
/// itself cannot be constructed; analysis failures are reported inside the
/// `ApiResponse` envelope.
pub async fn contract_diff_query(
    Json(request): Json<ContractDiffQueryRequest>,
) -> Result<ResponseJson<ApiResponse<ContractDiffQueryResult>>, StatusCode> {
    if request.query.trim().is_empty() {
        return Err(StatusCode::BAD_REQUEST);
    }
    let handler = ContractDiffHandler::new().map_err(|e| {
        tracing::error!("Failed to create ContractDiffHandler: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    // NOTE(review): the request's workspace_id/org_id are not forwarded here
    // (both arguments passed as None) — confirm whether analyze_from_query
    // should receive them.
    let analysis = handler.analyze_from_query(&request.query, None, None).await;
    match analysis {
        Ok(result) => Ok(ResponseJson(ApiResponse::success(result))),
        Err(e) => Ok(ResponseJson(ApiResponse::error(format!(
            "Failed to process contract diff query: {}",
            e
        )))),
    }
}