use axum::{
extract::State,
http::{header, HeaderMap, HeaderValue, StatusCode},
response::{IntoResponse, Json},
};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::sync::Arc;
use crate::proxy::{ToolCallResult, ToolInfo};
use crate::server::AppState;
/// Bootstrap configuration returned by `GET /api/config`.
///
/// Everything the preview front-end needs before it can render: where the
/// MCP server lives, cosmetic settings, and which `_meta` keys to look for
/// in tool descriptors / invocation results for the active preview mode.
#[derive(Serialize)]
pub struct ConfigResponse {
    /// URL of the upstream MCP server being proxied.
    pub mcp_url: String,
    /// UI theme name (passed through from server config).
    pub theme: String,
    /// UI locale identifier (passed through from server config).
    pub locale: String,
    /// Tool to auto-select on load, if configured.
    pub initial_tool: Option<String>,
    /// Preview mode rendered as a string (see `PreviewMode::to_string`).
    pub mode: String,
    /// `_meta` keys the UI should read from tool descriptors in this mode.
    pub descriptor_keys: Vec<String>,
    /// `_meta` keys the UI should read from tool-invocation results in this mode.
    pub invocation_keys: Vec<String>,
}
/// `GET /api/config` — static bootstrap configuration for the preview UI.
///
/// The advertised `_meta` key sets depend on the preview mode: ChatGPT mode
/// surfaces the `openai/*` Apps-SDK keys, while the default mode only
/// surfaces the generic `ui` descriptor key.
pub async fn get_config(State(state): State<Arc<AppState>>) -> Json<ConfigResponse> {
    use crate::server::PreviewMode;

    let chatgpt_mode = state.config.mode == PreviewMode::ChatGpt;
    // Small helper so the key lists below can be written as plain literals.
    let to_strings = |keys: &[&str]| keys.iter().map(|k| k.to_string()).collect::<Vec<String>>();

    let (descriptor_keys, invocation_keys) = if chatgpt_mode {
        (
            to_strings(&[
                "openai/outputTemplate",
                "openai/toolInvocation/invoking",
                "openai/toolInvocation/invoked",
                "openai/widgetAccessible",
            ]),
            to_strings(&[
                "openai/toolInvocation/invoking",
                "openai/toolInvocation/invoked",
            ]),
        )
    } else {
        (to_strings(&["ui"]), Vec::new())
    };

    Json(ConfigResponse {
        mcp_url: state.config.mcp_url.clone(),
        theme: state.config.theme.clone(),
        locale: state.config.locale.clone(),
        initial_tool: state.config.initial_tool.clone(),
        mode: state.config.mode.to_string(),
        descriptor_keys,
        invocation_keys,
    })
}
/// Response for `GET /api/tools`.
///
/// Always serializes a `tools` array; `error` is present only when the proxy
/// call failed (the handler deliberately reports failures in-band so the UI
/// can show a message instead of an HTTP error page).
#[derive(Serialize)]
pub struct ToolsResponse {
    /// Tools advertised by the upstream MCP server (empty on error).
    pub tools: Vec<ToolInfo>,
    /// Human-readable failure description; omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
}
pub async fn list_tools(
State(state): State<Arc<AppState>>,
) -> Result<Json<ToolsResponse>, (StatusCode, String)> {
match state.proxy.list_tools().await {
Ok(mut tools) => {
if state.config.mode == crate::server::PreviewMode::ChatGpt {
for tool in &mut tools {
enrich_meta_for_chatgpt(&mut tool.meta);
}
}
Ok(Json(ToolsResponse { tools, error: None }))
},
Err(e) => Ok(Json(ToolsResponse {
tools: vec![],
error: Some(e.to_string()),
})),
}
}
/// Request body for `POST /api/call-tool`.
#[derive(Deserialize)]
pub struct CallToolRequest {
    /// Name of the tool to invoke on the upstream server.
    pub name: String,
    /// Tool arguments; defaults to `Value::Null` when the field is omitted.
    #[serde(default)]
    pub arguments: Value,
}
pub async fn call_tool(
State(state): State<Arc<AppState>>,
Json(request): Json<CallToolRequest>,
) -> Result<Json<ToolCallResult>, (StatusCode, String)> {
let mut result = state
.proxy
.call_tool(&request.name, request.arguments)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
if state.config.mode == crate::server::PreviewMode::ChatGpt {
enrich_meta_for_chatgpt(&mut result.meta);
}
Ok(Json(result))
}
/// Query parameters for `GET /api/resource` (e.g. `?uri=ui://app/foo`).
#[derive(Deserialize)]
pub struct ReadResourceParams {
    /// Resource URI to resolve.
    pub uri: String,
}
pub async fn list_resources(State(state): State<Arc<AppState>>) -> Json<Value> {
let mut all_resources: Vec<serde_json::Value> = Vec::new();
if let Some(ref widgets_dir) = state.config.widgets_dir {
match std::fs::read_dir(widgets_dir) {
Ok(entries) => {
let mut widget_entries: Vec<_> = entries
.filter_map(|e| e.ok())
.filter(|e| e.path().extension().and_then(|ext| ext.to_str()) == Some("html"))
.collect();
widget_entries.sort_by_key(|e| e.file_name());
for entry in widget_entries {
if let Some(stem) = entry
.path()
.file_stem()
.and_then(|s| s.to_str().map(String::from))
{
all_resources.push(json!({
"uri": format!("ui://app/{}", stem),
"name": stem,
"description": format!("Widget from {}", entry.path().display()),
"mimeType": "text/html"
}));
}
}
tracing::debug!(
"Discovered {} widget(s) from {}",
all_resources.len(),
widgets_dir.display()
);
},
Err(e) => {
tracing::warn!(
"Failed to read widgets directory {}: {}",
widgets_dir.display(),
e
);
},
}
}
match state.proxy.list_resources().await {
Ok(resources) => {
let ui_resources = resources.into_iter().filter(|r| {
let mime_match = r
.mime_type
.as_deref()
.is_some_and(|m| m.to_lowercase().contains("html"));
let uri_match = r.uri.starts_with("ui://");
mime_match || uri_match
});
for r in ui_resources {
let dominated = all_resources
.iter()
.any(|existing| existing.get("uri").and_then(|v| v.as_str()) == Some(&r.uri));
if !dominated {
all_resources.push(json!({
"uri": r.uri,
"name": r.name,
"description": r.description,
"mimeType": r.mime_type,
"_meta": r.meta
}));
}
}
},
Err(e) => {
tracing::warn!("Proxy list_resources failed: {}", e);
if all_resources.is_empty() {
return json_response(json!({ "resources": [], "error": e.to_string() }));
}
},
}
if state.config.mode == crate::server::PreviewMode::ChatGpt {
for resource in &mut all_resources {
enrich_value_meta_for_chatgpt(resource);
}
}
json_response(json!({ "resources": all_resources }))
}
/// `GET /api/resource?uri=...` — resolve a single resource.
///
/// URIs of the form `ui://app/{name}` are served from `widgets_dir/{name}.html`
/// with the widget bridge script injected; a failed local read returns a
/// human-readable error page so the preview iframe shows what went wrong.
/// All other URIs (and unsafe widget names) are forwarded to the upstream
/// MCP server.
pub async fn read_resource(
    State(state): State<Arc<AppState>>,
    axum::extract::Query(params): axum::extract::Query<ReadResourceParams>,
) -> Json<Value> {
    if let Some(ref widgets_dir) = state.config.widgets_dir {
        if let Some(widget_name) = params.uri.strip_prefix("ui://app/") {
            // Security: the widget name comes straight from the query string
            // and is joined into widgets_dir. Reject path separators and
            // parent references so a crafted URI like `ui://app/../../secret`
            // cannot read files outside the widgets directory.
            let name_is_safe = !widget_name.is_empty()
                && !widget_name.contains(['/', '\\'])
                && !widget_name.contains("..");
            if name_is_safe {
                let file_path = widgets_dir.join(format!("{}.html", widget_name));
                let html = match std::fs::read_to_string(&file_path) {
                    Ok(content) => {
                        tracing::debug!(
                            "Reading widget file: {} ({} bytes)",
                            file_path.display(),
                            content.len()
                        );
                        inject_bridge_script(&content, "/assets/widget-runtime.mjs")
                    },
                    Err(err) => {
                        tracing::warn!("Failed to read widget {}: {}", file_path.display(), err);
                        widget_error_html(widget_name, &file_path, &err.to_string())
                    },
                };
                return json_response(json!({
                    "contents": [{
                        "uri": params.uri,
                        "text": html,
                        "mimeType": "text/html"
                    }]
                }));
            }
            tracing::warn!("Rejected widget URI with unsafe name: {}", params.uri);
        }
    }
    // Fall through to the upstream server for everything else.
    match state.proxy.read_resource(&params.uri).await {
        Ok(result) => json_response(json!({
            "contents": result.contents,
            "_meta": result.meta
        })),
        Err(e) => json_response(json!({ "contents": null, "error": e.to_string() })),
    }
}
/// Inject the widget-bridge `<script>` tag into raw widget HTML.
///
/// Thin wrapper that delegates to `pmcp_widget_utils::inject_bridge_script`;
/// kept as a local seam so call sites here don't depend on the crate path.
fn inject_bridge_script(html: &str, bridge_url: &str) -> String {
    pmcp_widget_utils::inject_bridge_script(html, bridge_url)
}
/// Render a self-contained HTML error page shown in the preview iframe when
/// a widget file cannot be read. Includes the widget name, the path that was
/// attempted, and the underlying error, plus a hint on how to recover.
fn widget_error_html(name: &str, path: &std::path::Path, error: &str) -> String {
    // Shadow `path` with its displayable form so the format string below can
    // capture all three values directly by identifier.
    let path = path.display();
    format!(
        r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Widget Error: {name}</title>
<style>
body {{
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
background: #1a1a2e;
color: #eee;
display: flex;
align-items: center;
justify-content: center;
min-height: 100vh;
margin: 0;
padding: 20px;
}}
.error-card {{
background: #4a1515;
border: 1px solid #ff6b6b;
border-radius: 12px;
padding: 24px 32px;
max-width: 560px;
width: 100%;
}}
h2 {{ color: #ff6b6b; margin: 0 0 12px 0; font-size: 1.2rem; }}
.file-path {{
font-family: monospace;
font-size: 0.85rem;
color: #ffcc00;
background: rgba(0,0,0,0.3);
padding: 6px 10px;
border-radius: 6px;
word-break: break-all;
margin-bottom: 12px;
}}
.error-message {{ font-family: monospace; font-size: 0.85rem; color: #ff9999; }}
.hint {{ margin-top: 16px; font-size: 0.85rem; color: #888; }}
</style>
</head>
<body>
<div class="error-card">
<h2>Widget Load Error</h2>
<div class="file-path">{path}</div>
<div class="error-message">{error}</div>
<div class="hint">Create or fix the widget file and refresh the browser to retry.</div>
</div>
</body>
</html>"#
    )
}
/// `POST /api/reconnect` — drop the current proxy session and probe the
/// upstream server by listing tools. Reports success/failure in-band.
pub async fn reconnect(State(state): State<Arc<AppState>>) -> Json<Value> {
    state.proxy.reset_session().await;
    let payload = match state.proxy.list_tools().await {
        Ok(tools) => json!({
            "success": true,
            "toolCount": tools.len()
        }),
        Err(e) => json!({
            "success": false,
            "error": e.to_string()
        }),
    };
    json_response(payload)
}
/// `GET /api/status` — report whether the proxy currently has a live
/// connection to the upstream MCP server.
pub async fn status(State(state): State<Arc<AppState>>) -> Json<Value> {
    json_response(json!({ "connected": state.proxy.is_connected().await }))
}
/// `POST /mcp` — raw pass-through of an MCP JSON-RPC request body.
///
/// Forwards the MCP session-id and protocol-version headers from the client
/// to the proxy, and echoes the proxy's values back on the response so the
/// browser can continue the same session. Proxy failures map to 502.
pub async fn forward_mcp(
    State(state): State<Arc<AppState>>,
    req_headers: HeaderMap,
    body: String,
) -> impl IntoResponse {
    use crate::proxy::{MCP_PROTOCOL_VERSION, MCP_SESSION_ID};

    let session_id = req_headers
        .get(MCP_SESSION_ID)
        .and_then(|v| v.to_str().ok());
    let protocol_version = req_headers
        .get(MCP_PROTOCOL_VERSION)
        .and_then(|v| v.to_str().ok());

    let forwarded = state
        .proxy
        .forward_raw(body, session_id, protocol_version)
        .await;

    match forwarded {
        Err(e) => (StatusCode::BAD_GATEWAY, e.to_string()).into_response(),
        Ok(result) => {
            let mut headers = HeaderMap::new();
            headers.insert(
                header::CONTENT_TYPE,
                HeaderValue::from_static("application/json"),
            );
            // Echo session/protocol headers back when present and valid.
            for (name, value) in [
                (MCP_SESSION_ID, result.session_id.as_deref()),
                (MCP_PROTOCOL_VERSION, result.protocol_version.as_deref()),
            ] {
                if let Some(val) = value.and_then(|v| HeaderValue::from_str(v).ok()) {
                    headers.insert(name, val);
                }
            }
            (StatusCode::OK, headers, result.body).into_response()
        },
    }
}
fn enrich_meta_for_chatgpt(meta: &mut Option<Value>) {
let meta_obj = match meta {
Some(Value::Object(ref mut map)) => map,
_ => return,
};
let resource_uri = meta_obj
.get("ui")
.and_then(|ui| ui.get("resourceUri"))
.and_then(Value::as_str)
.or_else(|| meta_obj.get("ui/resourceUri").and_then(Value::as_str))
.map(String::from);
if let Some(uri) = resource_uri {
meta_obj
.entry("openai/outputTemplate")
.or_insert_with(|| Value::String(uri));
meta_obj
.entry("openai/widgetAccessible")
.or_insert_with(|| Value::Bool(true));
meta_obj
.entry("openai/toolInvocation/invoking")
.or_insert_with(|| Value::String("Running...".into()));
meta_obj
.entry("openai/toolInvocation/invoked")
.or_insert_with(|| Value::String("Done".into()));
}
}
fn enrich_value_meta_for_chatgpt(resource: &mut Value) {
if let Some(Value::Object(map)) = resource.get_mut("_meta") {
let resource_uri = map
.get("ui")
.and_then(|ui| ui.get("resourceUri"))
.and_then(Value::as_str)
.or_else(|| map.get("ui/resourceUri").and_then(Value::as_str))
.map(String::from);
if let Some(uri) = resource_uri {
map.entry("openai/outputTemplate")
.or_insert_with(|| Value::String(uri));
map.entry("openai/widgetAccessible")
.or_insert_with(|| Value::Bool(true));
map.entry("openai/toolInvocation/invoking")
.or_insert_with(|| Value::String("Running...".into()));
map.entry("openai/toolInvocation/invoked")
.or_insert_with(|| Value::String("Done".into()));
}
}
}
/// Wrap an arbitrary JSON value in an axum `Json` response.
///
/// Trivial today, but gives every handler a single seam for future
/// response-wide shaping (headers, envelope fields, etc.).
fn json_response(value: Value) -> Json<Value> {
    Json(value)
}