use axum::{
body::Body,
extract::Query,
http::{header, StatusCode, Uri},
response::{IntoResponse, Response},
routing::get,
Router,
};
use ccboard_core::AlertSeverity;
use ccboard_core::DataStore;
use mime_guess::from_path;
use rust_embed::RustEmbed;
use serde::Deserialize;
use std::sync::Arc;
use tower_http::cors::{Any, CorsLayer};
/// Frontend assets embedded into the binary at build time from the
/// crate-relative `dist/` directory (via `rust_embed`).
#[derive(RustEmbed)]
#[folder = "dist/"]
struct DistAssets;
use crate::sse;
/// Query parameters accepted by `GET /api/sessions`.
#[derive(Debug, Deserialize)]
struct SessionsQuery {
    // Zero-based page index.
    #[serde(default)]
    page: usize,
    // Requested page size (capped server-side in the handler).
    #[serde(default = "default_page_size")]
    limit: usize,
    // Case-insensitive free-text filter over id / project / first message.
    #[serde(default)]
    search: Option<String>,
    // Substring filter on the project path.
    #[serde(default)]
    project: Option<String>,
    // Substring filter on the model names used.
    #[serde(default)]
    model: Option<String>,
    // Relative time window such as "7d" or "24h" (see `parse_since`).
    #[serde(default)]
    since: Option<String>,
    // Sort key: "date", "tokens" or "cost"; anything else keeps store order.
    #[serde(default = "default_sort")]
    sort: String,
    // Sort direction: "asc" or anything else for descending.
    #[serde(default = "default_order")]
    order: String,
}
/// Serde default for `SessionsQuery::limit`.
fn default_page_size() -> usize { 50 }
/// Serde default for `SessionsQuery::sort`.
fn default_sort() -> String { String::from("date") }
/// Serde default for `SessionsQuery::order`.
fn default_order() -> String { String::from("desc") }
/// Query parameters accepted by `GET /api/sessions/recent`.
#[derive(Debug, Deserialize)]
struct RecentQuery {
    // Maximum number of sessions to return.
    #[serde(default = "default_recent_limit")]
    limit: usize,
}
/// Serde default for `RecentQuery::limit`.
fn default_recent_limit() -> usize { 5 }
/// Query parameters accepted by `GET /api/search`.
#[derive(Debug, Deserialize)]
struct SearchQuery {
    // Search query string (required).
    q: String,
    // Maximum number of results.
    #[serde(default = "default_search_limit")]
    limit: usize,
}
/// Serde default for `SearchQuery::limit`.
fn default_search_limit() -> usize { 20 }
/// Query parameters accepted by `GET /api/activity/violations`.
#[derive(Debug, Deserialize)]
struct ViolationsQuery {
    // Maximum number of violations to return (counts are computed pre-cap).
    #[serde(default = "default_violations_limit")]
    limit: usize,
    // Severity floor: "Critical" or "Warning"; anything else keeps all.
    #[serde(default)]
    min_severity: Option<String>,
}
/// Serde default for `ViolationsQuery::limit`.
fn default_violations_limit() -> usize { 50 }
/// Returns true when a real compiled frontend is embedded: the embedded
/// `index.html` must exist and must not contain the "Web UI not available"
/// placeholder text.
pub fn has_real_frontend() -> bool {
    const PLACEHOLDER: &[u8] = b"Web UI not available";
    DistAssets::get("index.html").map_or(false, |file| {
        !file
            .data
            .windows(PLACEHOLDER.len())
            .any(|window| window == PLACEHOLDER)
    })
}
/// Looks up `path` in the embedded assets; when present, builds an HTTP 200
/// response carrying the file bytes with a Content-Type guessed from the
/// file extension (falling back to application/octet-stream).
fn get_embedded_asset(path: &str) -> Option<Response> {
    let file = DistAssets::get(path)?;
    let content_type = from_path(path).first_or_octet_stream().as_ref().to_string();
    let response = (
        StatusCode::OK,
        [(header::CONTENT_TYPE, content_type)],
        Body::from(file.data),
    )
        .into_response();
    Some(response)
}
/// Fallback handler serving the embedded SPA: the exact asset when it
/// exists, otherwise `index.html` (client-side routing), otherwise 404.
async fn frontend_handler(uri: Uri) -> Response {
    let mut path = uri.path().trim_start_matches('/');
    if path.is_empty() {
        path = "index.html";
    }
    match get_embedded_asset(path).or_else(|| get_embedded_asset("index.html")) {
        Some(response) => response,
        None => StatusCode::NOT_FOUND.into_response(),
    }
}
/// Builds the application router: JSON API under `/api/*`, a server-sent
/// events endpoint at `/api/events`, and a fallback serving the embedded
/// frontend. CORS is fully open (any origin, method and header).
pub fn create_router(store: Arc<DataStore>) -> Router {
    let cors = CorsLayer::new()
        .allow_origin(Any)
        .allow_methods(Any)
        .allow_headers(Any);
    Router::new()
        .route("/api/search", get(search_handler))
        .route("/api/stats", get(stats_handler))
        .route("/api/quota", get(quota_handler))
        // More specific session routes registered alongside the listing route.
        .route("/api/sessions/recent", get(recent_sessions_handler))
        .route("/api/sessions/live", get(live_sessions_handler))
        .route("/api/sessions", get(sessions_handler))
        .route("/api/config/merged", get(config_handler))
        .route("/api/hooks", get(hooks_handler))
        .route("/api/mcp", get(mcp_handler))
        .route("/api/agents", get(agents_handler))
        .route("/api/commands", get(commands_handler))
        .route("/api/skills", get(skills_handler))
        .route("/api/plugins", get(plugins_handler))
        .route(
            "/api/analytics/suggestions",
            get(analytics_suggestions_handler),
        )
        .route("/api/task-graph", get(task_graph_handler))
        .route("/api/health", get(health_handler))
        .route("/api/activity/violations", get(activity_violations_handler))
        // Path parameter: per-session activity analysis.
        .route("/api/activity/{session_id}", get(activity_session_handler))
        .route("/api/events", get(sse_handler))
        // Anything not matched above is treated as a frontend asset request.
        .fallback(frontend_handler)
        .layer(cors)
        .with_state(store)
}
/// GET /api/stats — aggregate dashboard statistics.
///
/// Starts from the store's precomputed stats object and grafts onto it:
/// 30-day token history and forecast, top-5 projects by estimated cost,
/// most-used model, total and average session cost, cache hit ratio and
/// the configured MCP server count. Returns `{"error": ...}` when stats
/// have not been loaded.
async fn stats_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    let stats = store.stats();
    let sessions = store.all_sessions();
    // Analytics over the trailing 30-day window.
    let analytics = ccboard_core::analytics::AnalyticsData::compute(
        &sessions,
        ccboard_core::analytics::Period::last_30d(),
    );
    let historical_tokens: Vec<u64> = analytics.trends.daily_tokens.clone();
    // Cumulative linear ramp toward the forecast 30-day token total; only
    // produced once at least 7 days of history exist, otherwise left empty.
    let forecast_tokens: Vec<u64> = {
        let mut forecast = Vec::new();
        if analytics.trends.dates.len() >= 7 {
            for i in 1..=30 {
                // Day i carries i/30 of the projected 30-day total.
                let projected = analytics.forecast.next_30_days_tokens as f64 / 30.0 * i as f64;
                forecast.push(projected as u64);
            }
        }
        forecast
    };
    // Top-5 projects by estimated cost, each with its share of the total.
    let projects_by_cost: Vec<serde_json::Value> = {
        let mut project_costs: std::collections::HashMap<String, f64> =
            std::collections::HashMap::new();
        for session in &sessions {
            let cost = calculate_session_cost(
                session.input_tokens,
                session.output_tokens,
                session.cache_creation_tokens,
                session.cache_read_tokens,
                &session.models_used,
            );
            *project_costs
                .entry(session.project_path.as_str().to_string())
                .or_insert(0.0) += cost;
        }
        // Descending by cost; NaN-safe via the Ordering::Equal fallback.
        let mut sorted: Vec<_> = project_costs.into_iter().collect();
        sorted.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
        let total_cost: f64 = sorted.iter().map(|(_, c)| c).sum();
        sorted
            .iter()
            .take(5)
            .map(|(project, cost)| {
                let percentage = if total_cost > 0.0 {
                    cost / total_cost * 100.0
                } else {
                    0.0
                };
                serde_json::json!({
                    "project": project,
                    "cost": cost,
                    "percentage": percentage,
                })
            })
            .collect()
    };
    // Model with the highest total usage count across the analytics window.
    let most_used_model = analytics
        .trends
        .model_usage_over_time
        .iter()
        .max_by_key(|(_, counts)| counts.iter().sum::<usize>())
        .map(|(model, counts)| {
            let total: usize = counts.iter().sum();
            serde_json::json!({
                "name": model,
                "count": total,
            })
        });
    match stats {
        Some(s) => {
            // Serialize the base stats object, then insert the extras.
            let mut value = serde_json::to_value(&s).unwrap_or(serde_json::Value::Null);
            if let Some(obj) = value.as_object_mut() {
                obj.insert(
                    "dailyTokens30d".to_string(),
                    serde_json::json!(historical_tokens),
                );
                obj.insert(
                    "forecastTokens30d".to_string(),
                    serde_json::json!(forecast_tokens),
                );
                obj.insert(
                    "forecastConfidence".to_string(),
                    serde_json::json!(analytics.forecast.confidence),
                );
                obj.insert(
                    "forecastCost30d".to_string(),
                    serde_json::json!(analytics.forecast.next_30_days_cost),
                );
                obj.insert(
                    "projectsByCost".to_string(),
                    serde_json::json!(projects_by_cost),
                );
                obj.insert(
                    "mostUsedModel".to_string(),
                    serde_json::json!(most_used_model),
                );
                // Estimated cost summed over every known session.
                let total_cost: f64 = sessions
                    .iter()
                    .map(|s| {
                        calculate_session_cost(
                            s.input_tokens,
                            s.output_tokens,
                            s.cache_creation_tokens,
                            s.cache_read_tokens,
                            &s.models_used,
                        )
                    })
                    .sum();
                let avg_session_cost = if !sessions.is_empty() {
                    total_cost / sessions.len() as f64
                } else {
                    0.0
                };
                // NOTE(review): this sums ALL sessions, not only the current
                // calendar month, despite the "thisMonthCost" key — confirm
                // whether the frontend expects a month-scoped figure.
                obj.insert("thisMonthCost".to_string(), serde_json::json!(total_cost));
                obj.insert(
                    "avgSessionCost".to_string(),
                    serde_json::json!(avg_session_cost),
                );
                let cache_hit_ratio = s.cache_ratio();
                obj.insert(
                    "cacheHitRatio".to_string(),
                    serde_json::json!(cache_hit_ratio),
                );
                let mcp_count = store.mcp_config().map(|c| c.servers.len()).unwrap_or(0);
                obj.insert("mcpServersCount".to_string(), serde_json::json!(mcp_count));
            }
            axum::Json(value)
        }
        None => axum::Json(serde_json::json!({"error": "Stats not loaded"})),
    }
}
/// GET /api/quota — current budget usage, projection and alert level.
/// Returns `{"error": ...}` when no budget is configured or stats are
/// not loaded.
async fn quota_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    use ccboard_core::quota::AlertLevel;
    let Some(quota) = store.quota_status() else {
        return axum::Json(serde_json::json!({
            "error": "No budget configured or stats not loaded",
        }));
    };
    // Lowercase string form of the alert level for the JSON payload.
    let alert_level = match quota.alert_level {
        AlertLevel::Safe => "safe",
        AlertLevel::Warning => "warning",
        AlertLevel::Critical => "critical",
        AlertLevel::Exceeded => "exceeded",
    };
    axum::Json(serde_json::json!({
        "current_cost": quota.current_cost,
        "budget_limit": quota.budget_limit,
        "usage_pct": quota.usage_pct,
        "projected_monthly_cost": quota.projected_monthly_cost,
        "projected_overage": quota.projected_overage,
        "alert_level": alert_level,
    }))
}
async fn recent_sessions_handler(
Query(params): Query<RecentQuery>,
axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
let mut all_sessions = store.all_sessions();
all_sessions.sort_by(|a, b| b.last_timestamp.cmp(&a.last_timestamp));
let sessions: Vec<_> = all_sessions
.iter()
.take(params.limit)
.map(|s| session_to_json(s))
.collect();
axum::Json(serde_json::json!({
"sessions": sessions,
"total": all_sessions.len() as u64,
}))
}
/// GET /api/sessions/live — currently running Claude Code processes.
///
/// Detection failures degrade to an empty list with an `error` field rather
/// than an HTTP error. At most `MAX_LIVE_SESSIONS` entries are serialized;
/// `truncated` tells the client whether the list was cut.
async fn live_sessions_handler() -> axum::Json<serde_json::Value> {
    use ccboard_core::detect_live_sessions;
    const MAX_LIVE_SESSIONS: usize = 20;
    let live = match detect_live_sessions() {
        Ok(list) => list,
        Err(e) => {
            return axum::Json(serde_json::json!({
                "sessions": [],
                "total": 0,
                "error": format!("Failed to detect live sessions: {}", e),
            }));
        }
    };
    let total = live.len();
    let mut sessions = Vec::new();
    for ls in live.iter().take(MAX_LIVE_SESSIONS) {
        sessions.push(serde_json::json!({
            "pid": ls.pid,
            "startTime": ls.start_time.to_rfc3339(),
            "workingDirectory": ls.working_directory,
            "command": ls.command,
            "cpuPercent": ls.cpu_percent,
            "memoryMb": ls.memory_mb,
            "tokens": ls.tokens,
            "sessionId": ls.session_id,
            "sessionName": ls.session_name,
        }));
    }
    axum::Json(serde_json::json!({
        "sessions": sessions,
        "total": total,
        "displayed": sessions.len(),
        "truncated": total > MAX_LIVE_SESSIONS,
    }))
}
/// GET /api/sessions — filterable, sortable, paginated session listing.
///
/// Filters (all optional): `search` (case-insensitive over id, project path
/// and first user message), `project` / `model` (case-sensitive substring),
/// `since` (relative window, e.g. "7d" / "24h"). Sort keys: "date",
/// "tokens", "cost" (estimated); unknown keys keep the store's order.
/// Pagination is zero-based `page` with `limit` capped at 100.
async fn sessions_handler(
    Query(params): Query<SessionsQuery>,
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    let mut all_sessions = store.all_sessions();
    // Case-insensitive free-text filter across three fields.
    if let Some(ref search) = params.search {
        let search_lower = search.to_lowercase();
        all_sessions.retain(|s| {
            s.id.to_lowercase().contains(&search_lower)
                || s.project_path.to_lowercase().contains(&search_lower)
                || s.first_user_message
                    .as_ref()
                    .map(|m| m.to_lowercase().contains(&search_lower))
                    .unwrap_or(false)
        });
    }
    // Substring filters (case-sensitive, unlike `search`).
    if let Some(ref project) = params.project {
        all_sessions.retain(|s| s.project_path.contains(project));
    }
    if let Some(ref model) = params.model {
        all_sessions.retain(|s| s.models_used.iter().any(|m| m.contains(model)));
    }
    // Relative time cutoff; sessions without a timestamp are dropped.
    // An unparseable `since` string silently skips the filter.
    if let Some(ref since) = params.since {
        if let Some(cutoff) = parse_since(since) {
            all_sessions.retain(|s| s.last_timestamp.map(|t| t >= cutoff).unwrap_or(false));
        }
    }
    match params.sort.as_str() {
        "date" => all_sessions.sort_by(|a, b| {
            if params.order == "asc" {
                a.last_timestamp.cmp(&b.last_timestamp)
            } else {
                b.last_timestamp.cmp(&a.last_timestamp)
            }
        }),
        "tokens" => all_sessions.sort_by(|a, b| {
            if params.order == "asc" {
                a.total_tokens.cmp(&b.total_tokens)
            } else {
                b.total_tokens.cmp(&a.total_tokens)
            }
        }),
        // Cost is recomputed inside each comparison (O(n log n) cost
        // calculations); acceptable for the session counts served here.
        "cost" => all_sessions.sort_by(|a, b| {
            let cost_a = calculate_session_cost(
                a.input_tokens,
                a.output_tokens,
                a.cache_creation_tokens,
                a.cache_read_tokens,
                &a.models_used,
            );
            let cost_b = calculate_session_cost(
                b.input_tokens,
                b.output_tokens,
                b.cache_creation_tokens,
                b.cache_read_tokens,
                &b.models_used,
            );
            if params.order == "asc" {
                cost_a
                    .partial_cmp(&cost_b)
                    .unwrap_or(std::cmp::Ordering::Equal)
            } else {
                cost_b
                    .partial_cmp(&cost_a)
                    .unwrap_or(std::cmp::Ordering::Equal)
            }
        }),
        // Unknown sort keys leave the store's ordering untouched.
        _ => {}
    }
    let total = all_sessions.len();
    // Cap the page size to bound response payloads.
    let page_size = params.limit.min(100);
    let offset = params.page * page_size;
    let sessions: Vec<_> = all_sessions
        .iter()
        .skip(offset)
        .take(page_size)
        .map(|s| session_to_json(s))
        .collect();
    axum::Json(serde_json::json!({
        "sessions": sessions,
        "total": total as u64,
        "page": params.page,
        "page_size": page_size,
    }))
}
fn session_to_json(s: &ccboard_core::models::SessionMetadata) -> serde_json::Value {
let cost = calculate_session_cost(
s.input_tokens,
s.output_tokens,
s.cache_creation_tokens,
s.cache_read_tokens,
&s.models_used,
);
serde_json::json!({
"id": s.id,
"date": s.last_timestamp.map(|t: chrono::DateTime<chrono::Utc>| t.to_rfc3339()),
"project": s.project_path,
"model": s.models_used.first().map(|s| s.as_str()).unwrap_or("unknown"),
"messages": s.message_count,
"tokens": s.total_tokens,
"input_tokens": s.input_tokens,
"output_tokens": s.output_tokens,
"cache_creation_tokens": s.cache_creation_tokens,
"cache_read_tokens": s.cache_read_tokens,
"cost": cost,
"status": "completed",
"first_timestamp": s.first_timestamp.map(|t: chrono::DateTime<chrono::Utc>| t.to_rfc3339()),
"duration_seconds": s.duration_seconds,
"preview": s.first_user_message,
})
}
/// Parses a relative time window like "7d" (days) or "24h" (hours) into an
/// absolute UTC cutoff relative to now. Any other format yields `None`.
fn parse_since(since: &str) -> Option<chrono::DateTime<chrono::Utc>> {
    let now = chrono::Utc::now();
    if let Some(days) = since.strip_suffix('d').and_then(|v| v.parse::<i64>().ok()) {
        return Some(now - chrono::Duration::days(days));
    }
    since
        .strip_suffix('h')
        .and_then(|v| v.parse::<i64>().ok())
        .map(|hours| now - chrono::Duration::hours(hours))
}
/// GET /api/plugins — plugin usage analytics aggregated across recent
/// sessions, using the known skill and command names from invocation stats.
async fn plugins_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    use ccboard_core::analytics::aggregate_plugin_usage;
    use chrono::Utc;
    // Bounded scan of the most recent sessions.
    let sessions = store.recent_sessions(10000);
    let stats = store.invocation_stats();
    let skills: Vec<String> = stats.skills.keys().cloned().collect();
    let commands: Vec<String> = stats.commands.keys().cloned().collect();
    let analytics = aggregate_plugin_usage(&sessions, &skills, &commands);
    axum::Json(serde_json::json!({
        "analytics": analytics,
        "generated_at": Utc::now().to_rfc3339(),
    }))
}
/// GET /api/analytics/suggestions — cost-saving suggestions derived from
/// plugin usage, per-tool token totals and the estimated total cost of the
/// scanned sessions.
async fn analytics_suggestions_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    use ccboard_core::analytics::{aggregate_plugin_usage, generate_cost_suggestions};
    use chrono::Utc;
    let sessions = store.recent_sessions(10000);
    // Sum token usage per tool across every scanned session.
    let mut tool_token_usage: std::collections::HashMap<String, u64> =
        std::collections::HashMap::new();
    for session in &sessions {
        for (tool, &tokens) in &session.tool_token_usage {
            *tool_token_usage.entry(tool.clone()).or_default() += tokens;
        }
    }
    let stats = store.invocation_stats();
    let skills: Vec<String> = stats.skills.keys().cloned().collect();
    let commands: Vec<String> = stats.commands.keys().cloned().collect();
    let plugin_analytics = aggregate_plugin_usage(&sessions, &skills, &commands);
    // Estimated total cost across the scanned sessions.
    let total_cost: f64 = sessions
        .iter()
        .map(|s| {
            calculate_session_cost(
                s.input_tokens,
                s.output_tokens,
                s.cache_creation_tokens,
                s.cache_read_tokens,
                &s.models_used,
            )
        })
        .sum();
    let suggestions = generate_cost_suggestions(&plugin_analytics, &tool_token_usage, total_cost);
    axum::Json(serde_json::json!({
        "suggestions": suggestions,
        "total_cost_analyzed": total_cost,
        "sessions_analyzed": sessions.len(),
        "generated_at": Utc::now().to_rfc3339(),
    }))
}
/// Estimates a session's cost in USD from its token counts.
///
/// Prices are hard-coded per million tokens as (input, output, cache write,
/// cache read). The model family is inferred from substrings in `models`:
/// "opus" takes precedence over "haiku"; anything else (including an empty
/// list) is priced at the Sonnet-class default.
fn calculate_session_cost(
    input_tokens: u64,
    output_tokens: u64,
    cache_creation_tokens: u64,
    cache_read_tokens: u64,
    models: &[String],
) -> f64 {
    const MILLION: f64 = 1_000_000.0;
    // (input, output, cache write, cache read) — USD per 1M tokens.
    let (input_price, output_price, cache_write_price, cache_read_price) =
        if models.iter().any(|m| m.contains("opus")) {
            (15.0, 75.0, 18.75, 1.5)
        } else if models.iter().any(|m| m.contains("haiku")) {
            (0.8, 4.0, 1.0, 0.08)
        } else {
            // Sonnet-class default.
            (3.0, 15.0, 3.75, 0.3)
        };
    (input_tokens as f64 / MILLION) * input_price
        + (output_tokens as f64 / MILLION) * output_price
        + (cache_creation_tokens as f64 / MILLION) * cache_write_price
        + (cache_read_tokens as f64 / MILLION) * cache_read_price
}
/// GET /api/config/merged — the merged settings, serialized directly to
/// JSON (a serialization failure degrades to JSON null).
async fn config_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    let settings = store.settings();
    let value = serde_json::to_value(&settings).unwrap_or_default();
    axum::Json(value)
}
/// GET /api/health — liveness summary: overall status, session count and
/// whether the stats object has loaded.
async fn health_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    let status = if store.degraded_state().is_healthy() {
        "healthy"
    } else {
        "degraded"
    };
    axum::Json(serde_json::json!({
        "status": status,
        "sessions": store.session_count(),
        "stats_loaded": store.stats().is_some(),
    }))
}
/// GET /api/search — full-text search over sessions, echoing the query
/// back alongside the ranked results.
async fn search_handler(
    Query(params): Query<SearchQuery>,
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    let hits = store.search_sessions(&params.q, params.limit);
    let mut items = Vec::with_capacity(hits.len());
    for hit in &hits {
        items.push(serde_json::json!({
            "session_id": hit.session_id,
            "path": hit.path.to_string_lossy(),
            "project": hit.project,
            "first_user_message": hit.first_user_message,
            "snippet": hit.snippet,
            "rank": hit.rank,
        }));
    }
    axum::Json(serde_json::json!({
        "results": items,
        "total": items.len(),
        "query": params.q,
    }))
}
/// GET /api/events — server-sent events stream of store updates.
///
/// Clones the store's event bus and hands it to the SSE stream builder,
/// giving each connected client its own subscription.
async fn sse_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::response::Sse<
    impl futures::stream::Stream<Item = Result<axum::response::sse::Event, std::convert::Infallible>>,
> {
    let event_bus = store.event_bus().clone();
    sse::create_sse_stream(event_bus)
}
/// GET /api/hooks — flattened list of every configured hook.
///
/// Walks the merged settings' hook map (event -> hook groups -> hook
/// definitions) and emits one JSON entry per definition. For commands
/// ending in `.sh`, the script file is read so a `# Description:` line can
/// be surfaced; a failed read simply leaves the script content out.
async fn hooks_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    let settings = store.settings();
    let mut hooks_list = Vec::new();
    if let Some(hooks_map) = &settings.merged.hooks {
        for (event_name, hook_groups) in hooks_map {
            for (group_idx, hook_group) in hook_groups.iter().enumerate() {
                for (hook_idx, hook_def) in hook_group.hooks.iter().enumerate() {
                    // Single hook per event: use the event name directly;
                    // otherwise disambiguate with group and hook indices.
                    let hook_name = if hook_groups.len() == 1 && hook_group.hooks.len() == 1 {
                        event_name.clone()
                    } else {
                        format!("{}-{}-{}", event_name, group_idx, hook_idx)
                    };
                    // Shell-script commands get their source attached
                    // (best-effort; read failure yields None content).
                    let (script_path, script_content) = if hook_def.command.ends_with(".sh") {
                        let path = std::path::Path::new(&hook_def.command);
                        let content = std::fs::read_to_string(path).ok();
                        (Some(hook_def.command.clone()), content)
                    } else {
                        (None, None)
                    };
                    hooks_list.push(serde_json::json!({
                        "name": hook_name,
                        "event": event_name,
                        "command": hook_def.command,
                        "description": extract_description(&hook_def.command, script_content.as_deref()),
                        // `async` is a keyword, hence the raw identifier.
                        "async": hook_def.r#async.unwrap_or(false),
                        "timeout": hook_def.timeout,
                        "cwd": hook_def.cwd,
                        "matcher": hook_group.matcher,
                        "scriptPath": script_path,
                        "scriptContent": script_content,
                    }));
                }
            }
        }
    }
    axum::Json(serde_json::json!({
        "hooks": hooks_list,
        "total": hooks_list.len(),
    }))
}
/// Derives a human-readable description for a hook: prefers a
/// `# Description: ...` line within the first 20 lines of the hook's
/// script (when available), falling back to the raw command string.
fn extract_description(command: &str, script_content: Option<&str>) -> Option<String> {
    let from_script = script_content.and_then(|content| {
        content.lines().take(20).find_map(|line| {
            let trimmed = line.trim();
            trimmed
                .starts_with("# Description:")
                .then(|| trimmed.trim_start_matches("# Description:").trim().to_string())
        })
    });
    from_script.or_else(|| Some(command.to_string()))
}
/// GET /api/mcp — configured MCP servers; an absent config yields an
/// empty list rather than an error.
async fn mcp_handler(
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    let Some(config) = store.mcp_config() else {
        return axum::Json(serde_json::json!({
            "servers": [],
            "total": 0,
        }));
    };
    let servers_list: Vec<_> = config
        .servers
        .iter()
        .map(|(name, server)| {
            serde_json::json!({
                "name": name,
                "command": server.display_command(),
                "serverType": if server.is_http() { "http" } else { "stdio" },
                "url": server.url,
                "args": server.args,
                "env": server.env,
                "hasEnv": !server.env.is_empty(),
            })
        })
        .collect();
    axum::Json(serde_json::json!({
        "servers": servers_list,
        "total": servers_list.len(),
    }))
}
/// Reads every `*.md` file directly inside `dir_path` and returns one JSON
/// item per file (name from the file stem, parsed frontmatter, body, full
/// path), sorted by name. Missing directories and unreadable files are
/// silently skipped.
fn scan_markdown_files(dir_path: &std::path::Path) -> Vec<serde_json::Value> {
    let mut items = Vec::new();
    if !dir_path.exists() {
        return items;
    }
    let Ok(entries) = std::fs::read_dir(dir_path) else {
        return items;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
            continue;
        }
        let Ok(content) = std::fs::read_to_string(&path) else {
            continue;
        };
        let (frontmatter, body) = parse_frontmatter(&content);
        let name = path
            .file_stem()
            .and_then(|stem| stem.to_str())
            .unwrap_or("unknown");
        items.push(serde_json::json!({
            "name": name,
            "frontmatter": frontmatter,
            "body": body,
            "path": path.to_string_lossy(),
        }));
    }
    // Stable alphabetical order by name.
    items.sort_by(|a, b| {
        let left = a.get("name").and_then(|v| v.as_str()).unwrap_or("");
        let right = b.get("name").and_then(|v| v.as_str()).unwrap_or("");
        left.cmp(right)
    });
    items
}
/// Splits markdown content into (YAML frontmatter, body).
///
/// Frontmatter must be delimited by `---` lines starting at the very first
/// line. Absent or unterminated frontmatter returns an empty JSON object
/// with the full content as body; YAML that fails to parse also degrades
/// to an empty object.
fn parse_frontmatter(content: &str) -> (serde_json::Value, String) {
    let lines: Vec<&str> = content.lines().collect();
    if lines.first().copied() != Some("---") {
        return (serde_json::json!({}), content.to_string());
    }
    match lines[1..].iter().position(|&line| line == "---") {
        Some(end_idx) => {
            // `end_idx` is relative to lines[1..]; the closing `---` sits at
            // absolute index end_idx + 1, so the body starts at end_idx + 2.
            let yaml_str = lines[1..=end_idx].join("\n");
            let frontmatter: serde_json::Value =
                serde_yaml::from_str(&yaml_str).unwrap_or_else(|_| serde_json::json!({}));
            (frontmatter, lines[end_idx + 2..].join("\n"))
        }
        None => (serde_json::json!({}), content.to_string()),
    }
}
/// GET /api/agents — markdown agent definitions from `~/.claude/agents`.
async fn agents_handler() -> axum::Json<serde_json::Value> {
    // Fall back to the current directory when HOME is unset.
    let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
    let agents_dir = std::path::Path::new(&home).join(".claude/agents");
    let items = scan_markdown_files(&agents_dir);
    axum::Json(serde_json::json!({
        "items": items,
        "total": items.len(),
    }))
}
/// GET /api/commands — markdown command definitions from `~/.claude/commands`.
async fn commands_handler() -> axum::Json<serde_json::Value> {
    // Fall back to the current directory when HOME is unset.
    let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
    let commands_dir = std::path::Path::new(&home).join(".claude/commands");
    let items = scan_markdown_files(&commands_dir);
    axum::Json(serde_json::json!({
        "items": items,
        "total": items.len(),
    }))
}
/// GET /api/skills — skill definitions from `~/.claude/skills`, where each
/// skill lives in its own subdirectory containing a SKILL.md.
async fn skills_handler() -> axum::Json<serde_json::Value> {
    // Fall back to the current directory when HOME is unset.
    let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
    let skills_dir = std::path::Path::new(&home).join(".claude/skills");
    let items = scan_skills_recursive(&skills_dir);
    axum::Json(serde_json::json!({
        "items": items,
        "total": items.len(),
    }))
}
/// Scans the immediate subdirectories of `dir_path`: each one containing a
/// `SKILL.md` yields one JSON item (directory name, parsed frontmatter,
/// body, path to SKILL.md), sorted by name. Despite the name, directories
/// are NOT traversed recursively — only one level deep.
fn scan_skills_recursive(dir_path: &std::path::Path) -> Vec<serde_json::Value> {
    let mut items = Vec::new();
    if !dir_path.exists() {
        return items;
    }
    let Ok(entries) = std::fs::read_dir(dir_path) else {
        return items;
    };
    for entry in entries.flatten() {
        let dir = entry.path();
        if !dir.is_dir() {
            continue;
        }
        let skill_file = dir.join("SKILL.md");
        if !skill_file.exists() {
            continue;
        }
        let Ok(content) = std::fs::read_to_string(&skill_file) else {
            continue;
        };
        let (frontmatter, body) = parse_frontmatter(&content);
        let name = dir
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("unknown");
        items.push(serde_json::json!({
            "name": name,
            "frontmatter": frontmatter,
            "body": body,
            "path": skill_file.to_string_lossy(),
        }));
    }
    // Stable alphabetical order by name.
    items.sort_by(|a, b| {
        let left = a.get("name").and_then(|v| v.as_str()).unwrap_or("");
        let right = b.get("name").and_then(|v| v.as_str()).unwrap_or("");
        left.cmp(right)
    });
    items
}
/// GET /api/task-graph — task nodes and dependency edges parsed from a
/// PLAN.md file.
///
/// Looks for PLAN.md in a few conventional locations (cwd/claudedocs,
/// cwd/.claude, ~/.claude/claudedocs, then cwd). Tasks become nodes;
/// dependencies are recovered by re-scanning the raw file text for each
/// task's `#### Task <id>:` section and extracting `Depends on:` ids.
async fn task_graph_handler(
    axum::extract::State(_store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    use ccboard_core::graph::TaskGraph;
    use ccboard_core::models::plan::PhaseStatus;
    use ccboard_core::parsers::PlanParser;
    let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
    let cwd = std::env::current_dir().unwrap_or_else(|_| std::path::PathBuf::from("."));
    // Candidate locations, checked in order of preference.
    let possible_paths = vec![
        cwd.join("claudedocs/PLAN.md"),
        cwd.join(".claude/PLAN.md"),
        std::path::PathBuf::from(&home).join(".claude/claudedocs/PLAN.md"),
    ];
    let plan_path = possible_paths
        .into_iter()
        .find(|p| p.exists())
        .unwrap_or_else(|| cwd.join("PLAN.md"));
    let plan_result = PlanParser::parse_file(&plan_path);
    match plan_result {
        Ok(Some(plan)) => {
            // Seed the graph with every task from every phase.
            let mut graph = TaskGraph::new();
            for phase in &plan.phases {
                for task in &phase.tasks {
                    graph.add_task(task.clone());
                }
            }
            // Dependency extraction works on the raw file text: locate each
            // task's section and scan it for dependency declarations.
            if let Ok(plan_content) = std::fs::read_to_string(&plan_path) {
                for phase in &plan.phases {
                    for task in &phase.tasks {
                        let task_header = format!("#### Task {}:", task.id);
                        if let Some(pos) = plan_content.find(&task_header) {
                            let rest = &plan_content[pos..];
                            // Skip past the first 100 characters (by char
                            // boundary, not bytes) so the section's own
                            // header line is not matched as a terminator.
                            let skip_offset = rest
                                .char_indices()
                                .nth(100)
                                .map(|(i, _)| i)
                                .unwrap_or(rest.len());
                            // Section ends at the next heading of level 4,
                            // 3 or 2 — whichever is found first in that
                            // order — or at end of file.
                            let end_pos = rest
                                .get(skip_offset..)
                                .and_then(|s| s.find("\n####").map(|p| p + skip_offset))
                                .or_else(|| {
                                    rest.get(skip_offset..)
                                        .and_then(|s| s.find("\n###").map(|p| p + skip_offset))
                                })
                                .or_else(|| {
                                    rest.get(skip_offset..)
                                        .and_then(|s| s.find("\n##").map(|p| p + skip_offset))
                                })
                                .unwrap_or(rest.len());
                            let task_content = &rest[..end_pos];
                            let deps = extract_dependencies(task_content);
                            // Unknown dependency ids are silently ignored.
                            for dep_id in deps {
                                let _ = graph.add_dependency(&dep_id, &task.id);
                            }
                        }
                    }
                }
            }
            // One node per task; status is inherited from the owning phase.
            let nodes: Vec<_> = plan
                .phases
                .iter()
                .flat_map(|phase| {
                    phase.tasks.iter().map(move |task| {
                        let status = match phase.status {
                            PhaseStatus::Complete => "Complete",
                            PhaseStatus::InProgress => "InProgress",
                            PhaseStatus::Future => "Future",
                        };
                        serde_json::json!({
                            "id": task.id,
                            "label": task.title,
                            "phase": phase.id,
                            "status": status,
                            "duration": task.duration,
                            "description": task.description,
                            "priority": task.priority,
                            "difficulty": task.difficulty,
                            "crateName": task.crate_name,
                            "issue": task.issue,
                        })
                    })
                })
                .collect();
            // One edge per (dependency -> dependent) pair.
            let mut edges = Vec::new();
            for task in graph.tasks() {
                let dependents = graph.dependents(&task.id);
                for dependent in dependents {
                    edges.push(serde_json::json!({
                        "source": task.id,
                        "target": dependent,
                        "type": "blocks",
                    }));
                }
            }
            axum::Json(serde_json::json!({
                "nodes": nodes,
                "edges": edges,
            }))
        }
        Ok(None) => {
            axum::Json(serde_json::json!({
                "nodes": [],
                "edges": [],
                "error": "PLAN.md not found",
            }))
        }
        Err(e) => {
            axum::Json(serde_json::json!({
                "nodes": [],
                "edges": [],
                "error": format!("Failed to parse PLAN.md: {}", e),
            }))
        }
    }
}
async fn activity_violations_handler(
Query(params): Query<ViolationsQuery>,
axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
let mut violations = store.all_violations();
if let Some(ref min_sev) = params.min_severity {
violations.retain(|a| match min_sev.as_str() {
"Critical" => matches!(a.severity, AlertSeverity::Critical),
"Warning" => !matches!(a.severity, AlertSeverity::Info),
_ => true, });
}
let total = violations.len();
let critical_count = violations
.iter()
.filter(|a| matches!(a.severity, AlertSeverity::Critical))
.count();
let warning_count = violations
.iter()
.filter(|a| matches!(a.severity, AlertSeverity::Warning))
.count();
let info_count = violations
.iter()
.filter(|a| matches!(a.severity, AlertSeverity::Info))
.count();
violations.truncate(params.limit);
let serialized: Vec<_> = violations
.iter()
.map(|a| {
serde_json::json!({
"session_id": a.session_id,
"timestamp": a.timestamp.to_rfc3339(),
"severity": format!("{:?}", a.severity),
"category": format!("{:?}", a.category),
"detail": a.detail,
"action_hint": a.category.action_hint(),
})
})
.collect();
axum::Json(serde_json::json!({
"violations": serialized,
"total": total,
"displayed": serialized.len(),
"critical_count": critical_count,
"warning_count": warning_count,
"info_count": info_count,
}))
}
/// GET /api/activity/{session_id} — per-session activity analysis: file
/// accesses, bash commands, network calls and alerts, plus summary counts.
/// Analysis failures return `{"error": ...}` rather than an HTTP error.
async fn activity_session_handler(
    axum::extract::Path(session_id): axum::extract::Path<String>,
    axum::extract::State(store): axum::extract::State<Arc<DataStore>>,
) -> axum::Json<serde_json::Value> {
    let summary = match store.analyze_session(&session_id).await {
        Ok(summary) => summary,
        Err(e) => {
            return axum::Json(serde_json::json!({
                "error": format!("Failed to analyze session: {}", e),
            }));
        }
    };
    let mut alerts = Vec::with_capacity(summary.alerts.len());
    for a in &summary.alerts {
        alerts.push(serde_json::json!({
            "session_id": a.session_id,
            "timestamp": a.timestamp.to_rfc3339(),
            "severity": format!("{:?}", a.severity),
            "category": format!("{:?}", a.category),
            "detail": a.detail,
            "action_hint": a.category.action_hint(),
        }));
    }
    axum::Json(serde_json::json!({
        "session_id": session_id,
        "file_accesses": summary.file_accesses,
        "bash_commands": summary.bash_commands,
        "network_calls": summary.network_calls,
        "alerts": alerts,
        "stats": {
            "file_accesses": summary.file_accesses.len(),
            "bash_commands": summary.bash_commands.len(),
            "network_calls": summary.network_calls.len(),
            "alerts": alerts.len(),
        }
    }))
}
/// Extracts task-id dependencies from a task's PLAN.md section text.
///
/// Recognizes a `Depends on:` line listing comma-separated task ids, e.g.
/// `Depends on: T1, T2 (optional note)`. For each comma-separated part the
/// first whitespace-separated token is taken as the id, provided it starts
/// with an alphabetic character (which skips `#123` issue refs and noise).
/// Only the first `Depends on:` occurrence is honored, and only the
/// remainder of that same line is scanned.
///
/// Returns an empty vector when no dependencies are declared.
fn extract_dependencies(description: &str) -> Vec<String> {
    let mut deps = Vec::new();
    if let Some(start) = description.find("Depends on:") {
        let rest = &description[start + "Depends on:".len()..];
        // Only the rest of the same line counts as the dependency list.
        let dep_text = rest.lines().next().unwrap_or("");
        for part in dep_text.split(',') {
            let trimmed = part.trim();
            // Task ids start with a letter; anything else is ignored.
            let starts_alpha = trimmed
                .chars()
                .next()
                .map(|c| c.is_alphabetic())
                .unwrap_or(false);
            if starts_alpha {
                if let Some(task_id) = trimmed.split_whitespace().next() {
                    deps.push(task_id.to_string());
                }
            }
        }
    }
    // TODO(review): a previous version also scanned `Blocked by: #N` lines,
    // but the parsed issue numbers were discarded without ever being added
    // to `deps` (dead code, removed here). Mapping tracker issue numbers to
    // task ids would need information this function does not have.
    deps
}