// do_memory_mcp/server/tools/monitoring.rs
use anyhow::Result;
use serde_json::json;

use crate::unified_sandbox::SandboxBackend;

9impl crate::server::MemoryMCPServer {
10 pub async fn health_check(&self) -> Result<serde_json::Value> {
16 self.track_tool_usage("health_check").await;
17
18 let request_id = format!(
20 "health_check_{}",
21 std::time::SystemTime::now()
22 .duration_since(std::time::UNIX_EPOCH)
23 .unwrap_or_default()
24 .as_nanos()
25 );
26 self.monitoring
27 .start_request(request_id.clone(), "health_check".to_string())
28 .await;
29
30 let mut result = self.monitoring_endpoints.health_check().await?;
31
32 let backend = self.sandbox.backend();
34 let unified_metrics = self.sandbox.get_metrics().await;
35 let health = self.sandbox.get_health_status().await;
36
37 let sandbox_json = json!({
38 "backend": match backend {
39 SandboxBackend::NodeJs => "nodejs",
40 SandboxBackend::Wasm => "wasm",
41 SandboxBackend::Hybrid { .. } => "hybrid",
42 },
43 "wasmtime_pool": health.wasmtime_pool_stats.map(|m| json!({
44 "total_executions": m.total_executions,
45 "successful_executions": m.successful_executions,
46 "failed_executions": m.failed_executions,
47 "timeout_count": m.timeout_count,
48 "security_violations": m.security_violations,
49 "avg_execution_time_ms": m.avg_execution_time_ms,
50 "peak_memory_bytes": m.peak_memory_bytes,
51 })),
52 "routing": json!({
53 "total_executions": unified_metrics.total_executions,
54 "node_executions": unified_metrics.node_executions,
55 "wasm_executions": unified_metrics.wasm_executions,
56 "node_success_rate": unified_metrics.node_success_rate,
57 "wasm_success_rate": unified_metrics.wasm_success_rate,
58 "node_avg_latency_ms": unified_metrics.node_avg_latency_ms,
59 "wasm_avg_latency_ms": unified_metrics.wasm_avg_latency_ms,
60 })
61 });
62
63 if let Some(obj) = result.as_object_mut() {
64 obj.insert("sandbox".to_string(), sandbox_json);
65 }
66
67 self.monitoring.end_request(&request_id, true, None).await;
69
70 Ok(result)
71 }
72
73 pub async fn get_metrics(&self, metric_type: Option<String>) -> Result<serde_json::Value> {
83 self.track_tool_usage("get_metrics").await;
84
85 let request_id = format!(
87 "get_metrics_{}",
88 std::time::SystemTime::now()
89 .duration_since(std::time::UNIX_EPOCH)
90 .unwrap_or_default()
91 .as_nanos()
92 );
93 self.monitoring
94 .start_request(request_id.clone(), "get_metrics".to_string())
95 .await;
96
97 let result = match metric_type.as_deref() {
98 Some("performance") => self.monitoring_endpoints.performance_metrics().await,
99 Some("episodes") => self.monitoring_endpoints.episode_metrics().await,
100 Some("system") => self.monitoring_endpoints.system_info().await,
101 _ => self.monitoring_endpoints.metrics().await,
102 };
103
104 self.monitoring
106 .end_request(&request_id, result.is_ok(), None)
107 .await;
108
109 result
110 }
111}