// agentox_core/checks/security/resource_exhaustion.rs
use crate::checks::runner::{Check, CheckContext};
use crate::checks::security::constants::exhaustion_probe_profile;
use crate::checks::types::{CheckCategory, CheckResult, Severity};
use crate::protocol::jsonrpc::JsonRpcRequest;
use std::time::Instant;

/// SEC-004: resource-exhaustion guardrail.
///
/// Sends a bounded burst of `tools/list` requests carrying an oversized
/// argument and verifies the server stays responsive (and recovers)
/// rather than hanging or crashing. Stateless unit struct; all behavior
/// lives in the `Check` impl below.
pub struct ResourceExhaustionGuardrail;

11#[async_trait::async_trait]
12impl Check for ResourceExhaustionGuardrail {
13 fn id(&self) -> &str {
14 "SEC-004"
15 }
16
17 fn name(&self) -> &str {
18 "Resource-exhaustion guardrail"
19 }
20
21 fn category(&self) -> CheckCategory {
22 CheckCategory::Security
23 }
24
25 async fn run(&self, ctx: &mut CheckContext) -> Vec<CheckResult> {
26 let desc = "Server should remain responsive under bounded high-volume/large-input probes";
27 let profile = exhaustion_probe_profile(ctx.request_timeout);
28 let timeout_ms = ctx.request_timeout.as_millis();
29
30 let mut session = match ctx.disposable_session().await {
31 Ok(s) => s,
32 Err(e) => {
33 return vec![CheckResult::fail(
34 self.id(),
35 self.name(),
36 self.category(),
37 Severity::Critical,
38 desc,
39 format!("Could not create disposable session for burst probe: {e}"),
40 )];
41 }
42 };
43
44 let large = "A".repeat(profile.large_arg_bytes);
45 let mut max_latency_ms = 0_u128;
46
47 for i in 0..profile.burst_requests {
48 let req = JsonRpcRequest::new(
49 50_000 + i as i64,
50 "tools/list",
51 Some(serde_json::json!({ "cursor": large })),
52 );
53 let started = Instant::now();
54 match session.send_request(&req).await {
55 Ok(_) => {
56 max_latency_ms = max_latency_ms.max(started.elapsed().as_millis());
57 }
58 Err(e) => {
59 let _ = session.shutdown().await;
60 return vec![CheckResult::fail(
61 self.id(),
62 self.name(),
63 self.category(),
64 Severity::High,
65 desc,
66 format!("Server became unstable during bounded burst probe: {e}"),
67 )
68 .with_evidence(serde_json::json!({
69 "request_index": i,
70 "burst_requests": profile.burst_requests,
71 "arg_bytes": profile.large_arg_bytes,
72 "timeout_ms": timeout_ms
73 }))];
74 }
75 }
76 }
77
78 if max_latency_ms > timeout_ms + 50 {
79 let _ = session.shutdown().await;
80 return vec![CheckResult::fail(
81 self.id(),
82 self.name(),
83 self.category(),
84 Severity::Medium,
85 desc,
86 format!(
87 "Latency exceeded deterministic timeout budget: max={}ms budget={}ms",
88 max_latency_ms, timeout_ms
89 ),
90 )
91 .with_evidence(serde_json::json!({
92 "burst_requests": profile.burst_requests,
93 "arg_bytes": profile.large_arg_bytes,
94 "max_latency_ms": max_latency_ms,
95 "timeout_ms": timeout_ms
96 }))];
97 }
98
99 let ping = JsonRpcRequest::new(90_001, "tools/list", Some(serde_json::json!({})));
101 let result = match session.send_request(&ping).await {
102 Ok(_) => CheckResult::pass(self.id(), self.name(), self.category(), desc)
103 .with_evidence(serde_json::json!({
104 "burst_requests": profile.burst_requests,
105 "arg_bytes": profile.large_arg_bytes,
106 "max_latency_ms": max_latency_ms,
107 "timeout_ms": timeout_ms
108 })),
109 Err(e) => CheckResult::fail(
110 self.id(),
111 self.name(),
112 self.category(),
113 Severity::Critical,
114 desc,
115 format!("Server did not recover after burst probe: {e}"),
116 )
117 .with_evidence(serde_json::json!({
118 "burst_requests": profile.burst_requests,
119 "arg_bytes": profile.large_arg_bytes,
120 "timeout_ms": timeout_ms
121 })),
122 };
123
124 let _ = session.shutdown().await;
125 vec![result]
126 }
127}