strange_loop/nano_agent/critic.rs

use super::{NanoAgent, NanoBus, Message, TickResult};
use super::bus::MessageData;
use std::collections::HashMap;
use std::sync::Arc;
use parking_lot::RwLock;
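/// A policy adjustment proposed by the critic: which agent to target, which
/// parameter to change, and the new value.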
#[derive(Debug, Clone)]
pub struct PolicyUpdate {
    pub agent_name: &'static str,
    pub parameter: String,
    pub value: PolicyValue,
}
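/// Typed value carried by a `PolicyUpdate`.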
#[derive(Debug, Clone)]
pub enum PolicyValue {
    U64(u64),
    F64(f64),
    Bool(bool),
}
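/// Reflective critic: inspects recent per-agent performance traces, proposes
/// `PolicyUpdate`s when an agent misbehaves, and adapts its own learning rate
/// based on whether observed latency is improving.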
pub struct CriticReflector {
    trace_window: usize,
    recent_traces: Vec<AgentPerformance>,
    policy_cache: Arc<RwLock<HashMap<String, PolicyValue>>>,
    learning_rate: f64,
}
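/// Aggregated performance sample for a single agent.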
#[derive(Debug, Clone)]
pub struct AgentPerformance {
    pub agent_name: &'static str,
    pub avg_latency_ns: f64,
    pub message_rate: f64,
    pub budget_violations: u32,
}
impl CriticReflector {
    pub fn new(trace_window: usize, learning_rate: f64) -> Self {
        Self {
            trace_window,
            recent_traces: Vec::with_capacity(trace_window),
            policy_cache: Arc::new(RwLock::new(HashMap::new())),
            learning_rate,
        }
    }
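    /// Inspect a batch of performance traces and propose policy updates:
    /// budget violations reduce tick frequency, high latency lowers
    /// complexity, and a high message rate requests batching. Traces are
    /// retained (up to `trace_window`) for later use by `learn()`.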
    pub fn analyze(&mut self, traces: &[AgentPerformance]) -> Vec<PolicyUpdate> {
        let mut updates = Vec::new();

        for trace in traces {
            if trace.budget_violations > 0 {
                updates.push(PolicyUpdate {
                    agent_name: trace.agent_name,
                    parameter: "tick_frequency".to_string(),
                    value: PolicyValue::F64(0.9),
                });
            }

            if trace.avg_latency_ns > 1000.0 {
                updates.push(PolicyUpdate {
                    agent_name: trace.agent_name,
                    parameter: "complexity".to_string(),
                    value: PolicyValue::U64(1),
                });
            }

            if trace.message_rate > 1000.0 {
                updates.push(PolicyUpdate {
                    agent_name: trace.agent_name,
                    parameter: "batch_size".to_string(),
                    value: PolicyValue::U64(10),
                });
            }
        }

        self.recent_traces.extend_from_slice(traces);
        if self.recent_traces.len() > self.trace_window {
            self.recent_traces.drain(0..self.recent_traces.len() - self.trace_window);
        }

        updates
    }
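    /// Compare the five newest retained traces against the five oldest and
    /// return the relative latency improvement; the learning rate is nudged
    /// up on improvement and down otherwise.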
    pub fn learn(&mut self) -> f64 {
        if self.recent_traces.is_empty() {
            return 0.0;
        }

        // Overall average latency across the retained window (currently
        // informational only, hence the leading underscore).
        let total_latency: f64 = self.recent_traces.iter()
            .map(|t| t.avg_latency_ns)
            .sum();
        let _avg_latency = total_latency / self.recent_traces.len() as f64;

        let improvement = if self.recent_traces.len() > 10 {
            let recent_avg = self.recent_traces[self.recent_traces.len() - 5..]
                .iter()
                .map(|t| t.avg_latency_ns)
                .sum::<f64>() / 5.0;

            let past_avg = self.recent_traces[..5]
                .iter()
                .map(|t| t.avg_latency_ns)
                .sum::<f64>() / 5.0;

            (past_avg - recent_avg) / past_avg
        } else {
            0.0
        };

        if improvement > 0.0 {
            self.learning_rate *= 1.1;
        } else {
            self.learning_rate *= 0.9;
        }

        improvement
    }
    pub fn get_policy(&self, key: &str) -> Option<PolicyValue> {
        self.policy_cache.read().get(key).cloned()
    }

    pub fn set_policy(&mut self, key: String, value: PolicyValue) {
        self.policy_cache.write().insert(key, value);
    }
}
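// NanoAgent integration: each tick drains up to 16 bus messages, publishes a
// "critic:improvement" reading on whole-second timestamps, and applies policy
// updates to the critic itself via `reflect`.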
impl NanoAgent for CriticReflector {
    fn name(&self) -> &'static str {
        "critic_reflector"
    }
    fn tick(&mut self, now_ns: u128, bus: &NanoBus) -> TickResult {
        let mut messages_recv = 0u32;
        let mut messages_sent = 0u32;

        // Drain up to 16 pending bus messages per tick.
        for _ in 0..16 {
            if let Some(msg) = bus.try_recv() {
                messages_recv += 1;

                if msg.topic == "metrics:agent" {
                    // Latency sample arrives here but is not yet consumed.
                    if let MessageData::F64(_latency) = msg.data {}
                }
            } else {
                break;
            }
        }
        if now_ns % 1_000_000_000 == 0 {
            let improvement = self.learn();

            bus.publish(Message {
                topic: "critic:improvement",
                data: MessageData::F64(improvement),
                timestamp_ns: now_ns,
            });
            messages_sent += 1;
        }

        TickResult {
            cycles: 0,
            messages_sent,
            messages_recv,
            budget_used_ns: 100,
        }
    }
    fn budget_ns(&self) -> u128 {
        10_000
    }

    fn reflect(&mut self, update: PolicyUpdate) {
        self.set_policy(update.parameter, update.value);
    }
}
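
// A minimal sketch of exercising the critic in isolation, assuming the
// `AgentPerformance` fields are public as declared above; it avoids the
// NanoBus entirely, so only `analyze()` is covered. The agent name and
// trace values used here are illustrative, not taken from the real system.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn analyze_flags_budget_and_latency_problems() {
        let mut critic = CriticReflector::new(64, 0.05);
        let traces = vec![AgentPerformance {
            agent_name: "hypothetical_agent",
            avg_latency_ns: 2_500.0, // above the 1000 ns latency threshold
            message_rate: 50.0,      // below the batching threshold
            budget_violations: 3,    // triggers a tick_frequency update
        }];

        let updates = critic.analyze(&traces);

        // One update for the budget violation, one for the high latency.
        assert_eq!(updates.len(), 2);
        assert_eq!(updates[0].parameter, "tick_frequency");
        assert_eq!(updates[1].parameter, "complexity");
    }
}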