// ruvector_mincut/snn/strange_loop.rs

//! # Layer 2: Strange Loop Self-Modification Protocol
//!
//! Implements recursive self-observation for meta-cognitive graph optimization.
//!
//! ## Hierarchical Levels
//!
//! - **Level 0**: Object Graph - computational units and data flow
//! - **Level 1**: Meta-Graph - observes Level 0 statistics
//! - **Level 2**: Meta-Meta-Graph - observes learning dynamics
//!
//! The "strange loop" closes when Level 2 actions modify Level 0 structure,
//! which changes Level 1 observations, which triggers Level 2 re-evaluation.

14use super::{
15    network::{LayerConfig, NetworkConfig, SpikingNetwork},
16    neuron::{LIFNeuron, NeuronConfig, NeuronPopulation},
17    SimTime, Spike,
18};
19use crate::graph::{DynamicGraph, VertexId};
20use std::collections::VecDeque;
21
/// Configuration for the strange loop system.
///
/// Sizes the three meta-levels and sets the thresholds used by
/// [`MetaNeuron::modulate`] and the strange-loop pruning pass.
#[derive(Debug, Clone)]
pub struct StrangeLoopConfig {
    /// Number of Level 0 neurons (matches graph vertices)
    pub level0_size: usize,
    /// Number of Level 1 observer neurons
    pub level1_size: usize,
    /// Number of Level 2 meta-neurons
    pub level2_size: usize,
    /// Time step for simulation (added to `time` each loop iteration)
    pub dt: f64,
    /// Threshold for strengthen action
    pub strengthen_threshold: f64,
    /// Threshold for prune action
    pub prune_threshold: f64,
    /// Minimum mincut contribution to keep edge
    pub prune_weight_threshold: f64,
    /// History window for observations (bounds meta-neuron history length)
    pub observation_window: usize,
}
42
43impl Default for StrangeLoopConfig {
44    fn default() -> Self {
45        Self {
46            level0_size: 100,
47            level1_size: 20,
48            level2_size: 5,
49            dt: 1.0,
50            strengthen_threshold: 0.7,
51            prune_threshold: 0.3,
52            prune_weight_threshold: 0.1,
53            observation_window: 100,
54        }
55    }
56}
57
/// Meta-level in the hierarchy.
///
/// Identifies which of the three strange-loop layers an entity belongs to.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum MetaLevel {
    /// Level 0: Object graph being optimized
    Object,
    /// Level 1: Observer SNN watching Level 0
    Observer,
    /// Level 2: Meta-neuron modulating observer
    Meta,
}
68
/// Actions that Level 2 can take to modify Level 0.
///
/// Produced by [`MetaNeuron::modulate`] and applied to the object graph in
/// the strange-loop step, closing the Level 2 → Level 0 feedback path.
#[derive(Debug, Clone)]
pub enum MetaAction {
    /// Strengthen edges where observer activity is high
    /// (payload is the correlation threshold for pair selection)
    Strengthen(f64),
    /// Remove edges below mincut contribution threshold
    /// (payload is the contribution cutoff)
    Prune(f64),
    /// Radical reorganization using current mincut as seed
    Restructure,
    /// No action needed
    NoOp,
}
81
/// Cross-level influence matrix.
///
/// Weight matrices coupling the three levels; each is indexed
/// `[source_index][target_index]`.
#[derive(Debug, Clone)]
pub struct CrossLevelInfluence {
    /// Level 0 → Level 1 influence weights (vertices × observer neurons)
    pub l0_to_l1: Vec<Vec<f64>>,
    /// Level 1 → Level 2 influence weights (observer neurons × meta-neurons)
    pub l1_to_l2: Vec<Vec<f64>>,
    /// Level 2 → Level 0 influence (the strange part)
    pub l2_to_l0: Vec<Vec<f64>>,
}
92
/// Meta-neuron for Level 2 decisions.
///
/// Tracks a sliding window of Level 1 observer summaries and converts the
/// short-term vs. long-term trend into a [`MetaAction`].
#[derive(Debug, Clone)]
pub struct MetaNeuron {
    /// ID of this meta-neuron
    pub id: usize,
    /// Internal trend state: recent mean minus whole-window mean
    pub state: f64,
    /// Decision threshold for choosing between actions
    pub threshold: f64,
    /// History of observer summaries (length bounded by `window`)
    history: VecDeque<f64>,
    /// Window size for decisions
    window: usize,
}
107
108impl MetaNeuron {
109    /// Create a new meta-neuron
110    pub fn new(id: usize, window: usize) -> Self {
111        Self {
112            id,
113            state: 0.0,
114            threshold: 0.5,
115            history: VecDeque::with_capacity(window),
116            window,
117        }
118    }
119
120    /// Process observer summary and produce modulation signal
121    pub fn modulate(&mut self, observer_summary: f64) -> MetaAction {
122        // Update history
123        self.history.push_back(observer_summary);
124        if self.history.len() > self.window {
125            self.history.pop_front();
126        }
127
128        // Compute trend
129        let mean: f64 = self.history.iter().sum::<f64>() / self.history.len() as f64;
130        let recent_mean: f64 = self.history.iter().rev().take(10).sum::<f64>()
131            / 10.0f64.min(self.history.len() as f64);
132
133        self.state = recent_mean - mean;
134
135        // Decide action based on state
136        if self.state > self.threshold {
137            MetaAction::Strengthen(observer_summary)
138        } else if self.state < -self.threshold {
139            MetaAction::Prune(observer_summary.abs())
140        } else if observer_summary.abs() > 2.0 * self.threshold {
141            MetaAction::Restructure
142        } else {
143            MetaAction::NoOp
144        }
145    }
146
147    /// Reset meta-neuron state
148    pub fn reset(&mut self) {
149        self.state = 0.0;
150        self.history.clear();
151    }
152}
153
/// Meta-Cognitive MinCut with Strange Loop.
///
/// Holds the three-level hierarchy — the object graph (Level 0), an
/// observer SNN (Level 1), and a pool of meta-neurons (Level 2) — plus the
/// cross-level influence weights that couple them.
pub struct MetaCognitiveMinCut {
    /// Level 0: Object graph being optimized
    object_graph: DynamicGraph,
    /// Level 1: SNN observing object graph statistics
    observer_snn: SpikingNetwork,
    /// Level 2: Meta-neurons modulating observer behavior
    meta_neurons: Vec<MetaNeuron>,
    /// Cross-level influence matrix (sized at construction time)
    influence: CrossLevelInfluence,
    /// Configuration
    config: StrangeLoopConfig,
    /// Current simulation time (advanced by `config.dt` each step)
    time: SimTime,
    /// History of mincut values.
    /// NOTE(review): never written to in this impl — only cleared on reset.
    mincut_history: VecDeque<f64>,
    /// History of actions taken (one entry per strange-loop step)
    action_history: Vec<MetaAction>,
}
173
174impl MetaCognitiveMinCut {
175    /// Create a new meta-cognitive mincut system
176    pub fn new(graph: DynamicGraph, config: StrangeLoopConfig) -> Self {
177        let n = graph.num_vertices();
178
179        // Level 1: Observer SNN
180        let observer_config = NetworkConfig {
181            layers: vec![LayerConfig::new(config.level1_size)],
182            ..NetworkConfig::default()
183        };
184        let observer_snn = SpikingNetwork::new(observer_config);
185
186        // Level 2: Meta-neurons
187        let meta_neurons: Vec<_> = (0..config.level2_size)
188            .map(|i| MetaNeuron::new(i, config.observation_window))
189            .collect();
190
191        // Initialize cross-level influence
192        let influence = CrossLevelInfluence {
193            l0_to_l1: vec![vec![0.1; config.level1_size]; n],
194            l1_to_l2: vec![vec![0.1; config.level2_size]; config.level1_size],
195            l2_to_l0: vec![vec![0.1; n]; config.level2_size],
196        };
197
198        let observation_window = config.observation_window;
199
200        Self {
201            object_graph: graph,
202            observer_snn,
203            meta_neurons,
204            influence,
205            config,
206            time: 0.0,
207            mincut_history: VecDeque::with_capacity(observation_window),
208            action_history: Vec::new(),
209        }
210    }
211
212    /// Encode graph state as spike pattern for Level 1
213    fn encode_graph_state(&self) -> Vec<f64> {
214        let vertices = self.object_graph.vertices();
215        let mut encoding = vec![0.0; self.config.level1_size];
216
217        for (i, v) in vertices.iter().enumerate() {
218            let degree = self.object_graph.degree(*v) as f64;
219            let weight_sum: f64 = self
220                .object_graph
221                .neighbors(*v)
222                .iter()
223                .filter_map(|(_, _)| Some(1.0))
224                .sum();
225
226            // Project to observer neurons
227            for j in 0..encoding.len() {
228                if i < self.influence.l0_to_l1.len() && j < self.influence.l0_to_l1[i].len() {
229                    encoding[j] += self.influence.l0_to_l1[i][j] * (degree + weight_sum);
230                }
231            }
232        }
233
234        encoding
235    }
236
237    /// Get population rate as observer summary
238    fn observer_summary(&self) -> f64 {
239        self.observer_snn.layer_rate(0, 100.0)
240    }
241
242    /// Find high-correlation pairs in observer SNN
243    fn high_correlation_pairs(&self, threshold: f64) -> Vec<(VertexId, VertexId)> {
244        let sync_matrix = self.observer_snn.synchrony_matrix();
245        let vertices = self.object_graph.vertices();
246        let mut pairs = Vec::new();
247
248        for i in 0..sync_matrix.len().min(vertices.len()) {
249            for j in (i + 1)..sync_matrix[i].len().min(vertices.len()) {
250                if sync_matrix[i][j] > threshold {
251                    pairs.push((vertices[i], vertices[j]));
252                }
253            }
254        }
255
256        pairs
257    }
258
259    /// Compute mincut contribution for each edge (simplified)
260    fn mincut_contribution(&self, edge: &crate::graph::Edge) -> f64 {
261        // Simplified: degree-based contribution
262        let src_degree = self.object_graph.degree(edge.source) as f64;
263        let tgt_degree = self.object_graph.degree(edge.target) as f64;
264
265        edge.weight / (src_degree + tgt_degree).max(1.0)
266    }
267
268    /// Rebuild graph from partition (simplified)
269    fn rebuild_from_partition(&mut self, vertices: &[VertexId]) {
270        // Keep only edges within the partition
271        let vertex_set: std::collections::HashSet<_> = vertices.iter().collect();
272
273        let edges_to_remove: Vec<_> = self
274            .object_graph
275            .edges()
276            .iter()
277            .filter(|e| !vertex_set.contains(&e.source) || !vertex_set.contains(&e.target))
278            .map(|e| (e.source, e.target))
279            .collect();
280
281        for (u, v) in edges_to_remove {
282            let _ = self.object_graph.delete_edge(u, v);
283        }
284    }
285
286    /// Execute one strange loop iteration
287    pub fn strange_loop_step(&mut self) -> MetaAction {
288        // Level 0 → Level 1: Encode graph state as spike patterns
289        let graph_state = self.encode_graph_state();
290        self.observer_snn.inject_current(&graph_state);
291
292        // Level 1 dynamics: Observer SNN processes graph state
293        let _observer_spikes = self.observer_snn.step();
294
295        // Level 1 → Level 2: Meta-neuron receives observer output
296        let observer_summary = self.observer_summary();
297
298        // Level 2 decision: Aggregate meta-neuron decisions
299        let mut actions = Vec::new();
300        for meta_neuron in &mut self.meta_neurons {
301            actions.push(meta_neuron.modulate(observer_summary));
302        }
303
304        // Select dominant action (simplified: first non-NoOp)
305        let action = actions
306            .into_iter()
307            .find(|a| !matches!(a, MetaAction::NoOp))
308            .unwrap_or(MetaAction::NoOp);
309
310        // Level 2 → Level 0: Close the strange loop
311        match &action {
312            MetaAction::Strengthen(threshold) => {
313                // Add edges where observer activity is high
314                let hot_pairs = self.high_correlation_pairs(*threshold);
315                for (u, v) in hot_pairs {
316                    if !self.object_graph.has_edge(u, v) {
317                        let _ = self.object_graph.insert_edge(u, v, 1.0);
318                    } else {
319                        // Strengthen existing edge
320                        if let Some(edge) = self.object_graph.get_edge(u, v) {
321                            let _ = self
322                                .object_graph
323                                .update_edge_weight(u, v, edge.weight * 1.1);
324                        }
325                    }
326                }
327            }
328            MetaAction::Prune(threshold) => {
329                // Remove edges below mincut contribution threshold
330                let weak_edges: Vec<_> = self
331                    .object_graph
332                    .edges()
333                    .iter()
334                    .filter(|e| self.mincut_contribution(e) < *threshold)
335                    .map(|e| (e.source, e.target))
336                    .collect();
337
338                for (u, v) in weak_edges {
339                    let _ = self.object_graph.delete_edge(u, v);
340                }
341            }
342            MetaAction::Restructure => {
343                // Use largest connected component
344                let components = self.object_graph.connected_components();
345                if let Some(largest) = components.iter().max_by_key(|c| c.len()) {
346                    if largest.len() < self.object_graph.num_vertices() {
347                        self.rebuild_from_partition(largest);
348                    }
349                }
350            }
351            MetaAction::NoOp => {}
352        }
353
354        self.time += self.config.dt;
355        self.action_history.push(action.clone());
356
357        action
358    }
359
360    /// Get object graph
361    pub fn graph(&self) -> &DynamicGraph {
362        &self.object_graph
363    }
364
365    /// Get mutable object graph
366    pub fn graph_mut(&mut self) -> &mut DynamicGraph {
367        &mut self.object_graph
368    }
369
370    /// Get observer SNN
371    pub fn observer(&self) -> &SpikingNetwork {
372        &self.observer_snn
373    }
374
375    /// Get action history
376    pub fn action_history(&self) -> &[MetaAction] {
377        &self.action_history
378    }
379
380    /// Get meta-level state summary
381    pub fn level_summary(&self) -> (f64, f64, f64) {
382        let l0 = self.object_graph.num_edges() as f64;
383        let l1 = self.observer_summary();
384        let l2 =
385            self.meta_neurons.iter().map(|m| m.state).sum::<f64>() / self.meta_neurons.len() as f64;
386
387        (l0, l1, l2)
388    }
389
390    /// Reset the system
391    pub fn reset(&mut self) {
392        self.observer_snn.reset();
393        for meta in &mut self.meta_neurons {
394            meta.reset();
395        }
396        self.time = 0.0;
397        self.mincut_history.clear();
398        self.action_history.clear();
399    }
400
401    /// Run multiple strange loop iterations
402    pub fn run(&mut self, steps: usize) -> Vec<MetaAction> {
403        let mut actions = Vec::new();
404        for _ in 0..steps {
405            actions.push(self.strange_loop_step());
406        }
407        actions
408    }
409}
410
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_meta_neuron() {
        let mut neuron = MetaNeuron::new(0, 10);

        // Feed more increasing summaries than the window holds.
        for i in 0..15 {
            let _ = neuron.modulate(0.1 * i as f64);
        }

        // History is capped at the window size.
        assert_eq!(neuron.history.len(), 10);
    }

    #[test]
    fn test_strange_loop_creation() {
        // 10-vertex cycle.
        let graph = DynamicGraph::new();
        for i in 0..10 {
            graph.insert_edge(i, (i + 1) % 10, 1.0).unwrap();
        }

        let config = StrangeLoopConfig::default();
        let system = MetaCognitiveMinCut::new(graph, config);

        // Level 0 summary is the edge count; the cycle has 10 edges.
        let (l0, _l1, _l2) = system.level_summary();
        assert!(l0 > 0.0);
    }

    #[test]
    fn test_strange_loop_step() {
        // Complete graph on 10 vertices.
        let graph = DynamicGraph::new();
        for i in 0..10 {
            for j in (i + 1)..10 {
                graph.insert_edge(i, j, 1.0).unwrap();
            }
        }

        let config = StrangeLoopConfig::default();
        let mut system = MetaCognitiveMinCut::new(graph, config);

        // One recorded action per step.
        let actions = system.run(5);
        assert_eq!(actions.len(), 5);
    }
}