// ruvector_mincut/snn/strange_loop.rs

//! # Layer 2: Strange Loop Self-Modification Protocol
//!
//! Implements recursive self-observation for meta-cognitive graph optimization.
//!
//! ## Hierarchical Levels
//!
//! - **Level 0**: Object Graph - computational units and data flow
//! - **Level 1**: Meta-Graph - observes Level 0 statistics
//! - **Level 2**: Meta-Meta-Graph - observes learning dynamics
//!
//! The "strange loop" closes when Level 2 actions modify Level 0 structure,
//! which changes Level 1 observations, which triggers Level 2 re-evaluation.

use std::collections::VecDeque;

use super::{
    network::{LayerConfig, NetworkConfig, SpikingNetwork},
    neuron::{LIFNeuron, NeuronConfig, NeuronPopulation},
    SimTime, Spike,
};
use crate::graph::{DynamicGraph, VertexId};

/// Tunable parameters for the strange loop system.
#[derive(Debug, Clone)]
pub struct StrangeLoopConfig {
    /// Number of Level 0 neurons (intended to match graph vertex count)
    pub level0_size: usize,
    /// Number of Level 1 observer neurons
    pub level1_size: usize,
    /// Number of Level 2 meta-neurons
    pub level2_size: usize,
    /// Simulation time step
    pub dt: f64,
    /// Trend threshold above which edges are strengthened
    pub strengthen_threshold: f64,
    /// Trend threshold below which edges are pruned
    pub prune_threshold: f64,
    /// Minimum mincut contribution an edge needs to survive pruning
    pub prune_weight_threshold: f64,
    /// Number of past observations retained for decisions
    pub observation_window: usize,
}

impl Default for StrangeLoopConfig {
    /// Baseline parameters: a 100-vertex object level observed by 20
    /// neurons and governed by 5 meta-neurons over a 100-sample window.
    fn default() -> Self {
        StrangeLoopConfig {
            level0_size: 100,
            level1_size: 20,
            level2_size: 5,
            dt: 1.0,
            strengthen_threshold: 0.7,
            prune_threshold: 0.3,
            prune_weight_threshold: 0.1,
            observation_window: 100,
        }
    }
}

/// Meta-level in the hierarchy.
///
/// `Eq` and `Hash` are derived alongside `PartialEq`: the enum is
/// field-less, so full equality is well-defined, and the extra derives
/// let callers use levels as map/set keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MetaLevel {
    /// Level 0: Object graph being optimized
    Object,
    /// Level 1: Observer SNN watching Level 0
    Observer,
    /// Level 2: Meta-neuron modulating observer
    Meta,
}

/// Structural modification that Level 2 may apply to the Level 0 graph.
#[derive(Debug, Clone)]
pub enum MetaAction {
    /// Reinforce edges between vertex pairs with high observer activity
    Strengthen(f64),
    /// Drop edges whose mincut contribution falls below the threshold
    Prune(f64),
    /// Radical reorganization seeded by the current mincut
    Restructure,
    /// Leave the graph untouched this step
    NoOp,
}

/// Influence weights linking the three levels of the hierarchy.
#[derive(Debug, Clone)]
pub struct CrossLevelInfluence {
    /// Weights projecting Level 0 activity onto Level 1 observers
    pub l0_to_l1: Vec<Vec<f64>>,
    /// Weights projecting Level 1 activity onto Level 2 meta-neurons
    pub l1_to_l2: Vec<Vec<f64>>,
    /// Weights feeding Level 2 decisions back into Level 0 (the loop-closing link)
    pub l2_to_l0: Vec<Vec<f64>>,
}

/// Level 2 decision unit that watches observer summaries over time.
#[derive(Debug, Clone)]
pub struct MetaNeuron {
    /// Identifier of this meta-neuron
    pub id: usize,
    /// Internal trend state (recent mean minus long-run mean)
    pub state: f64,
    /// Magnitude the trend must exceed before an action fires
    pub threshold: f64,
    // Sliding buffer of past observer summaries.
    history: VecDeque<f64>,
    // Maximum number of summaries retained in `history`.
    window: usize,
}

108impl MetaNeuron {
109    /// Create a new meta-neuron
110    pub fn new(id: usize, window: usize) -> Self {
111        Self {
112            id,
113            state: 0.0,
114            threshold: 0.5,
115            history: VecDeque::with_capacity(window),
116            window,
117        }
118    }
119
120    /// Process observer summary and produce modulation signal
121    pub fn modulate(&mut self, observer_summary: f64) -> MetaAction {
122        // Update history
123        self.history.push_back(observer_summary);
124        if self.history.len() > self.window {
125            self.history.pop_front();
126        }
127
128        // Compute trend
129        let mean: f64 = self.history.iter().sum::<f64>() / self.history.len() as f64;
130        let recent_mean: f64 = self.history.iter().rev().take(10)
131            .sum::<f64>() / 10.0f64.min(self.history.len() as f64);
132
133        self.state = recent_mean - mean;
134
135        // Decide action based on state
136        if self.state > self.threshold {
137            MetaAction::Strengthen(observer_summary)
138        } else if self.state < -self.threshold {
139            MetaAction::Prune(observer_summary.abs())
140        } else if observer_summary.abs() > 2.0 * self.threshold {
141            MetaAction::Restructure
142        } else {
143            MetaAction::NoOp
144        }
145    }
146
147    /// Reset meta-neuron state
148    pub fn reset(&mut self) {
149        self.state = 0.0;
150        self.history.clear();
151    }
152}
153
154/// Meta-Cognitive MinCut with Strange Loop
155pub struct MetaCognitiveMinCut {
156    /// Level 0: Object graph being optimized
157    object_graph: DynamicGraph,
158    /// Level 1: SNN observing object graph statistics
159    observer_snn: SpikingNetwork,
160    /// Level 2: Meta-neurons modulating observer behavior
161    meta_neurons: Vec<MetaNeuron>,
162    /// Cross-level influence matrix
163    influence: CrossLevelInfluence,
164    /// Configuration
165    config: StrangeLoopConfig,
166    /// Current simulation time
167    time: SimTime,
168    /// History of mincut values
169    mincut_history: VecDeque<f64>,
170    /// History of actions taken
171    action_history: Vec<MetaAction>,
172}
173
174impl MetaCognitiveMinCut {
175    /// Create a new meta-cognitive mincut system
176    pub fn new(graph: DynamicGraph, config: StrangeLoopConfig) -> Self {
177        let n = graph.num_vertices();
178
179        // Level 1: Observer SNN
180        let observer_config = NetworkConfig {
181            layers: vec![LayerConfig::new(config.level1_size)],
182            ..NetworkConfig::default()
183        };
184        let observer_snn = SpikingNetwork::new(observer_config);
185
186        // Level 2: Meta-neurons
187        let meta_neurons: Vec<_> = (0..config.level2_size)
188            .map(|i| MetaNeuron::new(i, config.observation_window))
189            .collect();
190
191        // Initialize cross-level influence
192        let influence = CrossLevelInfluence {
193            l0_to_l1: vec![vec![0.1; config.level1_size]; n],
194            l1_to_l2: vec![vec![0.1; config.level2_size]; config.level1_size],
195            l2_to_l0: vec![vec![0.1; n]; config.level2_size],
196        };
197
198        let observation_window = config.observation_window;
199
200        Self {
201            object_graph: graph,
202            observer_snn,
203            meta_neurons,
204            influence,
205            config,
206            time: 0.0,
207            mincut_history: VecDeque::with_capacity(observation_window),
208            action_history: Vec::new(),
209        }
210    }
211
212    /// Encode graph state as spike pattern for Level 1
213    fn encode_graph_state(&self) -> Vec<f64> {
214        let vertices = self.object_graph.vertices();
215        let mut encoding = vec![0.0; self.config.level1_size];
216
217        for (i, v) in vertices.iter().enumerate() {
218            let degree = self.object_graph.degree(*v) as f64;
219            let weight_sum: f64 = self.object_graph.neighbors(*v)
220                .iter()
221                .filter_map(|(_, _)| Some(1.0))
222                .sum();
223
224            // Project to observer neurons
225            for j in 0..encoding.len() {
226                if i < self.influence.l0_to_l1.len() && j < self.influence.l0_to_l1[i].len() {
227                    encoding[j] += self.influence.l0_to_l1[i][j] * (degree + weight_sum);
228                }
229            }
230        }
231
232        encoding
233    }
234
235    /// Get population rate as observer summary
236    fn observer_summary(&self) -> f64 {
237        self.observer_snn.layer_rate(0, 100.0)
238    }
239
240    /// Find high-correlation pairs in observer SNN
241    fn high_correlation_pairs(&self, threshold: f64) -> Vec<(VertexId, VertexId)> {
242        let sync_matrix = self.observer_snn.synchrony_matrix();
243        let vertices = self.object_graph.vertices();
244        let mut pairs = Vec::new();
245
246        for i in 0..sync_matrix.len().min(vertices.len()) {
247            for j in (i + 1)..sync_matrix[i].len().min(vertices.len()) {
248                if sync_matrix[i][j] > threshold {
249                    pairs.push((vertices[i], vertices[j]));
250                }
251            }
252        }
253
254        pairs
255    }
256
257    /// Compute mincut contribution for each edge (simplified)
258    fn mincut_contribution(&self, edge: &crate::graph::Edge) -> f64 {
259        // Simplified: degree-based contribution
260        let src_degree = self.object_graph.degree(edge.source) as f64;
261        let tgt_degree = self.object_graph.degree(edge.target) as f64;
262
263        edge.weight / (src_degree + tgt_degree).max(1.0)
264    }
265
266    /// Rebuild graph from partition (simplified)
267    fn rebuild_from_partition(&mut self, vertices: &[VertexId]) {
268        // Keep only edges within the partition
269        let vertex_set: std::collections::HashSet<_> = vertices.iter().collect();
270
271        let edges_to_remove: Vec<_> = self.object_graph.edges()
272            .iter()
273            .filter(|e| !vertex_set.contains(&e.source) || !vertex_set.contains(&e.target))
274            .map(|e| (e.source, e.target))
275            .collect();
276
277        for (u, v) in edges_to_remove {
278            let _ = self.object_graph.delete_edge(u, v);
279        }
280    }
281
282    /// Execute one strange loop iteration
283    pub fn strange_loop_step(&mut self) -> MetaAction {
284        // Level 0 → Level 1: Encode graph state as spike patterns
285        let graph_state = self.encode_graph_state();
286        self.observer_snn.inject_current(&graph_state);
287
288        // Level 1 dynamics: Observer SNN processes graph state
289        let _observer_spikes = self.observer_snn.step();
290
291        // Level 1 → Level 2: Meta-neuron receives observer output
292        let observer_summary = self.observer_summary();
293
294        // Level 2 decision: Aggregate meta-neuron decisions
295        let mut actions = Vec::new();
296        for meta_neuron in &mut self.meta_neurons {
297            actions.push(meta_neuron.modulate(observer_summary));
298        }
299
300        // Select dominant action (simplified: first non-NoOp)
301        let action = actions.into_iter()
302            .find(|a| !matches!(a, MetaAction::NoOp))
303            .unwrap_or(MetaAction::NoOp);
304
305        // Level 2 → Level 0: Close the strange loop
306        match &action {
307            MetaAction::Strengthen(threshold) => {
308                // Add edges where observer activity is high
309                let hot_pairs = self.high_correlation_pairs(*threshold);
310                for (u, v) in hot_pairs {
311                    if !self.object_graph.has_edge(u, v) {
312                        let _ = self.object_graph.insert_edge(u, v, 1.0);
313                    } else {
314                        // Strengthen existing edge
315                        if let Some(edge) = self.object_graph.get_edge(u, v) {
316                            let _ = self.object_graph.update_edge_weight(u, v, edge.weight * 1.1);
317                        }
318                    }
319                }
320            }
321            MetaAction::Prune(threshold) => {
322                // Remove edges below mincut contribution threshold
323                let weak_edges: Vec<_> = self.object_graph.edges()
324                    .iter()
325                    .filter(|e| self.mincut_contribution(e) < *threshold)
326                    .map(|e| (e.source, e.target))
327                    .collect();
328
329                for (u, v) in weak_edges {
330                    let _ = self.object_graph.delete_edge(u, v);
331                }
332            }
333            MetaAction::Restructure => {
334                // Use largest connected component
335                let components = self.object_graph.connected_components();
336                if let Some(largest) = components.iter().max_by_key(|c| c.len()) {
337                    if largest.len() < self.object_graph.num_vertices() {
338                        self.rebuild_from_partition(largest);
339                    }
340                }
341            }
342            MetaAction::NoOp => {}
343        }
344
345        self.time += self.config.dt;
346        self.action_history.push(action.clone());
347
348        action
349    }
350
351    /// Get object graph
352    pub fn graph(&self) -> &DynamicGraph {
353        &self.object_graph
354    }
355
356    /// Get mutable object graph
357    pub fn graph_mut(&mut self) -> &mut DynamicGraph {
358        &mut self.object_graph
359    }
360
361    /// Get observer SNN
362    pub fn observer(&self) -> &SpikingNetwork {
363        &self.observer_snn
364    }
365
366    /// Get action history
367    pub fn action_history(&self) -> &[MetaAction] {
368        &self.action_history
369    }
370
371    /// Get meta-level state summary
372    pub fn level_summary(&self) -> (f64, f64, f64) {
373        let l0 = self.object_graph.num_edges() as f64;
374        let l1 = self.observer_summary();
375        let l2 = self.meta_neurons.iter()
376            .map(|m| m.state)
377            .sum::<f64>() / self.meta_neurons.len() as f64;
378
379        (l0, l1, l2)
380    }
381
382    /// Reset the system
383    pub fn reset(&mut self) {
384        self.observer_snn.reset();
385        for meta in &mut self.meta_neurons {
386            meta.reset();
387        }
388        self.time = 0.0;
389        self.mincut_history.clear();
390        self.action_history.clear();
391    }
392
393    /// Run multiple strange loop iterations
394    pub fn run(&mut self, steps: usize) -> Vec<MetaAction> {
395        let mut actions = Vec::new();
396        for _ in 0..steps {
397            actions.push(self.strange_loop_step());
398        }
399        actions
400    }
401}
402
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_meta_neuron() {
        let mut neuron = MetaNeuron::new(0, 10);

        // Feed a rising sequence of summaries; history must cap at the window.
        for i in 0..15 {
            let _ = neuron.modulate(0.1 * i as f64);
        }

        // assert_eq! reports both values on failure, unlike assert!(a == b).
        assert_eq!(neuron.history.len(), 10);
    }

    #[test]
    fn test_strange_loop_creation() {
        // NOTE(review): insert_edge on a non-mut binding implies interior
        // mutability in DynamicGraph — confirm against its definition.
        let graph = DynamicGraph::new();
        // Build a 10-vertex cycle.
        for i in 0..10 {
            graph.insert_edge(i, (i + 1) % 10, 1.0).unwrap();
        }

        let config = StrangeLoopConfig::default();
        let system = MetaCognitiveMinCut::new(graph, config);

        // Only the edge count is asserted; underscore the unused summaries
        // so the test compiles without warnings.
        let (l0, _l1, _l2) = system.level_summary();
        assert!(l0 > 0.0);
    }

    #[test]
    fn test_strange_loop_step() {
        let graph = DynamicGraph::new();
        // Build a complete graph on 10 vertices.
        for i in 0..10 {
            for j in (i + 1)..10 {
                graph.insert_edge(i, j, 1.0).unwrap();
            }
        }

        let config = StrangeLoopConfig::default();
        let mut system = MetaCognitiveMinCut::new(graph, config);

        // One action should be recorded per step.
        let actions = system.run(5);
        assert_eq!(actions.len(), 5);
    }
}