use super::{
network::{LayerConfig, NetworkConfig, SpikingNetwork},
neuron::{LIFNeuron, NeuronConfig, NeuronPopulation},
SimTime, Spike,
};
use crate::graph::{DynamicGraph, VertexId};
use std::collections::VecDeque;
/// Tuning parameters for the three-level strange-loop system
/// (object graph → spiking observer → meta-neurons).
#[derive(Debug, Clone)]
pub struct StrangeLoopConfig {
    /// Intended size of level 0 (the object graph).
    /// NOTE(review): not read anywhere in this file — the graph passed to
    /// `MetaCognitiveMinCut::new` determines the actual level-0 size.
    pub level0_size: usize,
    /// Number of neurons in the level-1 observer network layer.
    pub level1_size: usize,
    /// Number of level-2 meta-neurons.
    pub level2_size: usize,
    /// Simulation time step added to the clock on each `strange_loop_step`.
    pub dt: f64,
    /// NOTE(review): the three thresholds below are not read in this file;
    /// the meta-actions carry their own thresholds (see `MetaNeuron::modulate`
    /// and the match arms of `strange_loop_step`). Confirm intended use.
    pub strengthen_threshold: f64,
    pub prune_threshold: f64,
    pub prune_weight_threshold: f64,
    /// Sliding-window length for meta-neuron history (also used as the
    /// capacity of `mincut_history`).
    pub observation_window: usize,
}
impl Default for StrangeLoopConfig {
fn default() -> Self {
Self {
level0_size: 100,
level1_size: 20,
level2_size: 5,
dt: 1.0,
strengthen_threshold: 0.7,
prune_threshold: 0.3,
prune_weight_threshold: 0.1,
observation_window: 100,
}
}
}
/// Identifies one of the three levels of the strange loop.
/// NOTE(review): declared but not referenced anywhere in this file —
/// presumably consumed by callers elsewhere in the crate.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum MetaLevel {
    /// Level 0: the object graph itself.
    Object,
    /// Level 1: the spiking network observing the graph.
    Observer,
    /// Level 2: the meta-neurons observing the observer.
    Meta,
}
/// Structural action chosen by a meta-neuron and applied to the object graph
/// in `strange_loop_step`.
#[derive(Debug, Clone)]
pub enum MetaAction {
    /// Add or strengthen edges between highly synchronous vertex pairs;
    /// the payload is used as the synchrony threshold.
    Strengthen(f64),
    /// Remove edges whose min-cut contribution falls below the payload.
    Prune(f64),
    /// Restrict the graph to its largest connected component.
    Restructure,
    /// Do nothing this step.
    NoOp,
}
/// Dense influence-weight matrices linking the three levels.
/// Dimensions are fixed at construction time in `MetaCognitiveMinCut::new`.
#[derive(Debug, Clone)]
pub struct CrossLevelInfluence {
    /// num_vertices × level1_size: projects per-vertex graph features into
    /// observer input currents (see `encode_graph_state`).
    pub l0_to_l1: Vec<Vec<f64>>,
    /// level1_size × level2_size.
    /// NOTE(review): initialized but never read in this file.
    pub l1_to_l2: Vec<Vec<f64>>,
    /// level2_size × num_vertices.
    /// NOTE(review): initialized but never read in this file.
    pub l2_to_l0: Vec<Vec<f64>>,
}
/// A level-2 (meta) unit: keeps a sliding window of observer summaries and
/// turns the short-term trend of that signal into a structural meta-action.
#[derive(Debug, Clone)]
pub struct MetaNeuron {
    /// Index of this meta-neuron within its population.
    pub id: usize,
    /// Current trend signal: recent mean minus whole-window mean.
    pub state: f64,
    /// Trend magnitude that must be exceeded for Strengthen/Prune to fire.
    pub threshold: f64,
    // Sliding window of raw observer summaries, newest at the back.
    history: VecDeque<f64>,
    // Maximum number of samples retained in `history`.
    window: usize,
}

impl MetaNeuron {
    /// Creates an idle meta-neuron with an empty observation window and a
    /// fixed trend threshold of 0.5.
    pub fn new(id: usize, window: usize) -> Self {
        MetaNeuron {
            id,
            state: 0.0,
            threshold: 0.5,
            history: VecDeque::with_capacity(window),
            window,
        }
    }

    /// Records one observer summary and decides on a meta-action.
    ///
    /// The state is the mean of the last (up to) ten samples minus the mean
    /// of the whole window. A strongly positive trend yields `Strengthen`, a
    /// strongly negative one `Prune`; failing both, an extreme raw summary
    /// (more than twice the threshold in magnitude) yields `Restructure`.
    pub fn modulate(&mut self, observer_summary: f64) -> MetaAction {
        self.history.push_back(observer_summary);
        while self.history.len() > self.window {
            self.history.pop_front();
        }

        let samples = self.history.len() as f64;
        let total: f64 = self.history.iter().sum();
        let recent_total: f64 = self.history.iter().rev().take(10).sum();
        let long_mean = total / samples;
        let recent_mean = recent_total / samples.min(10.0);
        self.state = recent_mean - long_mean;

        if self.state > self.threshold {
            return MetaAction::Strengthen(observer_summary);
        }
        if self.state < -self.threshold {
            return MetaAction::Prune(observer_summary.abs());
        }
        if observer_summary.abs() > 2.0 * self.threshold {
            return MetaAction::Restructure;
        }
        MetaAction::NoOp
    }

    /// Clears the observation window and zeroes the trend signal.
    pub fn reset(&mut self) {
        self.history.clear();
        self.state = 0.0;
    }
}
/// Three-level "strange loop": a dynamic graph (level 0) observed by a
/// spiking network (level 1) whose activity is summarized for meta-neurons
/// (level 2), which in turn rewrite the graph's structure.
pub struct MetaCognitiveMinCut {
    // Level 0: the graph being observed and rewritten.
    object_graph: DynamicGraph,
    // Level 1: spiking network driven by the encoded graph state.
    observer_snn: SpikingNetwork,
    // Level 2: trend detectors that choose meta-actions each step.
    meta_neurons: Vec<MetaNeuron>,
    // Cross-level weight matrices (only `l0_to_l1` is currently read).
    influence: CrossLevelInfluence,
    config: StrangeLoopConfig,
    // Simulation clock, advanced by `config.dt` each step.
    time: SimTime,
    // NOTE(review): never pushed to — only cleared in `reset()`. Dead or
    // not-yet-wired state; confirm intent.
    mincut_history: VecDeque<f64>,
    // Every action returned by `strange_loop_step`, in order.
    action_history: Vec<MetaAction>,
}
impl MetaCognitiveMinCut {
    /// Builds the three-level system around `graph`.
    ///
    /// The observer is a single-layer spiking network of `config.level1_size`
    /// neurons; `config.level2_size` meta-neurons share the configured
    /// observation window. All cross-level influence weights start at a
    /// uniform 0.1.
    pub fn new(graph: DynamicGraph, config: StrangeLoopConfig) -> Self {
        let n = graph.num_vertices();
        // One observer layer sized by the config; every other network
        // parameter keeps its default.
        let observer_config = NetworkConfig {
            layers: vec![LayerConfig::new(config.level1_size)],
            ..NetworkConfig::default()
        };
        let observer_snn = SpikingNetwork::new(observer_config);
        let meta_neurons: Vec<_> = (0..config.level2_size)
            .map(|i| MetaNeuron::new(i, config.observation_window))
            .collect();
        let influence = CrossLevelInfluence {
            l0_to_l1: vec![vec![0.1; config.level1_size]; n],
            l1_to_l2: vec![vec![0.1; config.level2_size]; config.level1_size],
            l2_to_l0: vec![vec![0.1; n]; config.level2_size],
        };
        // Copied out before `config` is moved into the struct.
        let observation_window = config.observation_window;
        Self {
            object_graph: graph,
            observer_snn,
            meta_neurons,
            influence,
            config,
            time: 0.0,
            mincut_history: VecDeque::with_capacity(observation_window),
            action_history: Vec::new(),
        }
    }

    /// Encodes the graph as an input-current vector for the observer layer:
    /// each vertex contributes `degree + weight_sum`, projected through the
    /// L0→L1 influence matrix.
    fn encode_graph_state(&self) -> Vec<f64> {
        let vertices = self.object_graph.vertices();
        let mut encoding = vec![0.0; self.config.level1_size];
        for (i, v) in vertices.iter().enumerate() {
            let degree = self.object_graph.degree(*v) as f64;
            // BUGFIX: the previous `filter_map(|(_, _)| Some(1.0))` mapped
            // every neighbor to a constant 1.0, so `weight_sum` was always
            // identical to the degree and edge weights were discarded.
            // Sum the actual incident weights instead.
            // (Assumes `neighbors` yields `(vertex, weight)` pairs with an
            // f64 weight — confirm against `DynamicGraph::neighbors`.)
            let weight_sum: f64 = self
                .object_graph
                .neighbors(*v)
                .iter()
                .map(|(_, w)| *w)
                .sum();
            for j in 0..encoding.len() {
                // Guard against any size mismatch between the influence
                // matrix and the current vertex count / encoding length.
                if i < self.influence.l0_to_l1.len() && j < self.influence.l0_to_l1[i].len() {
                    encoding[j] += self.influence.l0_to_l1[i][j] * (degree + weight_sum);
                }
            }
        }
        encoding
    }

    /// Scalar summary of observer activity: the firing rate of layer 0 over
    /// a 100-time-unit window (see `SpikingNetwork::layer_rate`).
    fn observer_summary(&self) -> f64 {
        self.observer_snn.layer_rate(0, 100.0)
    }

    /// Vertex pairs whose observer synchrony exceeds `threshold`.
    ///
    /// Synchrony-matrix indices are mapped positionally onto the vertex
    /// list, so only the overlapping prefix of the two is scanned.
    fn high_correlation_pairs(&self, threshold: f64) -> Vec<(VertexId, VertexId)> {
        let sync_matrix = self.observer_snn.synchrony_matrix();
        let vertices = self.object_graph.vertices();
        let mut pairs = Vec::new();
        for i in 0..sync_matrix.len().min(vertices.len()) {
            for j in (i + 1)..sync_matrix[i].len().min(vertices.len()) {
                if sync_matrix[i][j] > threshold {
                    pairs.push((vertices[i], vertices[j]));
                }
            }
        }
        pairs
    }

    /// Heuristic contribution of `edge` to the min cut: its weight divided
    /// by the combined degree of its endpoints (clamped to at least 1 so
    /// isolated endpoints cannot divide by zero).
    fn mincut_contribution(&self, edge: &crate::graph::Edge) -> f64 {
        let src_degree = self.object_graph.degree(edge.source) as f64;
        let tgt_degree = self.object_graph.degree(edge.target) as f64;
        edge.weight / (src_degree + tgt_degree).max(1.0)
    }

    /// Deletes every edge with at least one endpoint outside `vertices`,
    /// restricting the graph to the given partition. Vertices themselves
    /// are left in place; only crossing edges are removed.
    fn rebuild_from_partition(&mut self, vertices: &[VertexId]) {
        let vertex_set: std::collections::HashSet<_> = vertices.iter().collect();
        // Collect first, then delete, to avoid mutating while iterating.
        let edges_to_remove: Vec<_> = self
            .object_graph
            .edges()
            .iter()
            .filter(|e| !vertex_set.contains(&e.source) || !vertex_set.contains(&e.target))
            .map(|e| (e.source, e.target))
            .collect();
        for (u, v) in edges_to_remove {
            let _ = self.object_graph.delete_edge(u, v);
        }
    }

    /// Runs one full loop iteration:
    /// encode graph → drive + step the observer → summarize its activity →
    /// let every meta-neuron vote (the first non-`NoOp` wins) → apply the
    /// chosen action back onto the graph. Returns the applied action.
    pub fn strange_loop_step(&mut self) -> MetaAction {
        // Level 0 → Level 1: feed graph structure into the observer.
        let graph_state = self.encode_graph_state();
        self.observer_snn.inject_current(&graph_state);
        let _observer_spikes = self.observer_snn.step();
        // Level 1 → Level 2: all meta-neurons see the same scalar summary.
        let observer_summary = self.observer_summary();
        let mut actions = Vec::new();
        for meta_neuron in &mut self.meta_neurons {
            actions.push(meta_neuron.modulate(observer_summary));
        }
        // First non-NoOp vote wins; otherwise do nothing this step.
        let action = actions
            .into_iter()
            .find(|a| !matches!(a, MetaAction::NoOp))
            .unwrap_or(MetaAction::NoOp);
        // Level 2 → Level 0: close the loop by rewriting the graph.
        match &action {
            MetaAction::Strengthen(threshold) => {
                // Wire (or reinforce by 10%) highly synchronous vertex pairs.
                let hot_pairs = self.high_correlation_pairs(*threshold);
                for (u, v) in hot_pairs {
                    if !self.object_graph.has_edge(u, v) {
                        let _ = self.object_graph.insert_edge(u, v, 1.0);
                    } else if let Some(edge) = self.object_graph.get_edge(u, v) {
                        let _ = self
                            .object_graph
                            .update_edge_weight(u, v, edge.weight * 1.1);
                    }
                }
            }
            MetaAction::Prune(threshold) => {
                // Drop edges contributing little to the (heuristic) min cut.
                let weak_edges: Vec<_> = self
                    .object_graph
                    .edges()
                    .iter()
                    .filter(|e| self.mincut_contribution(e) < *threshold)
                    .map(|e| (e.source, e.target))
                    .collect();
                for (u, v) in weak_edges {
                    let _ = self.object_graph.delete_edge(u, v);
                }
            }
            MetaAction::Restructure => {
                // Keep only the largest connected component, but do nothing
                // when the graph is already fully connected.
                let components = self.object_graph.connected_components();
                if let Some(largest) = components.iter().max_by_key(|c| c.len()) {
                    if largest.len() < self.object_graph.num_vertices() {
                        self.rebuild_from_partition(largest);
                    }
                }
            }
            MetaAction::NoOp => {}
        }
        self.time += self.config.dt;
        self.action_history.push(action.clone());
        action
    }

    /// Level-0 graph (read-only).
    pub fn graph(&self) -> &DynamicGraph {
        &self.object_graph
    }

    /// Level-0 graph (mutable) — lets callers edit the graph directly.
    pub fn graph_mut(&mut self) -> &mut DynamicGraph {
        &mut self.object_graph
    }

    /// Level-1 observer network (read-only).
    pub fn observer(&self) -> &SpikingNetwork {
        &self.observer_snn
    }

    /// Every action applied so far, in step order.
    pub fn action_history(&self) -> &[MetaAction] {
        &self.action_history
    }

    /// One scalar per level: (edge count, observer rate, mean meta state).
    /// NOTE(review): yields NaN for the third component when
    /// `config.level2_size == 0` — confirm that configuration is disallowed.
    pub fn level_summary(&self) -> (f64, f64, f64) {
        let l0 = self.object_graph.num_edges() as f64;
        let l1 = self.observer_summary();
        let l2 =
            self.meta_neurons.iter().map(|m| m.state).sum::<f64>() / self.meta_neurons.len() as f64;
        (l0, l1, l2)
    }

    /// Resets observer, meta-neurons, clock, and histories. The object
    /// graph itself is deliberately left untouched.
    pub fn reset(&mut self) {
        self.observer_snn.reset();
        for meta in &mut self.meta_neurons {
            meta.reset();
        }
        self.time = 0.0;
        self.mincut_history.clear();
        self.action_history.clear();
    }

    /// Runs `steps` loop iterations and returns the applied actions.
    pub fn run(&mut self, steps: usize) -> Vec<MetaAction> {
        let mut actions = Vec::new();
        for _ in 0..steps {
            actions.push(self.strange_loop_step());
        }
        actions
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The meta-neuron's sliding window must be capped at `window` samples.
    #[test]
    fn test_meta_neuron() {
        let mut neuron = MetaNeuron::new(0, 10);
        for i in 0..15 {
            let _ = neuron.modulate(0.1 * i as f64);
        }
        assert_eq!(neuron.history.len(), 10);
    }

    /// Construction over a 10-cycle graph yields a non-empty level-0 summary.
    #[test]
    fn test_strange_loop_creation() {
        // BUGFIX: `graph` must be mutable — `insert_edge` mutates the graph
        // (it is called through `&mut self` in `strange_loop_step`), so the
        // previous `let graph` binding could not compile.
        let mut graph = DynamicGraph::new();
        for i in 0..10 {
            graph.insert_edge(i, (i + 1) % 10, 1.0).unwrap();
        }
        let config = StrangeLoopConfig::default();
        let system = MetaCognitiveMinCut::new(graph, config);
        // Unused summary components are underscored to avoid warnings.
        let (l0, _l1, _l2) = system.level_summary();
        assert!(l0 > 0.0);
    }

    /// Running N steps on a complete graph returns exactly N actions.
    #[test]
    fn test_strange_loop_step() {
        // BUGFIX: same missing-`mut` issue as above.
        let mut graph = DynamicGraph::new();
        for i in 0..10 {
            for j in (i + 1)..10 {
                graph.insert_edge(i, j, 1.0).unwrap();
            }
        }
        let config = StrangeLoopConfig::default();
        let mut system = MetaCognitiveMinCut::new(graph, config);
        let actions = system.run(5);
        assert_eq!(actions.len(), 5);
    }
}