//! Consciousness simulation framework for MEM8
//! Implements awareness, attention allocation, and sensory arbitration

use crate::mem8::wave::{FrequencyBand, MemoryWave, WaveGrid};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};

9/// Consciousness state at time t
10pub struct ConsciousnessState {
11    /// Current attention weights for different memory regions
12    pub attention_weights: HashMap<MemoryRegion, f32>,
13    /// Active memories in consciousness
14    pub active_memories: Vec<Arc<MemoryWave>>,
15    /// Reflexive response components
16    pub reflexive_responses: Vec<ReflexiveComponent>,
17    /// Current awareness level (0.0 to 1.0)
18    pub awareness_level: f32,
19    /// Last update timestamp
20    pub last_update: Instant,
21}
22
23impl Default for ConsciousnessState {
24    fn default() -> Self {
25        Self::new()
26    }
27}
28
29impl ConsciousnessState {
30    pub fn new() -> Self {
31        Self {
32            attention_weights: HashMap::new(),
33            active_memories: Vec::new(),
34            reflexive_responses: Vec::new(),
35            awareness_level: 0.5,
36            last_update: Instant::now(),
37        }
38    }
39
40    /// Update consciousness state with new memories and responses
41    pub fn update(&mut self, memories: Vec<Arc<MemoryWave>>, responses: Vec<ReflexiveComponent>) {
42        self.active_memories = memories;
43        self.reflexive_responses = responses;
44        self.last_update = Instant::now();
45
46        // Update awareness based on activity
47        self.awareness_level = self.calculate_awareness();
48    }
49
50    /// Calculate current awareness level based on activity
51    fn calculate_awareness(&self) -> f32 {
52        let memory_activity = (self.active_memories.len() as f32 / 100.0).min(1.0);
53        let attention_focus = self.attention_weights.values().sum::<f32>()
54            / self.attention_weights.len().max(1) as f32;
55
56        (memory_activity + attention_focus) / 2.0
57    }
58}
59
/// Identifies a region of memory for attention allocation.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub enum MemoryRegion {
    /// Visual region at (x, y) coordinates.
    Visual(u8, u8),
    /// Auditory region keyed by frequency band.
    Auditory(u16),
    /// Temporal region keyed by time layer (z-axis).
    Temporal(u16),
    /// Region keyed by semantic category.
    Semantic(String),
    /// Region keyed by emotional category.
    Emotional(String),
}

/// One reflexive (involuntary) response produced by the consciousness layer.
#[derive(Clone)]
pub struct ReflexiveComponent {
    /// Human-readable description of what fired the response.
    pub trigger: String,
    /// The response taken (e.g. "Heightened attention").
    pub response: String,
    /// Response intensity; higher values indicate stronger reflexes.
    pub strength: f32,
}

78/// Multi-grid sensor architecture
79pub struct SensorGrid {
80    /// Grid identifier
81    pub id: String,
82    /// Grid type (e.g., "color_r", "motion_h", "edge_0")
83    pub grid_type: SensorGridType,
84    /// The wave grid itself
85    pub grid: Arc<RwLock<WaveGrid>>,
86    /// Temporal blanket configuration
87    pub temporal_blanket: TemporalBlanket,
88}
89
90/// Types of sensor grids
91#[derive(Debug, Clone)]
92pub enum SensorGridType {
93    // Visual grids (10-15 per eye)
94    ColorChannel(ColorChannel),
95    Motion(MotionDirection),
96    EdgeDetection(u16), // Angle in degrees
97    Depth,
98    Saliency,
99    Luminance,
100
101    // Audio grids
102    FrequencyBand(f32, f32), // Min, max frequency
103    Amplitude,
104    Phase,
105
106    // Other modalities
107    Temporal,
108    Context,
109    Semantic,
110}
111
/// Colour channels available to visual sensor grids.
#[derive(Debug, Clone)]
pub enum ColorChannel {
    Red,
    Green,
    Blue,
}

/// Axis of motion tracked by a motion-sensing grid.
#[derive(Debug, Clone)]
pub enum MotionDirection {
    Horizontal,
    Vertical,
}

125/// Temporal blanket for environmental adaptation
126pub struct TemporalBlanket {
127    /// Interest-based adjustment factor
128    pub alpha: f32,
129    /// Attention-based decay rate
130    pub lambda: f32,
131    /// Environmental calibration
132    pub beta_calib: f32,
133    /// Hard blankets (fixed calibration patterns)
134    pub hard_blankets: Vec<CalibrationPattern>,
135    /// Soft blankets (adaptive filters)
136    pub soft_blankets: Vec<AdaptiveFilter>,
137}
138
139impl Default for TemporalBlanket {
140    fn default() -> Self {
141        Self::new()
142    }
143}
144
145impl TemporalBlanket {
146    pub fn new() -> Self {
147        Self {
148            alpha: 1.0,
149            lambda: 0.1,
150            beta_calib: 0.0,
151            hard_blankets: Vec::new(),
152            soft_blankets: Vec::new(),
153        }
154    }
155
156    /// Calculate blanket value at time t
157    pub fn calculate(&self, t: f32, interest: f32) -> f32 {
158        self.alpha * (-self.lambda * interest * t).exp() + self.beta_calib
159    }
160
161    /// Apply environmental adaptation
162    pub fn adapt_to_environment(&mut self, env_changes: &[(String, f32)]) {
163        let mut delta_sum = 0.0;
164
165        for (change_type, magnitude) in env_changes {
166            let weight = match change_type.as_str() {
167                "lighting" => 0.4,
168                "motion" => 0.3,
169                "noise" => 0.2,
170                _ => 0.1,
171            };
172            delta_sum += weight * magnitude;
173        }
174
175        self.beta_calib = self.beta_calib * 0.9 + delta_sum * 0.1;
176    }
177}
178
/// A fixed calibration pattern used as a hard temporal blanket.
#[derive(Clone)]
pub struct CalibrationPattern {
    /// Pattern identifier.
    pub name: String,
    /// The calibration sample values.
    pub pattern: Vec<f32>,
}

/// An adaptive filter used as a soft temporal blanket.
#[derive(Clone)]
pub struct AdaptiveFilter {
    /// Filter identifier.
    pub name: String,
    /// Current filter strength.
    pub strength: f32,
    /// How quickly the filter adapts to new input.
    pub adaptation_rate: f32,
}

192/// Sensor arbitration system with human-AI control
193pub struct SensorArbitrator {
194    /// Human control weight (0.0 to 1.0)
195    pub human_weight: f32,
196    /// AI control weight (0.0 to 1.0)
197    pub ai_weight: f32,
198    /// Sensor grids
199    pub sensor_grids: HashMap<String, SensorGrid>,
200    /// AI interest weights
201    pub ai_interests: HashMap<String, f32>,
202    /// Subconscious influence weights
203    pub subconscious_weights: HashMap<String, f32>,
204}
205
206impl SensorArbitrator {
207    pub fn new(human_weight: f32, ai_weight: f32) -> Self {
208        assert!(
209            (human_weight + ai_weight - 1.0).abs() < 0.001,
210            "Weights must sum to 1.0"
211        );
212
213        Self {
214            human_weight,
215            ai_weight,
216            sensor_grids: HashMap::new(),
217            ai_interests: HashMap::new(),
218            subconscious_weights: HashMap::new(),
219        }
220    }
221
222    /// Calculate weighted sensor output
223    pub fn arbitrate(&self, _sensor_id: &str, human_value: f32, ai_value: f32) -> f32 {
224        self.human_weight * human_value + self.ai_weight * ai_value
225    }
226
227    /// Calculate weighted interest for a sensor
228    pub fn calculate_weighted_interest(&self, sensor_id: &str, base_interest: f32) -> f32 {
229        let subconscious_weight = self.subconscious_weights.get(sensor_id).unwrap_or(&0.0);
230        let ai_weight = self.ai_interests.get(sensor_id).unwrap_or(&0.0);
231
232        base_interest + 0.3 * base_interest * subconscious_weight + 0.7 * base_interest * ai_weight
233    }
234
235    /// Check if AI can override noise floor
236    pub fn should_process(&self, sensor_id: &str, signal_strength: f32, noise_floor: f32) -> bool {
237        let ai_weight = self.ai_interests.get(sensor_id).unwrap_or(&0.0);
238
239        // AI override when weight > 0.8
240        if *ai_weight > 0.8 {
241            return true;
242        }
243
244        // Normal processing
245        let weighted_interest = self.calculate_weighted_interest(sensor_id, signal_strength);
246        weighted_interest > noise_floor
247    }
248}
249
250/// Consciousness simulation engine
251pub struct ConsciousnessEngine {
252    /// Wave grid for memory storage
253    pub wave_grid: Arc<RwLock<WaveGrid>>,
254    /// Current consciousness state
255    pub state: RwLock<ConsciousnessState>,
256    /// Sensor arbitrator
257    pub arbitrator: SensorArbitrator,
258    /// Attention allocation strategy
259    pub attention_strategy: AttentionStrategy,
260}
261
262impl ConsciousnessEngine {
263    pub fn new(wave_grid: Arc<RwLock<WaveGrid>>) -> Self {
264        Self {
265            wave_grid,
266            state: RwLock::new(ConsciousnessState::new()),
267            arbitrator: SensorArbitrator::new(0.3, 0.7), // 30% human, 70% AI control
268            attention_strategy: AttentionStrategy::default(),
269        }
270    }
271
272    /// Update consciousness state based on current memories
273    pub fn update(&self) {
274        let grid = self.wave_grid.read().unwrap();
275        let mut state = self.state.write().unwrap();
276
277        // Collect active memories based on attention
278        let active_memories = self.collect_active_memories(&grid);
279
280        // Update attention weights
281        self.update_attention_weights(&mut state, &active_memories);
282
283        // Generate reflexive responses
284        let reflexive = self.generate_reflexive_responses(&active_memories);
285
286        state.update(active_memories, reflexive);
287    }
288
289    /// Collect memories that are currently active in consciousness
290    fn collect_active_memories(&self, grid: &WaveGrid) -> Vec<Arc<MemoryWave>> {
291        let mut active = Vec::new();
292        let attention_threshold = 0.3;
293
294        // Sample based on attention weights
295        let state = self.state.read().unwrap();
296
297        for (region, &weight) in &state.attention_weights {
298            if weight > attention_threshold {
299                // Sample memories from this region
300                match region {
301                    MemoryRegion::Visual(x, y) => {
302                        // Sample around visual coordinates
303                        for z in 0..100 {
304                            if let Some(wave) = grid.get(*x, *y, z) {
305                                if wave.calculate_decay() > 0.1 {
306                                    active.push(wave.clone());
307                                }
308                            }
309                        }
310                    }
311                    MemoryRegion::Temporal(z) => {
312                        // Sample from temporal layer
313                        for x in 0..16 {
314                            for y in 0..16 {
315                                if let Some(wave) = grid.get(x * 16, y * 16, *z) {
316                                    if wave.calculate_decay() > 0.1 {
317                                        active.push(wave.clone());
318                                    }
319                                }
320                            }
321                        }
322                    }
323                    _ => {} // Handle other regions
324                }
325            }
326        }
327
328        active
329    }
330
331    /// Update attention weights based on current activity
332    fn update_attention_weights(
333        &self,
334        state: &mut ConsciousnessState,
335        memories: &[Arc<MemoryWave>],
336    ) {
337        // Decay existing weights
338        for weight in state.attention_weights.values_mut() {
339            *weight *= 0.95;
340        }
341
342        // Boost weights for active memory regions
343        for memory in memories {
344            // Determine region based on frequency
345            let band = FrequencyBand::from_frequency(memory.frequency);
346            let region = match band {
347                FrequencyBand::Delta => MemoryRegion::Semantic("delta_deep".to_string()),
348                FrequencyBand::Theta => MemoryRegion::Semantic("theta_integration".to_string()),
349                FrequencyBand::Alpha => MemoryRegion::Semantic("alpha_flow".to_string()),
350                FrequencyBand::Beta => MemoryRegion::Semantic("beta_active".to_string()),
351                FrequencyBand::Gamma => MemoryRegion::Semantic("gamma_binding".to_string()),
352                FrequencyBand::HyperGamma => MemoryRegion::Semantic("hypergamma_peak".to_string()),
353                // Legacy mappings
354                FrequencyBand::DeepStructural => MemoryRegion::Semantic("structural".to_string()),
355                FrequencyBand::Conversational => {
356                    MemoryRegion::Semantic("conversational".to_string())
357                }
358                FrequencyBand::Technical => MemoryRegion::Semantic("technical".to_string()),
359                FrequencyBand::Implementation => {
360                    MemoryRegion::Semantic("implementation".to_string())
361                }
362                FrequencyBand::Abstract => MemoryRegion::Semantic("abstract".to_string()),
363            };
364
365            *state.attention_weights.entry(region).or_insert(0.0) += 0.1;
366        }
367
368        // Normalize weights
369        let sum: f32 = state.attention_weights.values().sum();
370        if sum > 0.0 {
371            for weight in state.attention_weights.values_mut() {
372                *weight /= sum;
373            }
374        }
375    }
376
377    /// Generate reflexive responses based on active memories
378    fn generate_reflexive_responses(
379        &self,
380        memories: &[Arc<MemoryWave>],
381    ) -> Vec<ReflexiveComponent> {
382        let mut responses = Vec::new();
383
384        for memory in memories {
385            // High arousal memories trigger reflexive responses
386            if memory.arousal > 0.7 {
387                responses.push(ReflexiveComponent {
388                    trigger: format!("High arousal memory ({}Hz)", memory.frequency),
389                    response: "Heightened attention".to_string(),
390                    strength: memory.arousal,
391                });
392            }
393
394            // Negative valence with high amplitude
395            if memory.valence < -0.5 && memory.amplitude > 0.8 {
396                responses.push(ReflexiveComponent {
397                    trigger: "Negative high-amplitude memory".to_string(),
398                    response: "Defensive stance".to_string(),
399                    strength: memory.amplitude * memory.valence.abs(),
400                });
401            }
402        }
403
404        responses
405    }
406}
407
/// Strategies for allocating attention across memories.
#[derive(Debug, Clone, Default)]
pub enum AttentionStrategy {
    /// Focus on high-amplitude memories.
    AmplitudeBased,
    /// Focus on emotionally salient memories.
    EmotionBased,
    /// Focus on novel or unfamiliar patterns.
    NoveltyBased,
    /// Balance across all factors (the default).
    #[default]
    Balanced,
}

422/// Subliminal forgetting processor
423pub struct ForgettingProcessor {
424    /// Processing frequency (Hz)
425    pub frequency: f32,
426    /// Forgetting curves
427    pub curves: HashMap<String, ForgetCurve>,
428}
429
430impl Default for ForgettingProcessor {
431    fn default() -> Self {
432        Self::new()
433    }
434}
435
436impl ForgettingProcessor {
437    pub fn new() -> Self {
438        let mut curves = HashMap::new();
439
440        // Define standard forgetting curves
441        curves.insert(
442            "flash".to_string(),
443            ForgetCurve::Flash(Duration::from_millis(500)),
444        );
445        curves.insert(
446            "fade".to_string(),
447            ForgetCurve::Fade(Duration::from_secs(5)),
448        );
449        curves.insert(
450            "linger".to_string(),
451            ForgetCurve::Linger(Duration::from_secs(30)),
452        );
453        curves.insert(
454            "persist".to_string(),
455            ForgetCurve::Persist(Duration::from_secs(300)),
456        );
457        curves.insert("consolidate".to_string(), ForgetCurve::Consolidate);
458
459        Self {
460            frequency: 100.0, // 100Hz processing
461            curves,
462        }
463    }
464
465    /// Process memory for context-aware forgetting
466    pub fn process(&self, _memory: &mut MemoryWave, context: &str) -> ForgetCurve {
467        match context {
468            "transient_detail" => ForgetCurve::Flash(Duration::from_millis(500)),
469            "resolved_threat" => ForgetCurve::Fade(Duration::from_secs(5)),
470            "familiar_anomaly" => ForgetCurve::Linger(Duration::from_secs(30)),
471            "actionable_info" => ForgetCurve::Persist(Duration::from_secs(300)),
472            "learned_pattern" => ForgetCurve::Consolidate,
473            _ => ForgetCurve::Fade(Duration::from_secs(10)),
474        }
475    }
476}
477
/// Retention profiles for forgotten memories, ordered by retention length.
#[derive(Debug, Clone)]
pub enum ForgetCurve {
    /// Very short retention.
    Flash(Duration),
    /// Quick fade.
    Fade(Duration),
    /// Medium retention.
    Linger(Duration),
    /// Long retention.
    Persist(Duration),
    /// Permanent memory; never forgotten.
    Consolidate,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_consciousness_state() {
        let mut state = ConsciousnessState::new();
        assert_eq!(state.awareness_level, 0.5);

        // Give two regions some attention.
        state
            .attention_weights
            .insert(MemoryRegion::Visual(128, 128), 0.8);
        state
            .attention_weights
            .insert(MemoryRegion::Temporal(1000), 0.6);

        // 60 active memories push memory activity — and so awareness —
        // past the 0.5 baseline.
        let waves: Vec<Arc<MemoryWave>> = (0..60)
            .map(|i| Arc::new(MemoryWave::new(440.0 + i as f32, 0.8)))
            .collect();
        state.update(waves, Vec::new());

        assert!(state.awareness_level > 0.5);
    }

    #[test]
    fn test_sensor_arbitration() {
        let arbitrator = SensorArbitrator::new(0.3, 0.7);

        // Arbitrated value is the weight-blended mix of the two readings.
        let blended = arbitrator.arbitrate("test_sensor", 0.5, 0.8);
        let expected = 0.3 * 0.5 + 0.7 * 0.8;
        assert!((blended - expected).abs() < 0.001);
    }

    #[test]
    fn test_ai_override() {
        let mut arbitrator = SensorArbitrator::new(0.3, 0.7);
        arbitrator
            .ai_interests
            .insert("critical_sensor".to_string(), 0.9);

        // AI interest above 0.8 forces processing even below the noise floor.
        assert!(arbitrator.should_process("critical_sensor", 0.05, 0.1));
    }
}