eenn 0.1.0

A hybrid neural-symbolic constraint solver with cognitive reasoning capabilities
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
//! Self-Learning Lightning Strike
//!
//! This module implements a self-improving Lightning Strike engine that learns
//! from its own SMT solving experience, getting faster over time by training
//! a neural surrogate on successful constraint solutions.

use anyhow::Result;
use std::collections::{HashMap, VecDeque};
use std::sync::{Arc, Mutex};
use std::time::Instant;

// Import EENN neural components (no circular dependency here)
use crate::{Adam, Dataset, MSELoss, TrainableNeuron, Trainer, TrainingConfig, TrainingExample};

// Import theory core components
use theory_core::{
    CognitiveConfig, ConstraintIR, ConstraintLearner, Domain, LearningConfig,
    LightningStrikeEngine, MetaReasoningConfig, MetaReasoningEngine, OptimizationConfig,
    PerformanceOptimizer, SolutionResult, SolutionValue,
};

/// Configuration for self-learning Lightning Strike
#[derive(Debug, Clone)]
pub struct SelfLearningConfig {
    /// Lightning Strike cognitive configuration handed to the SMT engine
    pub cognitive_config: CognitiveConfig,
    /// Maximum training examples to keep in memory; once exceeded, the oldest
    /// examples are evicted first-in-first-out
    pub max_training_examples: usize,
    /// Minimum examples before neural training starts  
    pub min_examples_for_training: usize,
    /// How often to retrain neural network (every N examples)
    pub retrain_frequency: usize,
    /// Neural network architecture layers (input size, hidden sizes, output size)
    pub network_layers: Vec<usize>,
    /// Neural training configuration (epochs, batch size, learning rate, ...)
    pub training_config: TrainingConfig,
    /// Confidence threshold a neural prediction must reach before it is used
    /// instead of falling back to SMT solving (compared against
    /// `SolutionResult::confidence`)
    pub confidence_threshold: f32,
    /// Path to save/load neural network weights; `None` disables persistence
    pub neural_save_path: Option<std::path::PathBuf>,
    /// Enable verbose output (solver steps, meta-reasoning, etc.)
    pub verbose: bool,
}

impl Default for SelfLearningConfig {
    /// Quiet, production-leaning defaults: fastest cognitive profile, a small
    /// 4-layer surrogate network, and weight persistence to a local JSON file.
    fn default() -> Self {
        // Training hyperparameters for the surrogate network.
        let training_config = TrainingConfig {
            epochs: 30,
            batch_size: 16,
            learning_rate: 0.001,
            validation_split: 0.2,
            early_stopping_patience: Some(8),
            verbose: false,
        };

        Self {
            cognitive_config: CognitiveConfig::fastest(),
            max_training_examples: 1000,
            min_examples_for_training: 25,
            retrain_frequency: 10,
            // Input features -> two hidden layers -> (satisfiable, time) outputs.
            network_layers: vec![6, 12, 8, 2],
            training_config,
            // 25% confidence threshold - reasonable for production use.
            confidence_threshold: 0.25,
            neural_save_path: Some(std::path::PathBuf::from("neural_weights.json")),
            // Default to quiet operation.
            verbose: false,
        }
    }
}

/// Training data for neural surrogate: one solved constraint problem reduced
/// to a numeric feature vector plus the observed SMT outcome.
#[derive(Debug, Clone)]
struct ConstraintTrainingData {
    /// Numeric features extracted from the constraint IR
    features: Vec<f32>,
    /// Training label: whether the SMT solver found the problem satisfiable
    satisfiable: bool,
    /// Observed wall-clock SMT solving time in milliseconds
    solving_time_ms: f32,
}

/// Self-learning Lightning Strike engine that improves over time.
///
/// Routes each problem either to a fast neural surrogate (once trained and
/// confident) or to the authoritative SMT engine, and feeds every SMT result
/// back into the surrogate's training set.
pub struct SelfLearningLightningStrike {
    /// Core Lightning Strike engine (authoritative SMT solving path)
    lightning_strike: LightningStrikeEngine,
    /// Neural network for fast constraint prediction
    neural_predictor: Arc<Mutex<TrainableNeuron>>,
    /// Training data from SMT solving experience (FIFO, capped by config)
    training_data: Arc<Mutex<VecDeque<ConstraintTrainingData>>>,
    /// Configuration
    config: SelfLearningConfig,
    /// Training statistics: examples accumulated since the last retrain
    examples_since_training: usize,
    /// Whether neural network has been trained at least once this session
    is_neural_trained: bool,
    /// Performance statistics: problems answered by the neural surrogate
    neural_predictions_used: usize,
    /// Performance statistics: problems that fell back to SMT solving
    smt_fallbacks_used: usize,
}

impl SelfLearningLightningStrike {
    /// Create new self-learning Lightning Strike with default configuration.
    pub fn new() -> Result<Self> {
        Self::with_config(SelfLearningConfig::default())
    }

    /// Create with custom configuration.
    ///
    /// Assembles the cognitive Lightning Strike stack (constraint learner ->
    /// meta-reasoner -> optimizer -> engine) and either restores previously
    /// persisted neural weights from `config.neural_save_path` or starts with
    /// a freshly initialized network.
    pub fn with_config(config: SelfLearningConfig) -> Result<Self> {
        // Build the symbolic solving engine from its cognitive components.
        let learner = ConstraintLearner::new(LearningConfig::default());
        let meta_reasoner = MetaReasoningEngine::new(MetaReasoningConfig::default(), learner);
        let optimizer = PerformanceOptimizer::new(OptimizationConfig::default());
        let lightning_strike =
            LightningStrikeEngine::new(config.cognitive_config.clone(), meta_reasoner, optimizer);

        // Resume from saved weights when a persistence path is configured;
        // otherwise start from scratch.
        let neural_predictor = if let Some(save_path) = &config.neural_save_path {
            TrainableNeuron::new_or_load(config.network_layers.clone(), save_path, config.verbose)
        } else {
            TrainableNeuron::new(config.network_layers.clone())
        };

        Ok(Self {
            lightning_strike,
            neural_predictor: Arc::new(Mutex::new(neural_predictor)),
            training_data: Arc::new(Mutex::new(VecDeque::new())),
            config,
            examples_since_training: 0,
            is_neural_trained: false,
            neural_predictions_used: 0,
            smt_fallbacks_used: 0,
        })
    }

    /// Solve constraint problem with self-learning capability.
    ///
    /// Fast path: if the surrogate has been trained and its prediction clears
    /// `config.confidence_threshold`, the neural answer is returned directly.
    /// Slow path: the problem is solved by the SMT engine and the outcome is
    /// recorded as a new training example.
    ///
    /// # Errors
    /// Propagates errors from the underlying Lightning Strike engine.
    pub fn solve_and_learn(
        &mut self,
        constraint_ir: &ConstraintIR,
        verbose: bool,
    ) -> Result<SolutionResult> {
        let start_time = Instant::now();

        // Try neural prediction first if trained and confident enough.
        if self.is_neural_trained {
            if let Ok(neural_result) = self.try_neural_prediction(constraint_ir) {
                if neural_result.confidence >= self.config.confidence_threshold {
                    self.neural_predictions_used += 1;
                    return Ok(neural_result);
                }
            }
        }

        // Fall back to Lightning Strike SMT solving.
        let smt_result = self.lightning_strike.solve(constraint_ir, verbose)?;
        let solving_duration = start_time.elapsed();

        self.smt_fallbacks_used += 1;

        // Every authoritative SMT result becomes surrogate training data.
        self.learn_from_smt_result(
            constraint_ir,
            &smt_result,
            solving_duration.as_millis() as f32,
        )?;

        Ok(smt_result)
    }

    /// Try neural prediction (returns low-confidence result if untrained).
    ///
    /// Only `features[0]` drives the network (`TrainableNeuron::forward` takes
    /// a single scalar); `features[1]` and `features[2]` feed the heuristic
    /// solving-time estimate.
    fn try_neural_prediction(&self, constraint_ir: &ConstraintIR) -> Result<SolutionResult> {
        let features = self.extract_constraint_features(constraint_ir);
        let primary_input = features[0];

        let (satisfiable_pred, time_pred) = {
            let mut network = self.neural_predictor.lock().unwrap();
            let output = network.forward(primary_input);

            // Interpret output as satisfiability probability.
            let satisfiable = output > 0.5;
            let estimated_time = if satisfiable {
                // Heuristic estimate based on complexity features, clamped to
                // a plausible 5ms-500ms range.
                (features[1] * features[2] * 20.0).clamp(5.0, 500.0)
            } else {
                2.0 // Quick unsatisfiable detection
            };

            (satisfiable, estimated_time)
        };

        // Generate a plausible mid-domain assignment if predicted satisfiable.
        // NOTE(review): keys are synthetic "v0".."vN" by enumeration order
        // rather than the IR's own variable names — confirm downstream
        // consumers expect this.
        let mut assignment = HashMap::new();
        if satisfiable_pred {
            for (i, var) in constraint_ir.variables.values().enumerate() {
                let var_key = format!("v{}", i);
                let value = match &var.domain {
                    Domain::Real { min, max } => {
                        let val = match (min, max) {
                            (Some(min_val), Some(max_val)) => (min_val + max_val) / 2.0,
                            (Some(min_val), None) => min_val + 1.0,
                            (None, Some(max_val)) => max_val - 1.0,
                            (None, None) => 0.0,
                        };
                        SolutionValue::Real(val)
                    }
                    Domain::Integer { min, max } => {
                        let val = match (min, max) {
                            (Some(min_val), Some(max_val)) => (min_val + max_val) / 2,
                            (Some(min_val), None) => min_val + 1,
                            (None, Some(max_val)) => max_val - 1,
                            (None, None) => 0,
                        };
                        SolutionValue::Integer(val)
                    }
                    Domain::Boolean => SolutionValue::Bool(true),
                    Domain::BitVector { width } => SolutionValue::BitVector {
                        value: 0,
                        width: *width,
                    },
                    Domain::String { .. } => SolutionValue::String("neural".to_string()),
                    Domain::Array { .. } => SolutionValue::String("array".to_string()),
                    Domain::Enum { values } => {
                        SolutionValue::String(values.first().cloned().unwrap_or_default())
                    }
                };
                assignment.insert(var_key, value);
            }
        }

        let confidence = if self.is_neural_trained {
            // Use accumulated experience across all runs instead of just the
            // current session's training batch.
            let training_size = self.training_data.lock().unwrap().len();
            let total_experience = self.neural_predictions_used + self.smt_fallbacks_used;

            // Base confidence on total problems solved, not just current training batch.
            let experience_confidence = (total_experience as f32 / 50.0).min(0.95); // 95% confidence after 50 problems
            let training_confidence = (training_size as f32 / 10.0).min(0.9); // 90% confidence after 10 training examples

            // Use the higher of the two confidences.
            experience_confidence.max(training_confidence)
        } else {
            0.1
        };

        Ok(SolutionResult {
            satisfiable: satisfiable_pred,
            assignment,
            confidence,
            estimated_time_ms: time_pred as f64,
            method: "NeuralPredictor".to_string(),
            winning_branch_id: Some("NeuralPredictor".to_string()),
            winning_strategy: Some("Neural".to_string()),
            winning_backend: Some("CPU".to_string()),
            variable_ranges: None,
        })
    }

    /// Learn from SMT solver result: append a training example (evicting the
    /// oldest when over capacity) and retrain the surrogate every
    /// `retrain_frequency` examples once enough data has accumulated.
    fn learn_from_smt_result(
        &mut self,
        constraint_ir: &ConstraintIR,
        smt_result: &SolutionResult,
        solving_time_ms: f32,
    ) -> Result<()> {
        let features = self.extract_constraint_features(constraint_ir);
        let training_example = ConstraintTrainingData {
            features,
            satisfiable: smt_result.satisfiable,
            solving_time_ms,
        };

        // Add to training data and capture the current size in a single lock
        // acquisition (the original code locked the mutex twice).
        let data_size = {
            let mut data = self.training_data.lock().unwrap();
            data.push_back(training_example);
            if data.len() > self.config.max_training_examples {
                data.pop_front();
            }
            data.len()
        };

        // Check if we should retrain.
        self.examples_since_training += 1;
        if self.examples_since_training >= self.config.retrain_frequency
            && data_size >= self.config.min_examples_for_training
        {
            self.retrain_neural_network()?;
            self.examples_since_training = 0;
        }

        Ok(())
    }

    /// Extract numerical features from constraint IR.
    ///
    /// Returns a 6-element vector of log-scaled problem-size features. Counts
    /// are clamped to >= 1 before taking logarithms so that degenerate inputs
    /// (no variables or no constraints) produce finite features instead of
    /// `-inf`/NaN, which would poison the surrogate's training data.
    fn extract_constraint_features(&self, constraint_ir: &ConstraintIR) -> Vec<f32> {
        let num_vars = constraint_ir.variables.len() as f32;
        let num_constraints = constraint_ir.constraints.len() as f32;
        let num_theories = constraint_ir.theory_tags.len() as f32;

        // Calculate domain complexity: average domain size across variables,
        // with fixed fallbacks for unbounded/opaque domains.
        let avg_domain_size: f32 = constraint_ir
            .variables
            .values()
            .map(|var| match &var.domain {
                Domain::Real { min, max } => match (min, max) {
                    (Some(min_val), Some(max_val)) => (max_val - min_val) as f32,
                    _ => 100.0,
                },
                Domain::Integer { min, max } => match (min, max) {
                    (Some(min_val), Some(max_val)) => (max_val - min_val) as f32,
                    _ => 1000.0,
                },
                Domain::Boolean => 2.0,
                Domain::BitVector { width } => (*width as f32).exp2(),
                Domain::String { max_length } => max_length.unwrap_or(255) as f32,
                Domain::Array { .. } => 100.0,
                Domain::Enum { values } => values.len() as f32,
            })
            .sum::<f32>()
            / num_vars.max(1.0);

        // Clamp to >= 1 so every ln() below is finite (ln(0) = -inf).
        let safe_vars = num_vars.max(1.0);
        let safe_constraints = num_constraints.max(1.0);

        vec![
            safe_vars.ln(),                        // Log variable count
            safe_constraints.ln(),                 // Log constraint count
            (safe_constraints / safe_vars).ln(),   // Constraint density
            avg_domain_size.max(1.0).ln(),         // Log domain complexity
            num_theories,                          // Theory diversity
            (safe_vars * safe_constraints).ln(),   // Problem size estimate
        ]
    }

    /// Retrain the neural network on accumulated training data.
    ///
    /// Currently trains only the satisfiability head (features[0] -> 0/1);
    /// the recorded `solving_time_ms` label is not yet used.
    fn retrain_neural_network(&mut self) -> Result<()> {
        // Snapshot the training data so the lock is not held during training.
        let training_data = {
            let data = self.training_data.lock().unwrap();
            data.clone().into_iter().collect::<Vec<_>>()
        };

        if training_data.is_empty() {
            return Ok(());
        }

        // Prepare training examples for satisfiability prediction.
        let sat_examples: Vec<TrainingExample> = training_data
            .iter()
            .map(|example| TrainingExample {
                input: example.features[0], // Use primary feature
                target: if example.satisfiable { 1.0 } else { 0.0 },
            })
            .collect();

        if sat_examples.is_empty() {
            return Ok(());
        }

        // Train neural network under the predictor lock.
        {
            let mut network = self.neural_predictor.lock().unwrap();
            let dataset = Dataset::new(sat_examples);
            let loss_fn = MSELoss;
            let mut optimizer = Adam::new(self.config.training_config.learning_rate);
            let trainer = Trainer::new(self.config.training_config.clone());

            let _history = trainer.train(&mut *network, &dataset, &loss_fn, &mut optimizer);
            self.is_neural_trained = true;
        }

        if self.config.verbose {
            println!(
                "🧠 Neural surrogate retrained on {} examples",
                training_data.len()
            );
        }

        Ok(())
    }

    /// Get performance statistics (snapshot of counters and training state).
    pub fn get_performance_stats(&self) -> PerformanceStats {
        let training_examples = self.training_data.lock().unwrap().len();
        let total_solves = self.neural_predictions_used + self.smt_fallbacks_used;

        PerformanceStats {
            total_problems_solved: total_solves,
            neural_predictions_used: self.neural_predictions_used,
            smt_fallbacks_used: self.smt_fallbacks_used,
            neural_success_rate: if total_solves > 0 {
                self.neural_predictions_used as f32 / total_solves as f32
            } else {
                0.0
            },
            training_examples_collected: training_examples,
            is_neural_trained: self.is_neural_trained,
        }
    }

    /// Force retrain neural network, regardless of the retrain schedule.
    pub fn force_retrain(&mut self) -> Result<()> {
        self.retrain_neural_network()
    }
}

/// Performance statistics for self-learning engine
#[derive(Debug, Clone)]
pub struct PerformanceStats {
    /// Total problems routed through either path (neural + SMT)
    pub total_problems_solved: usize,
    /// Problems answered directly by the neural surrogate
    pub neural_predictions_used: usize,
    /// Problems that fell back to SMT solving
    pub smt_fallbacks_used: usize,
    /// Fraction of problems answered by the surrogate (0.0 when none solved)
    pub neural_success_rate: f32,
    /// Training examples currently held in memory
    pub training_examples_collected: usize,
    /// Whether the surrogate has been trained at least once
    pub is_neural_trained: bool,
}

impl PerformanceStats {
    /// Mirrors the default `SelfLearningConfig::min_examples_for_training`.
    /// NOTE(review): duplicated here because the stats struct does not carry
    /// the live config value — will drift if the config default changes.
    const DEFAULT_MIN_EXAMPLES_FOR_TRAINING: usize = 25;

    /// Human-readable one-line summary of the engine's learning progress.
    pub fn summary(&self) -> String {
        if self.is_neural_trained {
            format!(
                "Solved {} problems ({:.1}% neural, {:.1}% SMT) - Trained on {} examples",
                self.total_problems_solved,
                self.neural_success_rate * 100.0,
                (1.0 - self.neural_success_rate) * 100.0,
                self.training_examples_collected
            )
        } else {
            format!(
                "Collecting training data: {}/{} examples needed",
                self.training_examples_collected,
                Self::DEFAULT_MIN_EXAMPLES_FOR_TRAINING
            )
        }
    }
}

impl SelfLearningLightningStrike {
    /// Manually save neural network weights to persistent storage.
    ///
    /// Silently returns `Ok(())` when no save path is configured; otherwise
    /// writes the weights and, in verbose mode, logs the destination.
    pub fn save_neural_weights(&self) -> Result<()> {
        // Guard clause: nothing to do without a configured save path.
        let save_path = match self.config.neural_save_path.as_ref() {
            Some(path) => path,
            None => return Ok(()),
        };

        let network = self.neural_predictor.lock().unwrap();
        network
            .save_to_file(save_path)
            .map_err(|e| anyhow::anyhow!("Failed to save neural weights: {}", e))?;

        if self.config.verbose {
            println!("💾 Neural network weights saved to {:?}", save_path);
        }
        Ok(())
    }
}

impl Drop for SelfLearningLightningStrike {
    /// Auto-save neural weights when the engine is destroyed. Best-effort:
    /// failure is reported to stderr rather than panicking inside `drop`.
    fn drop(&mut self) {
        match self.save_neural_weights() {
            Ok(()) => {}
            Err(e) => eprintln!("Warning: Failed to auto-save neural weights: {}", e),
        }
    }
}