// scirs2_datasets/neuromorphic_data_processor.rs
1//! Neuromorphic Data Processing Engine
2//!
3//! This module provides bio-inspired neuromorphic computing capabilities for
4//! advanced dataset processing, featuring spiking neural networks, synaptic
5//! plasticity, and brain-inspired learning algorithms.
6
7use crate::error::{DatasetsError, Result};
8use crate::utils::Dataset;
9use scirs2_core::ndarray::{s, Array1, Array2, Array3};
10use scirs2_core::random::prelude::*;
11use scirs2_core::random::rand_distributions::Uniform;
12use statrs::statistics::Statistics;
13use std::time::{Duration, Instant};
14
/// Neuromorphic data processor using spiking neural networks.
///
/// Construct via [`NeuromorphicProcessor::new`] or [`Default`], then tune
/// with the `with_*` builder methods.
#[derive(Debug, Clone)]
pub struct NeuromorphicProcessor {
    /// Network topology configuration (layer sizes and connectivity)
    network_config: NetworkTopology,
    /// Synaptic plasticity parameters (Hebbian strength, decay, weight bounds)
    plasticity_config: SynapticPlasticity,
    /// Spike timing dependent plasticity (STDP) enabled
    stdp_enabled: bool,
    /// Membrane potential decay rate applied every simulation step
    membrane_decay: f64,
    /// Spike threshold voltage (baseline for each neuron's adaptive threshold)
    spike_threshold: f64,
    /// Learning rate for synaptic updates (used by the STDP rule)
    learning_rate: f64,
}
31
/// Network topology configuration for neuromorphic processing.
///
/// Global neuron indexing places the input layer first, then the hidden
/// layer, then the output layer.
#[derive(Debug, Clone)]
pub struct NetworkTopology {
    /// Number of input neurons
    pub input_neurons: usize,
    /// Number of hidden layer neurons
    pub hidden_neurons: usize,
    /// Number of output neurons
    pub output_neurons: usize,
    /// Probability of creating a synapse between layer pairs
    /// (recurrent links use half this probability)
    pub connection_probability: f64,
    /// Enable recurrent (hidden-to-hidden) connections
    pub recurrent_connections: bool,
}
46
/// Synaptic plasticity configuration.
#[derive(Debug, Clone)]
pub struct SynapticPlasticity {
    /// Hebbian learning strength (η in Δw = η · pre · post)
    pub hebbian_strength: f64,
    /// Anti-Hebbian learning strength
    /// NOTE(review): not read anywhere in this module yet — confirm intent.
    pub anti_hebbian_strength: f64,
    /// Synaptic weight decay rate (proportional decay per Hebbian update)
    pub weight_decay: f64,
    /// Maximum synaptic weight (upper clamp bound)
    pub max_weight: f64,
    /// Minimum synaptic weight (lower clamp bound)
    pub min_weight: f64,
}
61
/// Spiking neuron state (leaky integrator with an adaptive threshold).
#[derive(Debug, Clone)]
#[allow(dead_code)]
struct NeuronState {
    /// Current membrane potential; decays by `membrane_decay` each step and
    /// resets to 0 after a spike
    membrane_potential: f64,
    /// Last spike time (recorded on spike but not read in this module)
    last_spike_time: Option<Instant>,
    /// Refractory period remaining (initialized to zero and never updated)
    refractory_time: Duration,
    /// Adaptive threshold: grows 5% on each spike, decays 1% per step, and
    /// never drops below the baseline `spike_threshold`
    adaptive_threshold: f64,
}
75
/// Synaptic connection with STDP bookkeeping.
#[derive(Debug, Clone)]
#[allow(dead_code)]
struct Synapse {
    /// Synaptic weight, clamped to the configured [min_weight, max_weight]
    /// range by the learning rules
    weight: f64,
    /// Pre-synaptic neuron index (global indexing, input layer first)
    pre_neuron: usize,
    /// Post-synaptic neuron index
    post_neuron: usize,
    /// Synaptic delay (stored at creation but not applied during
    /// propagation in this module)
    delay: Duration,
    /// Spike trace for STDP; decays by 5% per STDP pass
    spike_trace: f64,
}
91
/// Neuromorphic dataset transformation results.
#[derive(Debug, Clone)]
pub struct NeuromorphicTransform {
    /// Binary spike activity, shape (time, hidden neurons, samples)
    pub spike_patterns: Array3<f64>,
    /// Hidden-layer synaptic weight matrix, shape (hidden, hidden)
    pub connectivity_matrix: Array2<f64>,
    /// Learning score per sample (or per time step for temporal runs)
    pub learning_trajectory: Vec<f64>,
    /// Emergent features (spike rate + 0.1 · variance per neuron),
    /// shape (samples, hidden neurons)
    pub emergent_features: Array2<f64>,
}
104
105impl Default for NetworkTopology {
106    fn default() -> Self {
107        Self {
108            input_neurons: 100,
109            hidden_neurons: 256,
110            output_neurons: 10,
111            connection_probability: 0.15,
112            recurrent_connections: true,
113        }
114    }
115}
116
117impl Default for SynapticPlasticity {
118    fn default() -> Self {
119        Self {
120            hebbian_strength: 0.01,
121            anti_hebbian_strength: 0.005,
122            weight_decay: 0.001,
123            max_weight: 1.0,
124            min_weight: -1.0,
125        }
126    }
127}
128
129impl Default for NeuromorphicProcessor {
130    fn default() -> Self {
131        Self {
132            network_config: NetworkTopology::default(),
133            plasticity_config: SynapticPlasticity::default(),
134            stdp_enabled: true,
135            membrane_decay: 0.95,
136            spike_threshold: 1.0,
137            learning_rate: 0.001,
138        }
139    }
140}
141
142impl NeuromorphicProcessor {
143    /// Create a new neuromorphic processor
144    pub fn new(network_config: NetworkTopology, plasticity_config: SynapticPlasticity) -> Self {
145        Self {
146            network_config,
147            plasticity_config,
148            stdp_enabled: true,
149            membrane_decay: 0.95,
150            spike_threshold: 1.0,
151            learning_rate: 0.001,
152        }
153    }
154
155    /// Configure spike timing dependent plasticity
156    pub fn with_stdp(mut self, enabled: bool) -> Self {
157        self.stdp_enabled = enabled;
158        self
159    }
160
161    /// Set membrane dynamics parameters
162    pub fn with_membrane_dynamics(mut self, decay: f64, threshold: f64) -> Self {
163        self.membrane_decay = decay;
164        self.spike_threshold = threshold;
165        self
166    }
167
168    /// Transform dataset using neuromorphic processing
169    pub fn transform_dataset(
170        &self,
171        dataset: &Dataset,
172        simulation_time: Duration,
173        random_seed: Option<u64>,
174    ) -> Result<NeuromorphicTransform> {
175        let data = &dataset.data;
176        let n_samples = data.nrows();
177        let n_features = data.ncols();
178
179        if n_samples == 0 || n_features == 0 {
180            return Err(DatasetsError::InvalidFormat(
181                "Dataset must have samples and features".to_string(),
182            ));
183        }
184
185        let mut rng = match random_seed {
186            Some(_seed) => StdRng::seed_from_u64(_seed),
187            None => StdRng::from_rng(&mut thread_rng()),
188        };
189
190        // Initialize neuromorphic network
191        let mut network = self.initialize_network(&mut rng)?;
192
193        // Process each sample through the spiking network
194        let time_steps = (simulation_time.as_millis() as usize) / 10; // 10ms resolution
195        let mut spike_patterns =
196            Array3::zeros((time_steps, self.network_config.hidden_neurons, n_samples));
197        let mut learning_trajectory = Vec::with_capacity(n_samples);
198
199        for sample_idx in 0..n_samples {
200            let sample = data.row(sample_idx);
201            let (sample_spikes, learning_score) =
202                self.process_sample_neuromorphic(&sample, &mut network, time_steps, &mut rng)?;
203
204            // Store spike patterns for this sample
205            for time_idx in 0..time_steps {
206                for neuron_idx in 0..self.network_config.hidden_neurons {
207                    spike_patterns[[time_idx, neuron_idx, sample_idx]] =
208                        sample_spikes[[time_idx, neuron_idx]];
209                }
210            }
211
212            learning_trajectory.push(learning_score);
213        }
214
215        // Extract connectivity matrix
216        let connectivity_matrix = self.extract_connectivity_matrix(&network)?;
217
218        // Generate emergent feature representations
219        let emergent_features = self.extract_emergent_features(&spike_patterns)?;
220
221        Ok(NeuromorphicTransform {
222            spike_patterns,
223            connectivity_matrix,
224            learning_trajectory,
225            emergent_features,
226        })
227    }
228
229    /// Generate neuromorphic-enhanced dataset using bio-inspired processes
230    pub fn generate_bioinspired_dataset(
231        &self,
232        n_samples: usize,
233        n_features: usize,
234        adaptation_cycles: usize,
235        random_seed: Option<u64>,
236    ) -> Result<Dataset> {
237        let mut rng = match random_seed {
238            Some(_seed) => StdRng::seed_from_u64(_seed),
239            None => StdRng::from_rng(&mut thread_rng()),
240        };
241
242        // Initialize adaptive neural network
243        let mut network = self.initialize_network(&mut rng)?;
244
245        let mut data = Array2::zeros((n_samples, n_features));
246        let mut targets = Array1::zeros(n_samples);
247
248        // Generate _samples through neuromorphic adaptation
249        for sample_idx in 0..n_samples {
250            // Neural network driven feature generation
251            let neural_features = self.generate_neural_features(n_features, &network, &mut rng)?;
252
253            // Bio-inspired target assignment using competitive learning
254            let target = self.competitive_learning_assignment(&neural_features, &mut rng)?;
255
256            // Store generated sample
257            for feature_idx in 0..n_features {
258                data[[sample_idx, feature_idx]] = neural_features[feature_idx];
259            }
260            targets[sample_idx] = target;
261
262            // Adapt network based on generated sample (Hebbian plasticity)
263            if sample_idx % adaptation_cycles == 0 {
264                self.adapt_network_hebbian(&mut network, &neural_features)?;
265            }
266        }
267
268        Ok(Dataset::new(data, Some(targets)))
269    }
270
271    /// Process temporal sequences using spike timing
272    pub fn process_temporal_sequence(
273        &self,
274        sequence_data: &Array3<f64>, // (time, samples, features)
275        stdp_learning: bool,
276        random_seed: Option<u64>,
277    ) -> Result<NeuromorphicTransform> {
278        let (time_steps, n_samples, n_features) = sequence_data.dim();
279
280        if time_steps == 0 || n_samples == 0 || n_features == 0 {
281            return Err(DatasetsError::InvalidFormat(
282                "Sequence _data must have time, samples, and features".to_string(),
283            ));
284        }
285
286        let mut rng = match random_seed {
287            Some(_seed) => StdRng::seed_from_u64(_seed),
288            None => StdRng::from_rng(&mut thread_rng()),
289        };
290
291        let mut network = self.initialize_network(&mut rng)?;
292        let mut spike_patterns =
293            Array3::zeros((time_steps, self.network_config.hidden_neurons, n_samples));
294        let mut learning_trajectory = Vec::with_capacity(time_steps);
295
296        // Process temporal sequence with spike timing dependent plasticity
297        for time_idx in 0..time_steps {
298            let mut time_step_learning = 0.0;
299
300            for sample_idx in 0..n_samples {
301                let current_input = sequence_data.slice(s![time_idx, sample_idx, ..]);
302                let current_input_array = current_input.to_owned();
303
304                // Convert to spike trains and process
305                let spike_response = self.temporal_spike_processing(
306                    &current_input_array,
307                    &mut network,
308                    time_idx,
309                    &mut rng,
310                )?;
311
312                // Store spike responses
313                for neuron_idx in 0..self.network_config.hidden_neurons {
314                    spike_patterns[[time_idx, neuron_idx, sample_idx]] = spike_response[neuron_idx];
315                }
316
317                // Apply STDP _learning if enabled
318                if stdp_learning && self.stdp_enabled {
319                    let learning_change = self.apply_stdp_learning(&mut network, time_idx)?;
320                    time_step_learning += learning_change;
321                }
322            }
323
324            learning_trajectory.push(time_step_learning / n_samples as f64);
325        }
326
327        let connectivity_matrix = self.extract_connectivity_matrix(&network)?;
328        let emergent_features = self.extract_emergent_features(&spike_patterns)?;
329
330        Ok(NeuromorphicTransform {
331            spike_patterns,
332            connectivity_matrix,
333            learning_trajectory,
334            emergent_features,
335        })
336    }
337
338    // Private helper methods for neuromorphic processing
339
340    #[allow(clippy::needless_range_loop)]
341    fn initialize_network(&self, rng: &mut StdRng) -> Result<Vec<Vec<Synapse>>> {
342        let total_neurons = self.network_config.input_neurons
343            + self.network_config.hidden_neurons
344            + self.network_config.output_neurons;
345
346        let mut network = vec![Vec::new(); total_neurons];
347
348        // Create synaptic connections based on topology
349        for pre_idx in 0..self.network_config.input_neurons {
350            for post_idx in self.network_config.input_neurons
351                ..(self.network_config.input_neurons + self.network_config.hidden_neurons)
352            {
353                if rng.random::<f64>() < self.network_config.connection_probability {
354                    let weight =
355                        (rng.random::<f64>() - 0.5) * 2.0 * self.plasticity_config.max_weight;
356                    let delay = Duration::from_millis(
357                        rng.sample(Uniform::new(1, 5).expect("Operation failed")),
358                    );
359
360                    network[pre_idx].push(Synapse {
361                        weight,
362                        pre_neuron: pre_idx,
363                        post_neuron: post_idx,
364                        delay,
365                        spike_trace: 0.0,
366                    });
367                }
368            }
369        }
370
371        // Add recurrent connections if enabled
372        if self.network_config.recurrent_connections {
373            self.add_recurrent_connections(&mut network, rng)?;
374        }
375
376        Ok(network)
377    }
378
    /// Simulate the hidden layer for one input sample over `time_steps`
    /// discrete steps. Returns the (time × neuron) binary spike pattern and
    /// a learning score (0.1 per emitted spike, averaged over steps).
    ///
    /// NOTE(review): `_rng` is accepted but unused — stimulus stochasticity
    /// in `apply_input_stimulus` comes from `thread_rng()`, so results are
    /// not reproducible from `random_seed` alone; consider threading the
    /// seeded RNG through.
    fn process_sample_neuromorphic(
        &self,
        sample: &scirs2_core::ndarray::ArrayView1<f64>,
        network: &mut [Vec<Synapse>],
        time_steps: usize,
        _rng: &mut StdRng,
    ) -> Result<(Array2<f64>, f64)> {
        let n_neurons = self.network_config.hidden_neurons;
        let mut spike_pattern = Array2::zeros((time_steps, n_neurons));
        // All neurons start at rest with the baseline threshold.
        let mut neuron_states = vec![
            NeuronState {
                membrane_potential: 0.0,
                last_spike_time: None,
                refractory_time: Duration::ZERO,
                adaptive_threshold: self.spike_threshold,
            };
            n_neurons
        ];

        let mut learning_score = 0.0;

        // Simulate network dynamics over time
        for time_idx in 0..time_steps {
            // Apply input stimulus
            self.apply_input_stimulus(sample, &mut neuron_states, time_idx)?;

            // Update neuron dynamics
            for neuron_idx in 0..n_neurons {
                // Membrane potential decay (leaky integration)
                neuron_states[neuron_idx].membrane_potential *= self.membrane_decay;

                // Check for spike generation
                if neuron_states[neuron_idx].membrane_potential
                    > neuron_states[neuron_idx].adaptive_threshold
                {
                    spike_pattern[[time_idx, neuron_idx]] = 1.0;
                    neuron_states[neuron_idx].membrane_potential = 0.0; // Reset
                    neuron_states[neuron_idx].last_spike_time = Some(Instant::now());

                    // Adaptive threshold increase (spike-frequency adaptation)
                    neuron_states[neuron_idx].adaptive_threshold *= 1.05;

                    learning_score += 0.1; // Reward spiking activity
                }

                // Threshold decays back toward the baseline, never below it
                neuron_states[neuron_idx].adaptive_threshold =
                    (neuron_states[neuron_idx].adaptive_threshold * 0.99).max(self.spike_threshold);
            }

            // Synaptic transmission
            self.propagate_spikes(network, &spike_pattern, &mut neuron_states, time_idx)?;
        }

        // NOTE(review): yields NaN when time_steps == 0 (simulation < 10 ms).
        Ok((spike_pattern, learning_score / time_steps as f64))
    }
435
    /// Inject one input sample via rate encoding: larger |feature| values
    /// give higher spike probability, and the injected current carries the
    /// feature's sign. Feature i drives hidden neuron i one-to-one.
    ///
    /// NOTE(review): uses `thread_rng()` rather than the processor's seeded
    /// RNG, so stimulus spikes are non-deterministic even for seeded runs.
    fn apply_input_stimulus(
        &self,
        sample: &scirs2_core::ndarray::ArrayView1<f64>,
        neuron_states: &mut [NeuronState],
        _time_idx: usize,
    ) -> Result<()> {
        // Convert input features to spike trains using rate encoding
        for (feature_idx, &feature_value) in sample.iter().enumerate() {
            if feature_idx < self.network_config.input_neurons {
                // Rate encoding: tanh squashes |value| into [0, 1), then the
                // shift maps it to a spike probability in [0.5, 1).
                let spike_probability = (feature_value.abs().tanh() + 1.0) / 2.0;
                let spike_current = if thread_rng().random::<f64>() < spike_probability {
                    0.5 * feature_value.signum()
                } else {
                    0.0
                };

                // Apply to the corresponding hidden neuron (one-to-one map,
                // independent of the synaptic topology).
                if feature_idx < neuron_states.len() {
                    neuron_states[feature_idx].membrane_potential += spike_current;
                }
            }
        }

        Ok(())
    }
462
    /// Propagate the current time step's spikes along each neuron's
    /// outgoing synapses, adding `spike_strength * weight` to post-synaptic
    /// membrane potentials.
    ///
    /// NOTE(review): `network` is indexed by global neuron id (input layer
    /// first) while `spike_pattern` columns are hidden-layer indices, so
    /// input-neuron synapse lists are driven by hidden-neuron spikes here —
    /// confirm the intended index mapping. Also `synapse.delay` is never
    /// applied despite the comment below.
    fn propagate_spikes(
        &self,
        network: &mut [Vec<Synapse>],
        spike_pattern: &Array2<f64>,
        neuron_states: &mut [NeuronState],
        time_idx: usize,
    ) -> Result<()> {
        // Propagate spikes through synaptic connections
        for (pre_neuron_idx, synapses) in network.iter().enumerate() {
            // Only indices that have a column in the spike pattern can fire.
            if pre_neuron_idx < spike_pattern.ncols() {
                let spike_strength = spike_pattern[[time_idx, pre_neuron_idx]];

                if spike_strength > 0.0 {
                    for synapse in synapses {
                        let post_neuron_idx = synapse.post_neuron;
                        if post_neuron_idx < neuron_states.len() {
                            // Apply synaptic current with delay consideration
                            let synaptic_current = spike_strength * synapse.weight;
                            neuron_states[post_neuron_idx].membrane_potential += synaptic_current;
                        }
                    }
                }
            }
        }

        Ok(())
    }
490
491    fn extract_connectivity_matrix(&self, network: &[Vec<Synapse>]) -> Result<Array2<f64>> {
492        let n_neurons = self.network_config.hidden_neurons;
493        let mut connectivity = Array2::zeros((n_neurons, n_neurons));
494
495        for (pre_idx, synapses) in network.iter().enumerate() {
496            for synapse in synapses {
497                if pre_idx < n_neurons && synapse.post_neuron < n_neurons {
498                    connectivity[[pre_idx, synapse.post_neuron]] = synapse.weight;
499                }
500            }
501        }
502
503        Ok(connectivity)
504    }
505
506    fn extract_emergent_features(&self, spike_patterns: &Array3<f64>) -> Result<Array2<f64>> {
507        let (time_steps, n_neurons, n_samples) = spike_patterns.dim();
508        let mut features = Array2::zeros((n_samples, n_neurons));
509
510        // Extract temporal spike statistics as emergent features
511        for sample_idx in 0..n_samples {
512            for neuron_idx in 0..n_neurons {
513                let neuron_spikes = spike_patterns.slice(s![.., neuron_idx, sample_idx]);
514
515                // Compute spike rate and temporal _patterns
516                let spike_rate = neuron_spikes.sum() / time_steps as f64;
517                let spike_variance = neuron_spikes.variance();
518
519                // Combine metrics for emergent feature
520                features[[sample_idx, neuron_idx]] = spike_rate + 0.1 * spike_variance;
521            }
522        }
523
524        Ok(features)
525    }
526
527    fn generate_neural_features(
528        &self,
529        n_features: usize,
530        network: &[Vec<Synapse>],
531        rng: &mut StdRng,
532    ) -> Result<Array1<f64>> {
533        let mut features = Array1::zeros(n_features);
534
535        // Use network weights to influence feature generation
536        for feature_idx in 0..n_features {
537            let mut feature_value = rng.random::<f64>() - 0.5;
538
539            // Neural network influence
540            if feature_idx < network.len() {
541                let synaptic_influence: f64 = network[feature_idx]
542                    .iter()
543                    .map(|synapse| synapse.weight)
544                    .sum::<f64>()
545                    / network[feature_idx].len().max(1) as f64;
546
547                feature_value += 0.3 * synaptic_influence;
548            }
549
550            features[feature_idx] = feature_value.tanh(); // Bounded activation
551        }
552
553        Ok(features)
554    }
555
556    fn competitive_learning_assignment(
557        &self,
558        features: &Array1<f64>,
559        rng: &mut StdRng,
560    ) -> Result<f64> {
561        // Winner-take-all competitive learning for target assignment
562        let max_feature_idx = features
563            .iter()
564            .enumerate()
565            .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
566            .map(|(idx, _)| idx)
567            .unwrap_or(0);
568
569        // Add some noise for variability
570        let noise = rng.random::<f64>() * 0.1 - 0.05;
571        Ok(max_feature_idx as f64 + noise)
572    }
573
574    fn adapt_network_hebbian(
575        &self,
576        network: &mut [Vec<Synapse>],
577        features: &Array1<f64>,
578    ) -> Result<()> {
579        // Apply Hebbian learning: "neurons that fire together, wire together"
580        for (pre_idx, synapses) in network.iter_mut().enumerate() {
581            if pre_idx < features.len() {
582                let pre_activity = features[pre_idx];
583
584                for synapse in synapses {
585                    if synapse.post_neuron < features.len() {
586                        let post_activity = features[synapse.post_neuron];
587
588                        // Hebbian update: Δw = η * pre * post
589                        let hebbian_change =
590                            self.plasticity_config.hebbian_strength * pre_activity * post_activity;
591
592                        // Weight decay
593                        let decay_change = -self.plasticity_config.weight_decay * synapse.weight;
594
595                        // Update weight with bounds checking
596                        synapse.weight += hebbian_change + decay_change;
597                        synapse.weight = synapse.weight.clamp(
598                            self.plasticity_config.min_weight,
599                            self.plasticity_config.max_weight,
600                        );
601                    }
602                }
603            }
604        }
605
606        Ok(())
607    }
608
609    fn temporal_spike_processing(
610        &self,
611        input: &Array1<f64>,
612        _network: &mut [Vec<Synapse>],
613        time_idx: usize,
614        _rng: &mut StdRng,
615    ) -> Result<Array1<f64>> {
616        let n_neurons = self.network_config.hidden_neurons;
617        let mut spike_response = Array1::zeros(n_neurons);
618
619        // Convert temporal input to spike response
620        for (neuron_idx, &input_val) in input.iter().enumerate().take(n_neurons) {
621            // Temporal encoding with time dependency
622            let temporal_factor = 1.0 - (time_idx as f64 / 100.0).min(0.9);
623            let spike_probability = (input_val.abs() * temporal_factor).tanh();
624
625            spike_response[neuron_idx] = if spike_probability > 0.5 { 1.0 } else { 0.0 };
626        }
627
628        Ok(spike_response)
629    }
630
631    fn apply_stdp_learning(&self, network: &mut [Vec<Synapse>], time_idx: usize) -> Result<f64> {
632        let mut total_learning_change = 0.0;
633
634        // Spike Timing Dependent Plasticity (STDP)
635        for synapses in network.iter_mut() {
636            for synapse in synapses {
637                // Simplified STDP: recent activity strengthens connections
638                let time_factor = 1.0 / (1.0 + time_idx as f64 * 0.01);
639                let stdp_change = self.learning_rate * time_factor * synapse.spike_trace;
640
641                synapse.weight += stdp_change;
642                synapse.weight = synapse.weight.clamp(
643                    self.plasticity_config.min_weight,
644                    self.plasticity_config.max_weight,
645                );
646
647                // Decay spike trace
648                synapse.spike_trace *= 0.95;
649
650                total_learning_change += stdp_change.abs();
651            }
652        }
653
654        Ok(total_learning_change)
655    }
656
657    #[allow(clippy::needless_range_loop)]
658    fn add_recurrent_connections(
659        &self,
660        network: &mut [Vec<Synapse>],
661        rng: &mut StdRng,
662    ) -> Result<()> {
663        let start_hidden = self.network_config.input_neurons;
664        let end_hidden = start_hidden + self.network_config.hidden_neurons;
665
666        // Add recurrent connections within hidden layer
667        for pre_idx in start_hidden..end_hidden {
668            for post_idx in start_hidden..end_hidden {
669                if pre_idx != post_idx
670                    && rng.random::<f64>() < self.network_config.connection_probability * 0.5
671                {
672                    let weight =
673                        (rng.random::<f64>() - 0.5) * self.plasticity_config.max_weight * 0.5;
674                    let delay = Duration::from_millis(
675                        rng.sample(Uniform::new(2, 10).expect("Operation failed")),
676                    );
677
678                    network[pre_idx].push(Synapse {
679                        weight,
680                        pre_neuron: pre_idx,
681                        post_neuron: post_idx,
682                        delay,
683                        spike_trace: 0.0,
684                    });
685                }
686            }
687        }
688
689        Ok(())
690    }
691}
692
693/// Convenience function to create neuromorphic processor with default settings
694#[allow(dead_code)]
695pub fn create_neuromorphic_processor() -> NeuromorphicProcessor {
696    NeuromorphicProcessor::default()
697}
698
699/// Convenience function to create neuromorphic processor with custom topology
700#[allow(dead_code)]
701pub fn create_neuromorphic_processor_with_topology(
702    input_neurons: usize,
703    hidden_neurons: usize,
704    output_neurons: usize,
705) -> NeuromorphicProcessor {
706    let topology = NetworkTopology {
707        input_neurons,
708        hidden_neurons,
709        output_neurons,
710        connection_probability: 0.15,
711        recurrent_connections: true,
712    };
713
714    NeuromorphicProcessor::new(topology, SynapticPlasticity::default())
715}
716
#[cfg(test)]
mod tests {
    // `use super::*` already brings the parent module's imports (Array1,
    // Array2, Array3, Duration, ...) into scope; the previous explicit
    // `Array2` and `Uniform` imports were redundant/unused and triggered
    // compiler warnings.
    use super::*;

    /// End-to-end transform: checks all output shapes for a 10×4 dataset
    /// simulated for 100 ms (10 steps at 10 ms resolution).
    #[test]
    fn test_neuromorphic_dataset_transformation() {
        let data = Array2::from_shape_vec((10, 4), (0..40).map(|x| x as f64).collect())
            .expect("Operation failed");
        let targets = Array1::from((0..10).map(|x| (x % 2) as f64).collect::<Vec<_>>());
        let dataset = Dataset::new(data, Some(targets));

        let processor = NeuromorphicProcessor::default();
        let transform = processor
            .transform_dataset(&dataset, Duration::from_millis(100), Some(42))
            .expect("Operation failed");

        assert_eq!(transform.spike_patterns.dim().0, 10); // 100ms / 10ms = 10 time steps
        assert_eq!(transform.spike_patterns.dim().1, 256); // Default hidden neurons
        assert_eq!(transform.spike_patterns.dim().2, 10); // 10 samples
        assert_eq!(transform.connectivity_matrix.dim(), (256, 256));
        assert_eq!(transform.learning_trajectory.len(), 10);
        assert_eq!(transform.emergent_features.dim(), (10, 256));
    }

    /// Generated datasets must have the requested shape and carry targets.
    #[test]
    fn test_bioinspired_dataset_generation() {
        let processor = NeuromorphicProcessor::default();
        let dataset = processor
            .generate_bioinspired_dataset(50, 5, 10, Some(42))
            .expect("Operation failed");

        assert_eq!(dataset.n_samples(), 50);
        assert_eq!(dataset.n_features(), 5);
        assert!(dataset.has_target());
    }

    /// Temporal processing preserves the time axis and produces one
    /// learning-trajectory entry per time step.
    #[test]
    fn test_temporal_sequence_processing() {
        let processor = NeuromorphicProcessor::default();
        let sequence = Array3::from_shape_fn((5, 10, 4), |(t, s, f)| {
            (t as f64 + s as f64 + f as f64) * 0.1
        });

        let result = processor
            .process_temporal_sequence(&sequence, true, Some(42))
            .expect("Operation failed");

        assert_eq!(result.spike_patterns.dim(), (5, 256, 10)); // time, neurons, samples
        assert_eq!(result.learning_trajectory.len(), 5);
    }

    /// Custom topology and plasticity settings must be stored verbatim.
    #[test]
    fn test_network_topology_configuration() {
        let topology = NetworkTopology {
            input_neurons: 50,
            hidden_neurons: 128,
            output_neurons: 5,
            connection_probability: 0.2,
            recurrent_connections: false,
        };

        let plasticity = SynapticPlasticity {
            hebbian_strength: 0.02,
            anti_hebbian_strength: 0.01,
            weight_decay: 0.0005,
            max_weight: 2.0,
            min_weight: -2.0,
        };

        let processor = NeuromorphicProcessor::new(topology.clone(), plasticity.clone());
        assert_eq!(processor.network_config.input_neurons, 50);
        assert_eq!(processor.network_config.hidden_neurons, 128);
        assert_eq!(processor.plasticity_config.hebbian_strength, 0.02);
    }

    /// Builder methods must override the corresponding defaults.
    #[test]
    fn test_stdp_configuration() {
        let processor = NeuromorphicProcessor::default()
            .with_stdp(false)
            .with_membrane_dynamics(0.9, 1.5);

        assert!(!processor.stdp_enabled);
        assert_eq!(processor.membrane_decay, 0.9);
        assert_eq!(processor.spike_threshold, 1.5);
    }
}
804}