scirs2_spatial/neuromorphic/algorithms/competitive_learning.rs

//! Competitive Learning Algorithms for Neuromorphic Spatial Processing
//!
//! This module implements advanced competitive learning algorithms including
//! winner-take-all dynamics, homeostatic plasticity, and multi-timescale adaptation
//! for spatial data clustering and pattern discovery.

use crate::error::{SpatialError, SpatialResult};
use scirs2_core::ndarray::{Array1, Array2, ArrayView1, ArrayView2};
use scirs2_core::random::Rng;
use std::collections::VecDeque;

/// Bio-inspired competitive learning for spatial clustering
///
/// This clusterer uses winner-take-all dynamics with lateral inhibition to discover
/// clusters in spatial data. Neurons compete for activation, with the winner
/// (the neuron with the strongest response) being updated while others are inhibited.
///
/// # Features
/// - Winner-take-all competitive dynamics
/// - Lateral inhibition for neural competition
/// - Adaptive learning rates
/// - Neighborhood function for topological organization
/// - Distance-based neuron activation
///
/// # Example
/// ```rust
/// use scirs2_core::ndarray::Array2;
/// use scirs2_spatial::neuromorphic::algorithms::CompetitiveNeuralClusterer;
///
/// let points = Array2::from_shape_vec((4, 2), vec![
///     0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0
/// ]).unwrap();
///
/// let mut clusterer = CompetitiveNeuralClusterer::new(2, 2);
/// let assignments = clusterer.fit(&points.view(), 100).unwrap();
/// println!("Cluster assignments: {:?}", assignments);
/// ```
#[derive(Debug, Clone)]
pub struct CompetitiveNeuralClusterer {
    /// Network neurons representing cluster centers
    neurons: Vec<Array1<f64>>,
    /// Learning rates for each neuron
    learning_rates: Vec<f64>,
    /// Lateral inhibition strengths
    inhibition_strengths: Array2<f64>,
    /// Winner-take-all threshold
    #[allow(dead_code)]
    wta_threshold: f64,
    /// Neighborhood function parameters
    neighborhood_sigma: f64,
}

impl CompetitiveNeuralClusterer {
    /// Create new competitive neural clusterer
    ///
    /// # Arguments
    /// * `num_clusters` - Number of clusters to discover
    /// * `input_dims` - Number of input dimensions
    ///
    /// # Returns
    /// A new `CompetitiveNeuralClusterer` with randomly initialized neurons
    pub fn new(num_clusters: usize, input_dims: usize) -> Self {
        let mut neurons = Vec::new();
        let mut learning_rates = Vec::new();
        let mut rng = scirs2_core::random::rng();

        // Initialize neurons with random weights
        for _ in 0..num_clusters {
            let weights = Array1::from_shape_fn(input_dims, |_| rng.gen_range(0.0..1.0));
            neurons.push(weights);
            learning_rates.push(0.1);
        }

        // Initialize inhibition matrix: no self-inhibition, uniform lateral inhibition
        let inhibition_strengths = Array2::from_shape_fn((num_clusters, num_clusters), |(i, j)| {
            if i == j {
                0.0
            } else {
                0.1
            }
        });

        Self {
            neurons,
            learning_rates,
            inhibition_strengths,
            wta_threshold: 0.5,
            neighborhood_sigma: 1.0,
        }
    }

    /// Configure neighborhood parameters
    ///
    /// # Arguments
    /// * `sigma` - Neighborhood function width
    /// * `wta_threshold` - Winner-take-all threshold
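    ///
    /// # Example
    /// A minimal sketch of the builder-style configuration; the parameter
    /// values here are illustrative, not tuned defaults:
    /// ```rust
    /// use scirs2_spatial::neuromorphic::algorithms::CompetitiveNeuralClusterer;
    ///
    /// let clusterer = CompetitiveNeuralClusterer::new(3, 2)
    ///     .with_competition_params(2.0, 0.6);
    /// assert_eq!(clusterer.num_clusters(), 3);
    /// ```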
    pub fn with_competition_params(mut self, sigma: f64, wta_threshold: f64) -> Self {
        self.neighborhood_sigma = sigma;
        self.wta_threshold = wta_threshold;
        self
    }

    /// Train competitive network on spatial data
    ///
    /// Applies competitive learning dynamics where neurons compete for activation
    /// and the winner adapts towards the input pattern while inhibiting neighbors.
    ///
    /// # Arguments
    /// * `points` - Input spatial points (n_points × n_dims)
    /// * `epochs` - Number of training epochs
    ///
    /// # Returns
    /// Cluster assignments for each input point
    pub fn fit(
        &mut self,
        points: &ArrayView2<'_, f64>,
        epochs: usize,
    ) -> SpatialResult<Array1<usize>> {
        let n_points = points.dim().0;
        let mut assignments = Array1::zeros(n_points);

        if n_points == 0 {
            return Ok(assignments);
        }

        for epoch in 0..epochs {
            // Decay the learning factor and neighborhood size linearly over epochs
            let epoch_factor = 1.0 - (epoch as f64) / (epochs as f64);
            let current_sigma = self.neighborhood_sigma * epoch_factor;

            for (point_idx, point) in points.outer_iter().enumerate() {
                // Find winning neuron
                let winner = self.find_winner(&point.to_owned())?;
                assignments[point_idx] = winner;

                // Update winner and neighbors
                self.update_neurons(&point.to_owned(), winner, current_sigma, epoch_factor)?;

                // Apply lateral inhibition
                self.apply_lateral_inhibition(winner)?;
            }
        }

        Ok(assignments)
    }

    /// Find winning neuron using competitive dynamics
    ///
    /// Returns the index of the neuron whose weight vector has the smallest
    /// Euclidean distance to the input pattern.
    fn find_winner(&self, input: &Array1<f64>) -> SpatialResult<usize> {
        let mut min_distance = f64::INFINITY;
        let mut winner = 0;

        for (i, neuron) in self.neurons.iter().enumerate() {
            // Calculate Euclidean distance
            let distance: f64 = input
                .iter()
                .zip(neuron.iter())
                .map(|(&a, &b)| (a - b).powi(2))
                .sum::<f64>()
                .sqrt();

            if distance < min_distance {
                min_distance = distance;
                winner = i;
            }
        }

        Ok(winner)
    }

    /// Update neuron weights using competitive learning
    ///
    /// Applies weight updates to the winner and its neighbors based on a
    /// Gaussian neighborhood over neuron indices (a one-dimensional topology)
    /// and the learning rate schedule.
    fn update_neurons(
        &mut self,
        input: &Array1<f64>,
        winner: usize,
        sigma: f64,
        learning_factor: f64,
    ) -> SpatialResult<()> {
        for (i, neuron) in self.neurons.iter_mut().enumerate() {
            // Gaussian neighborhood influence: exp(-d^2 / (2 * sigma^2)),
            // where d is the index distance to the winner
            let distance_to_winner = (i as i32 - winner as i32).abs() as f64;
            let neighborhood_influence =
                (-distance_to_winner.powi(2) / (2.0 * sigma.powi(2))).exp();

            // Update neuron weights towards the input
            let effective_learning_rate =
                self.learning_rates[i] * learning_factor * neighborhood_influence;

            for (weight, &input_val) in neuron.iter_mut().zip(input.iter()) {
                *weight += effective_learning_rate * (input_val - *weight);
            }
        }

        Ok(())
    }

    /// Apply lateral inhibition between neurons
    ///
    /// Strengthens inhibitory connections and adjusts learning rates
    /// based on competitive dynamics.
    fn apply_lateral_inhibition(&mut self, winner: usize) -> SpatialResult<()> {
        // Strengthen inhibitory connections from winner to others
        for i in 0..self.neurons.len() {
            if i != winner {
                self.inhibition_strengths[[winner, i]] += 0.001;
                self.inhibition_strengths[[winner, i]] =
                    self.inhibition_strengths[[winner, i]].min(0.5);

                // Reduce learning rate of inhibited neurons
                self.learning_rates[i] *= 0.99;
                self.learning_rates[i] = self.learning_rates[i].max(0.001);
            }
        }

        // Boost winner's learning rate slightly
        self.learning_rates[winner] *= 1.001;
        self.learning_rates[winner] = self.learning_rates[winner].min(0.2);

        Ok(())
    }

    /// Get cluster centers (neuron weights)
    ///
    /// # Returns
    /// Array containing the current neuron weight vectors as cluster centers
    pub fn get_cluster_centers(&self) -> Array2<f64> {
        let num_clusters = self.neurons.len();
        let input_dims = self.neurons[0].len();

        let mut centers = Array2::zeros((num_clusters, input_dims));
        for (i, neuron) in self.neurons.iter().enumerate() {
            centers.row_mut(i).assign(neuron);
        }

        centers
    }

    /// Get current learning rates
    pub fn learning_rates(&self) -> &[f64] {
        &self.learning_rates
    }

    /// Get inhibition strength matrix
    pub fn inhibition_strengths(&self) -> &Array2<f64> {
        &self.inhibition_strengths
    }

    /// Get number of clusters
    pub fn num_clusters(&self) -> usize {
        self.neurons.len()
    }

    /// Reset the clusterer to initial state
    pub fn reset(&mut self) {
        let mut rng = scirs2_core::random::rng();

        // Reinitialize neuron weights
        for neuron in &mut self.neurons {
            for weight in neuron.iter_mut() {
                *weight = rng.gen_range(0.0..1.0);
            }
        }

        // Reset learning rates
        for rate in &mut self.learning_rates {
            *rate = 0.1;
        }

        // Reset inhibition strengths
        let num_clusters = self.neurons.len();
        for i in 0..num_clusters {
            for j in 0..num_clusters {
                self.inhibition_strengths[[i, j]] = if i == j { 0.0 } else { 0.1 };
            }
        }
    }
}

/// Advanced homeostatic plasticity for neuromorphic spatial learning
///
/// This clusterer implements homeostatic plasticity mechanisms that maintain
/// stable neural activity levels while adapting to input patterns. It includes
/// intrinsic plasticity, synaptic scaling, and multi-timescale adaptation.
///
/// # Features
/// - Homeostatic neurons with intrinsic plasticity
/// - Synaptic scaling for stability
/// - Multi-timescale adaptation (fast, medium, slow)
/// - Metaplasticity for adaptive learning rates
/// - Target firing rate maintenance
///
/// # Example
/// ```rust
/// use scirs2_core::ndarray::Array2;
/// use scirs2_spatial::neuromorphic::algorithms::HomeostaticNeuralClusterer;
///
/// let points = Array2::from_shape_vec((4, 2), vec![
///     0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0
/// ]).unwrap();
///
/// let mut clusterer = HomeostaticNeuralClusterer::new(2, 2)
///     .with_homeostatic_params(0.1, 1000.0);
/// let assignments = clusterer.fit(&points.view(), 50).unwrap();
/// ```
#[derive(Debug, Clone)]
pub struct HomeostaticNeuralClusterer {
    /// Number of clusters (output neurons)
    num_clusters: usize,
    /// Input dimension
    input_dim: usize,
    /// Output neurons with homeostatic mechanisms
    output_neurons: Vec<HomeostaticNeuron>,
    /// Synaptic weights
    weights: Array2<f64>,
    /// Global inhibition strength
    #[allow(dead_code)]
    global_inhibition: f64,
    /// Learning rate adaptation parameters
    learning_rate_adaptation: LearningRateAdaptation,
    /// Metaplasticity parameters
    metaplasticity: MetaplasticityController,
    /// Multi-timescale adaptation
    multi_timescale: MultiTimescaleAdaptation,
}

impl HomeostaticNeuralClusterer {
    /// Create new homeostatic neural clusterer
    ///
    /// # Arguments
    /// * `num_clusters` - Number of clusters to discover
    /// * `input_dim` - Number of input dimensions
    ///
    /// # Returns
    /// A new `HomeostaticNeuralClusterer` with default parameters
    pub fn new(num_clusters: usize, input_dim: usize) -> Self {
        let mut output_neurons = Vec::new();
        for _ in 0..num_clusters {
            output_neurons.push(HomeostaticNeuron::new());
        }

        let weights = Array2::zeros((num_clusters, input_dim));

        let learning_rate_adaptation = LearningRateAdaptation::new(0.01);
        let metaplasticity = MetaplasticityController::new(num_clusters, input_dim);
        let multi_timescale = MultiTimescaleAdaptation::new();

        Self {
            num_clusters,
            input_dim,
            output_neurons,
            weights,
            global_inhibition: 0.1,
            learning_rate_adaptation,
            metaplasticity,
            multi_timescale,
        }
    }

    /// Configure homeostatic parameters
    ///
    /// # Arguments
    /// * `target_firing_rate` - Target firing rate for neurons
    /// * `homeostatic_tau` - Time constant for homeostatic adaptation
    pub fn with_homeostatic_params(
        mut self,
        target_firing_rate: f64,
        homeostatic_tau: f64,
    ) -> Self {
        for neuron in &mut self.output_neurons {
            neuron.target_firing_rate = target_firing_rate;
            neuron.homeostatic_tau = homeostatic_tau;
        }
        self
    }

    /// Fit homeostatic clustering model
    ///
    /// Trains the network using homeostatic plasticity mechanisms to discover
    /// stable cluster representations while maintaining target activity levels.
    ///
    /// # Arguments
    /// * `points` - Input spatial points (n_points × n_dims)
    /// * `epochs` - Number of training epochs
    ///
    /// # Returns
    /// Cluster assignments for each input point
    pub fn fit(&mut self, points: &ArrayView2<f64>, epochs: usize) -> SpatialResult<Array1<usize>> {
        let (n_samples, n_features) = points.dim();

        if n_features != self.input_dim {
            return Err(SpatialError::InvalidInput(
                "Input dimension mismatch".to_string(),
            ));
        }

        if n_samples == 0 {
            return Ok(Array1::zeros(0));
        }

        // Initialize weights randomly
        self.initialize_weights()?;

        let mut assignments = Array1::zeros(n_samples);
        let current_time = 0.0;
        let dt = 0.001; // 1ms time step

        for epoch in 0..epochs {
            let mut epoch_error = 0.0;

            for (sample_idx, sample) in points.outer_iter().enumerate() {
                // Forward pass with homeostatic mechanisms
                let (winner_idx, neuron_activities) = self.forward_pass_homeostatic(
                    &sample,
                    current_time + (epoch * n_samples + sample_idx) as f64 * dt,
                )?;

                assignments[sample_idx] = winner_idx;

                // Compute reconstruction error
                let reconstruction = self.weights.row(winner_idx);
                let error: f64 = sample
                    .iter()
                    .zip(reconstruction.iter())
                    .map(|(&x, &w)| (x - w).powi(2))
                    .sum();
                epoch_error += error;

                // Homeostatic learning update
                self.homeostatic_learning_update(
                    &sample,
                    winner_idx,
                    &neuron_activities,
                    error,
                    current_time + (epoch * n_samples + sample_idx) as f64 * dt,
                )?;
            }

            // Update learning rate based on performance
            self.learning_rate_adaptation
                .update_learning_rate(epoch_error / n_samples as f64);

            // Update multi-timescale adaptation
            self.multi_timescale
                .update(epoch_error / n_samples as f64, dt * n_samples as f64);

            // Homeostatic updates at end of epoch
            self.update_homeostatic_mechanisms(dt * n_samples as f64)?;
        }

        Ok(assignments)
    }

    /// Get cluster centers (weights)
    pub fn get_cluster_centers(&self) -> Array2<f64> {
        self.weights.clone()
    }

    /// Get number of clusters
    pub fn num_clusters(&self) -> usize {
        self.num_clusters
    }

    /// Get neuron firing rates
    pub fn neuron_firing_rates(&self) -> Vec<f64> {
        self.output_neurons
            .iter()
            .map(|neuron| neuron.actual_firing_rate)
            .collect()
    }

    /// Get current learning rate
    pub fn current_learning_rate(&self) -> f64 {
        self.learning_rate_adaptation.base_rate
    }

    /// Forward pass with homeostatic mechanisms
    ///
    /// Computes neural activities with homeostatic modulation and finds
    /// the winning neuron based on highest activity.
    fn forward_pass_homeostatic(
        &mut self,
        input: &ArrayView1<f64>,
        current_time: f64,
    ) -> SpatialResult<(usize, Array1<f64>)> {
        let mut activities = Array1::zeros(self.num_clusters);

        // Compute neural activities with homeostatic modulation
        for (neuron_idx, neuron) in self.output_neurons.iter_mut().enumerate() {
            let weights_row = self.weights.row(neuron_idx);

            // Compute dot product (synaptic input)
            let synaptic_input: f64 = input
                .iter()
                .zip(weights_row.iter())
                .map(|(&x, &w)| x * w)
                .sum();

            // Apply synaptic scaling
            let scaled_input = synaptic_input * neuron.synaptic_scaling;

            // Update membrane potential
            neuron.update_membrane_potential(scaled_input, current_time);

            // Apply intrinsic excitability modulation
            let modulated_potential = neuron.membrane_potential * neuron.intrinsic_excitability;
            activities[neuron_idx] = modulated_potential;
        }

        // Find winner (highest activity)
        let winner_idx = activities
            .iter()
            .enumerate()
            .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
            .map(|(i, _)| i)
            .unwrap_or(0);

        // Update firing rates
        self.output_neurons[winner_idx].record_spike(current_time);

        Ok((winner_idx, activities))
    }

    /// Initialize weights randomly and normalize each row to unit length
    fn initialize_weights(&mut self) -> SpatialResult<()> {
        let mut rng = scirs2_core::random::rng();

        for mut row in self.weights.outer_iter_mut() {
            for weight in row.iter_mut() {
                *weight = rng.gen_range(0.0..1.0);
            }

            // Normalize weights
            let norm: f64 = row.iter().map(|&w| w * w).sum::<f64>().sqrt();
            if norm > 0.0 {
                for weight in row.iter_mut() {
                    *weight /= norm;
                }
            }
        }

        Ok(())
    }

    /// Homeostatic learning update
    ///
    /// Updates synaptic weights using homeostatic learning rules that
    /// incorporate metaplasticity and multi-timescale adaptation.
    fn homeostatic_learning_update(
        &mut self,
        input: &ArrayView1<f64>,
        winner_idx: usize,
        activities: &Array1<f64>,
        error: f64,
        current_time: f64,
    ) -> SpatialResult<()> {
        // Get current learning rate
        let learning_rate = self.learning_rate_adaptation.base_rate;

        // Apply metaplasticity
        let meta_modulation = self.metaplasticity.compute_modulation(winner_idx, error);

        // Apply multi-timescale adaptation
        let timescale_modulation = self.multi_timescale.get_adaptation_factor();

        // Combined learning rate
        let effective_learning_rate = learning_rate * meta_modulation * timescale_modulation;

        // Update winner weights (competitive learning with homeostatic modulation)
        let winner_neuron = &self.output_neurons[winner_idx];
        let homeostatic_factor = winner_neuron.get_homeostatic_factor();

        for (weight, &input_val) in self
            .weights
            .row_mut(winner_idx)
            .iter_mut()
            .zip(input.iter())
        {
            let weight_update =
                effective_learning_rate * homeostatic_factor * (input_val - *weight);
            *weight += weight_update;
        }

        // Update metaplasticity variables
        self.metaplasticity
            .update_metaplastic_variables(winner_idx, activities, current_time);

        Ok(())
    }

    /// Update homeostatic mechanisms
    fn update_homeostatic_mechanisms(&mut self, dt: f64) -> SpatialResult<()> {
        for neuron in &mut self.output_neurons {
            neuron.update_homeostatic_mechanisms(dt);
        }
        Ok(())
    }
}

/// Homeostatic neuron with intrinsic plasticity
///
/// This neuron implements multiple homeostatic mechanisms to maintain
/// stable activity levels, including intrinsic excitability adaptation
/// and synaptic scaling.
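///
/// # Example
/// A minimal sketch of driving a single neuron directly (times in ms; the
/// import path assumes `HomeostaticNeuron` is re-exported alongside the
/// clusterers above):
/// ```rust
/// use scirs2_spatial::neuromorphic::algorithms::HomeostaticNeuron;
///
/// let mut neuron = HomeostaticNeuron::new();
/// neuron.update_membrane_potential(1.0, 10.0); // inject input at t = 10ms
/// neuron.record_spike(10.0);                   // register a spike at t = 10ms
/// neuron.update_homeostatic_mechanisms(1.0);   // advance homeostasis by 1ms
/// assert!(neuron.intrinsic_excitability > 0.0);
/// ```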
#[derive(Debug, Clone)]
pub struct HomeostaticNeuron {
    /// Current membrane potential
    pub membrane_potential: f64,
    /// Spike threshold (adaptive)
    #[allow(dead_code)]
    pub threshold: f64,
    /// Target firing rate
    pub target_firing_rate: f64,
    /// Actual firing rate (exponential moving average)
    pub actual_firing_rate: f64,
    /// Intrinsic excitability
    pub intrinsic_excitability: f64,
    /// Homeostatic time constant
    pub homeostatic_tau: f64,
    /// Spike history for rate computation
    pub spike_history: VecDeque<f64>,
    /// Synaptic scaling factor
    pub synaptic_scaling: f64,
    /// Membrane time constant
    pub membrane_tau: f64,
    /// Last spike time
    pub last_spike_time: f64,
}

impl Default for HomeostaticNeuron {
    fn default() -> Self {
        Self::new()
    }
}

impl HomeostaticNeuron {
    /// Create new homeostatic neuron
    pub fn new() -> Self {
        Self {
            membrane_potential: 0.0,
            threshold: 1.0,
            target_firing_rate: 0.1,
            actual_firing_rate: 0.0,
            intrinsic_excitability: 1.0,
            homeostatic_tau: 1000.0, // 1 second
            spike_history: VecDeque::new(),
            synaptic_scaling: 1.0,
            membrane_tau: 10.0, // 10ms
            last_spike_time: -1000.0,
        }
    }

    /// Update membrane potential
    pub fn update_membrane_potential(&mut self, input: f64, current_time: f64) {
        let dt = current_time - self.last_spike_time;

        // Leaky integrate-and-fire dynamics: decay since the last spike, then integrate input
        let decay = (-dt / self.membrane_tau).exp();
        self.membrane_potential = self.membrane_potential * decay + input;
    }

    /// Record spike and update firing rate
    pub fn record_spike(&mut self, current_time: f64) {
        self.spike_history.push_back(current_time);
        self.last_spike_time = current_time;

        // Keep only recent spikes (last 1000ms)
        while let Some(&front_time) = self.spike_history.front() {
            if current_time - front_time > 1000.0 {
                self.spike_history.pop_front();
            } else {
                break;
            }
        }

        // Update actual firing rate (exponential moving average)
        let instantaneous_rate = self.spike_history.len() as f64 / 1000.0; // spikes per ms
        let alpha = 1.0 / self.homeostatic_tau;
        self.actual_firing_rate =
            (1.0 - alpha) * self.actual_firing_rate + alpha * instantaneous_rate;
    }

    /// Update homeostatic mechanisms
    pub fn update_homeostatic_mechanisms(&mut self, dt: f64) {
        // Intrinsic plasticity: adjust excitability to maintain target firing rate
        let rate_error = self.target_firing_rate - self.actual_firing_rate;
        let excitability_update = 0.001 * rate_error * dt;
        self.intrinsic_excitability += excitability_update;
        self.intrinsic_excitability = self.intrinsic_excitability.clamp(0.1, 10.0);

        // Synaptic scaling: global scaling of all synapses
        let scaling_rate = 0.0001;
        let scaling_update = scaling_rate * rate_error * dt;
        self.synaptic_scaling += scaling_update;
        self.synaptic_scaling = self.synaptic_scaling.clamp(0.1, 10.0);
    }

    /// Get homeostatic factor for learning modulation
    ///
    /// Computes `clamp(2 / (1 + r), 0.1, 10)`, where `r` is the ratio of the
    /// actual to the target firing rate, so under-active neurons learn faster.
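    ///
    /// # Example
    /// When the actual rate equals the target (`r = 1`), the factor is
    /// `2 / (1 + 1) = 1.0`. The import path below assumes `HomeostaticNeuron`
    /// is re-exported alongside the clusterers:
    /// ```rust
    /// use scirs2_spatial::neuromorphic::algorithms::HomeostaticNeuron;
    ///
    /// let mut neuron = HomeostaticNeuron::new();
    /// neuron.actual_firing_rate = 0.1;
    /// neuron.target_firing_rate = 0.1;
    /// assert!((neuron.get_homeostatic_factor() - 1.0).abs() < 1e-9);
    /// ```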
    pub fn get_homeostatic_factor(&self) -> f64 {
        // Higher factor when firing rate is below target (need to strengthen synapses)
        let rate_ratio = self.actual_firing_rate / self.target_firing_rate.max(1e-6);
        (2.0 / (1.0 + rate_ratio)).clamp(0.1, 10.0)
    }
}

/// Learning rate adaptation mechanisms
///
/// Adapts the learning rate based on performance history to optimize
/// learning dynamics during training.
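///
/// # Example
/// A sketch of the adaptation loop; the decreasing error sequence is
/// illustrative, and the import path assumes this type is re-exported like
/// the clusterers:
/// ```rust
/// use scirs2_spatial::neuromorphic::algorithms::LearningRateAdaptation;
///
/// let mut adaptation = LearningRateAdaptation::new(0.01);
/// for step in 0..30 {
///     adaptation.update_learning_rate(1.0 / (step as f64 + 1.0));
/// }
/// assert!(adaptation.base_rate >= adaptation.min_rate);
/// assert!(adaptation.base_rate <= adaptation.max_rate);
/// ```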
#[derive(Debug, Clone)]
pub struct LearningRateAdaptation {
    /// Base learning rate
    pub base_rate: f64,
    /// Adaptation factor
    pub adaptation_factor: f64,
    /// Performance history
    pub performance_history: VecDeque<f64>,
    /// Adaptation threshold
    pub adaptation_threshold: f64,
    /// Maximum learning rate
    pub max_rate: f64,
    /// Minimum learning rate
    pub min_rate: f64,
}

impl LearningRateAdaptation {
    /// Create new learning rate adaptation
    pub fn new(base_rate: f64) -> Self {
        Self {
            base_rate,
            adaptation_factor: 0.1,
            performance_history: VecDeque::new(),
            adaptation_threshold: 0.1,
            max_rate: 0.1,
            min_rate: 1e-6,
        }
    }

    /// Update learning rate based on performance
    pub fn update_learning_rate(&mut self, current_error: f64) {
        self.performance_history.push_back(current_error);

        // Keep only recent errors
        if self.performance_history.len() > 100 {
            self.performance_history.pop_front();
        }

        // Adapt only once two full 10-sample windows are available, so the
        // fixed divisors below are valid averages
        if self.performance_history.len() >= 20 {
            let recent_avg = self.performance_history.iter().rev().take(10).sum::<f64>() / 10.0;
            let older_avg = self
                .performance_history
                .iter()
                .rev()
                .skip(10)
                .take(10)
                .sum::<f64>()
                / 10.0;

            let performance_ratio = if older_avg > 0.0 {
                recent_avg / older_avg
            } else {
                1.0
            };

            // Adapt learning rate based on the error trend
            if performance_ratio < 0.95 {
                // Error shrinking - increase learning rate slightly
                self.base_rate *= 1.01;
            } else if performance_ratio > 1.05 {
                // Error growing - decrease learning rate
                self.base_rate *= 0.99;
            }

            // Apply bounds
            self.base_rate = self.base_rate.clamp(self.min_rate, self.max_rate);
        }
    }
}

/// Metaplasticity controller for flexible learning
///
/// Controls learning plasticity based on activity history to implement
/// metaplasticity - the plasticity of plasticity itself.
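///
/// # Example
/// A small sketch of querying the modulation signal (import path assumed,
/// matching the other types in this module):
/// ```rust
/// use scirs2_spatial::neuromorphic::algorithms::MetaplasticityController;
///
/// let controller = MetaplasticityController::new(2, 3);
/// let modulation = controller.compute_modulation(0, 0.5);
/// assert!(modulation.is_finite() && modulation > 0.0);
/// ```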
#[derive(Debug, Clone)]
pub struct MetaplasticityController {
    /// Metaplastic variables for each synapse
    metaplastic_variables: Array2<f64>,
    /// Metaplastic time constant
    meta_tau: f64,
    /// Plasticity threshold
    #[allow(dead_code)]
    plasticity_threshold: f64,
    /// LTP/LTD balance factor
    #[allow(dead_code)]
    ltp_ltd_balance: f64,
    /// Activity-dependent scaling
    #[allow(dead_code)]
    activity_scaling: f64,
}

impl MetaplasticityController {
    /// Create new metaplasticity controller
    pub fn new(num_clusters: usize, input_dim: usize) -> Self {
        Self {
            metaplastic_variables: Array2::ones((num_clusters, input_dim)),
            meta_tau: 10000.0, // 10 seconds
            plasticity_threshold: 0.5,
            ltp_ltd_balance: 1.0,
            activity_scaling: 1.0,
        }
    }

    /// Compute metaplastic modulation
    pub fn compute_modulation(&self, neuron_idx: usize, error: f64) -> f64 {
        let meta_var_avg = self
            .metaplastic_variables
            .row(neuron_idx)
            .mean()
            .unwrap_or(1.0);

        // Higher metaplastic variable means lower plasticity (harder to change)
        let modulation = 1.0 / (1.0 + meta_var_avg);

        // Scale by error magnitude
        modulation * (1.0 + error.abs()).ln()
    }

    /// Update metaplastic variables
    pub fn update_metaplastic_variables(
        &mut self,
        winner_idx: usize,
        activities: &Array1<f64>,
        _current_time: f64,
    ) {
        let dt = 1.0; // Assume 1ms updates
        let decay_factor = (-dt / self.meta_tau).exp();

        // Update metaplastic variables for winner
        for meta_var in self.metaplastic_variables.row_mut(winner_idx).iter_mut() {
            *meta_var = *meta_var * decay_factor + (1.0 - decay_factor) * activities[winner_idx];
        }
    }
}

/// Multi-timescale adaptation for different learning phases
///
/// Implements adaptation mechanisms operating at different timescales,
/// from fast (millisecond) to slow (minute) dynamics.
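///
/// # Example
/// A sketch of one update step (import path assumed, matching the other
/// types in this module; times are in ms):
/// ```rust
/// use scirs2_spatial::neuromorphic::algorithms::MultiTimescaleAdaptation;
///
/// let mut adaptation = MultiTimescaleAdaptation::new();
/// adaptation.update(0.5, 1.0); // error of 0.5 over a 1ms step
/// let factor = adaptation.get_adaptation_factor();
/// assert!(factor.is_finite() && factor > 0.0);
/// ```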
#[derive(Debug, Clone)]
pub struct MultiTimescaleAdaptation {
    /// Fast adaptation (~1ms time constant)
    fast_adaptation: AdaptationScale,
    /// Medium adaptation (~1s time constant)
    medium_adaptation: AdaptationScale,
    /// Slow adaptation (~1min time constant)
    slow_adaptation: AdaptationScale,
    /// Current timescale weights
    timescale_weights: Array1<f64>,
}

impl Default for MultiTimescaleAdaptation {
    fn default() -> Self {
        Self::new()
    }
}

impl MultiTimescaleAdaptation {
    /// Create new multi-timescale adaptation
    pub fn new() -> Self {
        Self {
            fast_adaptation: AdaptationScale::new(1.0, 1.0), // 1ms timescale
            medium_adaptation: AdaptationScale::new(1000.0, 0.5), // 1s timescale
            slow_adaptation: AdaptationScale::new(60000.0, 0.1), // 1min timescale
            timescale_weights: Array1::from(vec![0.5, 0.3, 0.2]),
        }
    }

    /// Update all adaptation scales
    pub fn update(&mut self, error: f64, dt: f64) {
        self.fast_adaptation.update(error, dt);
        self.medium_adaptation.update(error, dt);
        self.slow_adaptation.update(error, dt);
    }

    /// Get combined adaptation factor
    pub fn get_adaptation_factor(&self) -> f64 {
        let fast_factor = self.fast_adaptation.memory_trace;
        let medium_factor = self.medium_adaptation.memory_trace;
        let slow_factor = self.slow_adaptation.memory_trace;

        self.timescale_weights[0] * fast_factor
            + self.timescale_weights[1] * medium_factor
            + self.timescale_weights[2] * slow_factor
    }
}

/// Individual adaptation scale
///
/// Represents adaptation operating at a specific timescale with
/// exponential memory traces and decay dynamics.
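///
/// # Example
/// A sketch of a single scale (import path assumed, matching the other
/// types in this module):
/// ```rust
/// use scirs2_spatial::neuromorphic::algorithms::AdaptationScale;
///
/// let mut scale = AdaptationScale::new(10.0, 0.5); // 10ms time constant
/// scale.update(0.2, 1.0);
/// assert!(scale.memory_trace >= 0.0 && scale.memory_trace <= 2.0);
/// ```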
#[derive(Debug, Clone)]
pub struct AdaptationScale {
    /// Time constant for this scale
    time_constant: f64,
    /// Adaptation strength
    #[allow(dead_code)]
    adaptation_strength: f64,
    /// Memory trace
    pub memory_trace: f64,
    /// Decay factor
    #[allow(dead_code)]
    decay_factor: f64,
}

impl AdaptationScale {
    /// Create new adaptation scale
    pub fn new(time_constant: f64, adaptation_strength: f64) -> Self {
        Self {
            time_constant,
            adaptation_strength,
            memory_trace: 1.0,
            decay_factor: 0.999,
        }
    }

    /// Update this adaptation scale
    pub fn update(&mut self, error: f64, dt: f64) {
        let decay = (-dt / self.time_constant).exp();
        self.memory_trace = self.memory_trace * decay + (1.0 - decay) * (1.0 - error);
        self.memory_trace = self.memory_trace.clamp(0.0, 2.0);
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use scirs2_core::ndarray::Array2;

    #[test]
    fn test_competitive_clusterer_creation() {
        let clusterer = CompetitiveNeuralClusterer::new(3, 2);
        assert_eq!(clusterer.num_clusters(), 3);
        assert_eq!(clusterer.learning_rates().len(), 3);
        assert_eq!(clusterer.inhibition_strengths().dim(), (3, 3));
    }

    #[test]
    fn test_competitive_clustering() {
        let points =
            Array2::from_shape_vec((4, 2), vec![0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]).unwrap();

        let mut clusterer = CompetitiveNeuralClusterer::new(2, 2);
        let result = clusterer.fit(&points.view(), 10);
        assert!(result.is_ok());

        let assignments = result.unwrap();
        assert_eq!(assignments.len(), 4);

        let centers = clusterer.get_cluster_centers();
        assert_eq!(centers.dim(), (2, 2));
    }

    #[test]
    fn test_homeostatic_clusterer_creation() {
        let clusterer = HomeostaticNeuralClusterer::new(3, 2);
        assert_eq!(clusterer.num_clusters(), 3);
        assert_eq!(clusterer.neuron_firing_rates().len(), 3);
    }

    #[test]
    fn test_homeostatic_clustering() {
        let points =
            Array2::from_shape_vec((4, 2), vec![0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]).unwrap();

        let mut clusterer =
            HomeostaticNeuralClusterer::new(2, 2).with_homeostatic_params(0.1, 100.0);

        let result = clusterer.fit(&points.view(), 10);
        assert!(result.is_ok());

        let assignments = result.unwrap();
        assert_eq!(assignments.len(), 4);

        let centers = clusterer.get_cluster_centers();
        assert_eq!(centers.dim(), (2, 2));
    }

    #[test]
    fn test_homeostatic_neuron() {
        let mut neuron = HomeostaticNeuron::new();

        // Test membrane potential update
        neuron.update_membrane_potential(1.0, 10.0);
        assert!(neuron.membrane_potential > 0.0);

        // Test spike recording
        neuron.record_spike(10.0);
        assert_eq!(neuron.spike_history.len(), 1);

        // Test homeostatic mechanisms
        neuron.update_homeostatic_mechanisms(1.0);
        assert!(neuron.intrinsic_excitability > 0.0);
        assert!(neuron.synaptic_scaling > 0.0);
    }

    #[test]
    fn test_learning_rate_adaptation() {
        let mut adaptation = LearningRateAdaptation::new(0.01);

        // Simulate improving performance
        adaptation.update_learning_rate(1.0);
        adaptation.update_learning_rate(0.8);
        adaptation.update_learning_rate(0.6);

        // Rate should still be within bounds
        assert!(adaptation.base_rate >= adaptation.min_rate);
        assert!(adaptation.base_rate <= adaptation.max_rate);
    }

    #[test]
    fn test_metaplasticity_controller() {
        let controller = MetaplasticityController::new(2, 3);

        let modulation = controller.compute_modulation(0, 0.5);
        assert!(modulation > 0.0);
        assert!(modulation.is_finite());
    }

    #[test]
    fn test_multi_timescale_adaptation() {
        let mut adaptation = MultiTimescaleAdaptation::new();

        // Test update
        adaptation.update(0.5, 1.0);

        let factor = adaptation.get_adaptation_factor();
        assert!(factor > 0.0);
        assert!(factor.is_finite());
    }

    #[test]
    fn test_adaptation_scale() {
        let mut scale = AdaptationScale::new(10.0, 0.5);
        let initial_trace = scale.memory_trace;

        scale.update(0.2, 1.0);

        // Memory trace should have changed
        assert_ne!(scale.memory_trace, initial_trace);
        assert!(scale.memory_trace >= 0.0);
        assert!(scale.memory_trace <= 2.0);
    }

    #[test]
    fn test_empty_input() {
        let points = Array2::zeros((0, 2));

        let mut competitive = CompetitiveNeuralClusterer::new(2, 2);
        let result = competitive.fit(&points.view(), 10);
        assert!(result.is_ok());
        assert_eq!(result.unwrap().len(), 0);

        let mut homeostatic = HomeostaticNeuralClusterer::new(2, 2);
        let result = homeostatic.fit(&points.view(), 10);
        assert!(result.is_ok());
        assert_eq!(result.unwrap().len(), 0);
    }

    #[test]
    fn test_dimension_mismatch() {
        let points = Array2::zeros((4, 3)); // Wrong dimension
        let mut clusterer = HomeostaticNeuralClusterer::new(2, 2);

        let result = clusterer.fit(&points.view(), 10);
        assert!(result.is_err());
    }

    #[test]
    fn test_competitive_reset() {
        let mut clusterer = CompetitiveNeuralClusterer::new(2, 2);

        // Modify some parameters
        clusterer.learning_rates[0] = 0.5;

        // Reset should restore initial state
        clusterer.reset();
        assert_eq!(clusterer.learning_rates[0], 0.1);
    }
}