// evo_rl/neuron.rs

1//! Module for the `neuron` struct which defines the individual unit of computation for the neural
2//! network. 
3
4use log::*;
5use nalgebra as na;
6use std::sync::Arc;
7use crate::enecode::{NeuronalEneCode, NeuronType};
8use na::DVector;
9use rand::prelude::*;
10use rand::Rng;
11use rand_distr::{Distribution, Normal};
12use crate::{relu, sigmoid};
13
14//// `Nn` is a struct that defines an Artificial Neuron.
15/// 
16/// # Fields
17/// * `synaptic_weights` - A vector of synaptic weights.
18/// * `bias` - A synaptic bias for the neuron.
19/// * `inputs` - A list of input neurons.
20/// * `activation_level` - The neuron's activation level.
21/// * `tau` - The neuron's time constant.
22/// * `learning_threshold` - The neuron's learning threshold.
23/// * `learning_rate` - The neuron's learning rate.
24/// * `tanh_alpha` - The neuron's hyperbolic tangent alpha parameter.
25/// * `neuron_type` - The type of the neuron: Input, Output, or Hidden.
26///
27/// # Example
28/// ```rust
29/// // code example here
30#[derive(Debug, Clone)]
31pub struct Nn {
32    pub id: Arc<str>,
33    pub synaptic_weights: DVector<f32>,
34    pub bias: f32,
35    pub inputs: Vec<String>,
36    pub activation_level: f32,
37    pub tau: f32,
38    pub learning_threshold: f32,
39    pub learning_rate: f32,
40    pub tanh_alpha: f32,
41    pub neuron_type: NeuronType,
42}
43
44impl From<Arc<NeuronalEneCode<'_>>> for Nn {
45    fn from(ene: Arc<NeuronalEneCode>) -> Self {
46        let mut inputs_as_list: Vec<String> = Vec::new();
47        let mut weights_as_list: Vec<f32> = Vec::new();
48
49        for input_id in ene.topology.inputs.keys() {
50            inputs_as_list.push(input_id.clone());
51            weights_as_list.push(ene.topology.inputs[input_id]);
52        }
53
54        Nn {
55            id: ene.neuron_id.clone(),
56            inputs: inputs_as_list,
57            synaptic_weights: DVector::from_vec(weights_as_list), 
58            bias: ene.topology.genetic_bias,
59            activation_level: 0., 
60            tau: ene.properties.tau,
61            learning_rate: ene.meta.learning_rate,
62            learning_threshold: ene.meta.learning_threshold,
63            tanh_alpha: ene.properties.tanh_alpha,
64            neuron_type: ene.topology.pin.clone(),
65        }
66    }
67}
68
69impl Nn{
70
71    /// Propagates the input through the neuron to compute the next state.
72    ///
73    /// # Arguments
74    /// * `input` -f32
75    pub fn propagate(&mut self, input: f32) {
76        match self.neuron_type {
77            NeuronType::In => {
78                self.set_value(input);
79            },
80            _ => self.fwd(input),
81        }
82    }
83
84    /// Returns the output value of the neuron.
85    ///
86    /// # Returns
87    /// The output value as a floating-point number.
88    pub fn output_value(&self) -> f32 {
89        match self.neuron_type {
90            NeuronType::In => self.activation_level,
91            _ => self.nonlinearity(&self.activation_level)
92
93        }
94    }
95
96    /// Performs mutation on the neuron
97    ///
98    /// # Arguments
99    /// * `rng` - thread_rng
100    /// * `epsilon` - mutation rate
101    /// * `sd` - the standard deviation of a normal distribution used to sample changes
102    pub fn mutate<R: Rng>(&mut self, rng: &mut R, epsilon: f32, sd: f32) {
103        //bias mutation
104        let normal = Normal::new(0., sd).unwrap();
105        if rng.gen::<f32>() < epsilon {
106            let updated_bias = self.bias + normal.sample(rng);
107            self.bias = updated_bias;
108        }
109    }
110
111    fn set_value(&mut self, in_value: f32) {
112        self.activation_level = in_value;
113        debug!("Setting neuron {} to activation level of {}", self.id, self.activation_level);
114    }
115
116    fn fwd(&mut self, impulse: f32) {
117        self.activation_level = self.activation_level - self.activation_level*(-self.tau).exp();
118        self.activation_level += impulse + self.bias;
119        //self.learn?
120        debug!("Activation level for neuron {} set at {} after impulse {}", self.id, self.activation_level, impulse);
121    }
122
123    fn nonlinearity(&self, z: &f32) -> f32 {
124        // Use relu on hidden layers, tanh on output
125        match self.neuron_type {
126         NeuronType::Hidden => relu(z),
127         _ => (z * self.tanh_alpha).tanh()
128        }
129    }
130
131
132    fn learn(&self, syn_weight_current: f32) -> f32 {
133        //Calculates a delta to change the current synapse
134        if self.activation_level > self.learning_threshold {
135            syn_weight_current * self.learning_rate // - self.activation_level * self.homeostatic_force
136        } else { 0.} 
137    }
138
139}
140
#[cfg(test)]
mod tests {
    use crate::enecode::*;
    use crate::doctest::{TOPOLOGY_GENE_EXAMPLE, META_GENE_EXAMPLE, NEURONAL_PROPERTIES_GENE_EXAMPLE};
    use super::*;

    /// Builds the shared hidden-neuron genome fixture used by every test
    /// (previously duplicated in each test body).
    fn fixture_enecode() -> NeuronalEneCode<'static> {
        NeuronalEneCode {
            neuron_id: "h01".into(),
            topology: &TOPOLOGY_GENE_EXAMPLE,
            properties: &NEURONAL_PROPERTIES_GENE_EXAMPLE,
            meta: &META_GENE_EXAMPLE,
        }
    }

    #[test]
    fn test_propagate_neuron() {
        // Initialize an Nn with NeuronType::Hidden from a NeuronalEneCode
        // and check the integrated activation (impulse 12 + bias 5).
        let mut neuron = Nn::from(Arc::new(fixture_enecode()));
        neuron.propagate(12_f32);

        assert_eq!(neuron.activation_level, 17.);
    }

    #[test]
    fn test_output_value() {
        let mut neuron = Nn::from(Arc::new(fixture_enecode()));
        neuron.propagate(12_f32);

        assert_eq!(neuron.activation_level, 17.);
        assert_eq!(neuron.output_value(), relu(&17.));

        // Multiple runs of the same neuron with 0 tau should produce the
        // same value in the absence of synaptic learning.
        neuron.propagate(12_f32);
        assert_eq!(neuron.activation_level, 17.);
        assert_eq!(neuron.output_value(), relu(&17.));
    }

    #[test]
    fn test_mutate() {
        let mut neuron = Nn::from(Arc::new(fixture_enecode()));

        let seed = [17; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);
        // epsilon = 1 guarantees the bias mutates.
        neuron.mutate(&mut rng, 1., 0.1);

        assert_ne!(neuron.bias, 5.);
    }
}
206