// minet_ai/lib.rs

// MiNET AI - Miniaturized Neural Evolutionary Topology
// Version 0.1.0
3use std::usize;
4
5mod minet_display;
6pub use minet_display::*;
7
8mod minet_encoding;
9pub use minet_encoding::*;
10
11mod minet_activation;
12use minet_activation::*;
13
14use rand::{seq::SliceRandom, thread_rng, Rng};
15use rand_distr::{Distribution, Normal};
16
/// A miniaturized feed-forward neural network genome.
///
/// Neurons are indexed `[0, input)` for inputs, `[input, input + hidden)` for
/// hidden neurons, and the remaining `output` indices for outputs. Synapses
/// only point from lower to higher indices, so a single left-to-right sweep
/// evaluates the network.
#[derive(Clone, Debug)]
#[allow(non_camel_case_types)] // public name kept lowercase for API compatibility
pub struct minet {
    /// One `(bias, outgoing synapses)` entry per neuron; a synapse is
    /// `(target neuron index, weight)`.
    pub genes: Vec<(f32, Vec<(usize, f32)>)>,
    /// Number of input neurons.
    pub input: usize,
    /// Number of hidden neurons.
    pub hidden: usize,
    /// Number of output neurons.
    pub output: usize,
    /// Fitness score assigned externally during evaluation.
    pub fitness: f32,
}
25
// Std dev of the zero-mean Gaussian used for a brand-new synapse weight.
const INITIAL_WEIGHT_STD_DEVIATION: f32 = 0.5;
// Std dev of the per-mutation Gaussian jitter added to every synapse weight.
const WEIGHT_STD_DEVIATION: f32 = 0.1;
// Std dev of the per-mutation Gaussian jitter added to every neuron bias.
const BIAS_STD_DEVIATION: f32 = 0.016;
// Probability that a mutation also swaps the weakest synapse for a new one.
const SYNAPSE_PROBABILITY: f64 = 0.4;
30
31impl minet {
32    pub fn new(input: usize, hidden: usize, output: usize) -> Self {
33        let total_neurons = input + hidden + output;
34        let genes = vec![(0.0, Vec::new()); total_neurons];
35
36        let mut minet = minet {
37            genes,
38            input,
39            hidden,
40            output,
41            fitness: 0.0,
42        };
43    
44        
45        for i in input..total_neurons {
46            minet.connect_random_from(i);
47            minet.connect_random_from(i);
48            minet.connect_random_from(i);
49        }
50        
51        minet
52    }
53    
54    /// Initializes the population with random neural networks.
55    pub fn initialize_population(pop_size: usize, inputs: usize, hidden: usize, outputs: usize) -> Vec<minet> {
56        (0..pop_size).map(|_| minet::new(inputs, hidden, outputs)).collect()
57    }
58
59    fn mutate(&mut self) {
60        self.mutate_weights();
61        self.mutate_bias();
62
63        let mut rng = rand::thread_rng();
64        if rng.gen_bool(SYNAPSE_PROBABILITY) {
65            self.synapse_swap();
66        }
67    }
68
69    fn synapse_swap(&mut self) {
70        self.synapse_remove_smallest();
71        self.synapse_connect_random();
72    }
73
74    pub fn crossbreed(&self, other: &Self) -> Self {
75        let mut new_genes: Vec<(f32, Vec<(usize, f32)>)> = Vec::new();
76        let parent1 = self.genes.clone();
77        let parent2 = other.genes.clone();
78        let mut rng = rand::thread_rng();
79        for i in 0..parent1.len() {
80            let gene1 = parent1[i].clone();
81            let gene2 = parent2[i].clone();
82            let new_gene = if rng.gen_bool(0.5) { gene1 } else { gene2 };
83            new_genes.push(new_gene);
84        }
85        let mut child = minet {
86            genes: new_genes,
87            input: self.input,
88            hidden: self.hidden,
89            output: self.output,
90            fitness: 0.0,
91        };
92        child.mutate();
93        child
94    }
95
96    fn mutate_weights(&mut self) {
97        for gene in self.genes.iter_mut() {
98            for synapse in gene.1.iter_mut() {
99                synapse.1 += sample_normal(WEIGHT_STD_DEVIATION);
100            }
101        }
102    }
103
104    fn mutate_bias(&mut self) {
105        for gene in self.genes.iter_mut() {
106            gene.0 += sample_normal(BIAS_STD_DEVIATION);
107        }
108    }
109
110    pub fn forward(&self, inputs: Vec<f32>) -> Vec<f32> {
111        let input_neurons = self.input;
112        let hidden_neurons = self.hidden;
113        let output_neurons = self.output;
114        let length = self.genes.len();
115
116        let mut activation_map = vec![0.0; length];
117
118        // Set input activations and propagate forward
119        for i in 0..input_neurons {
120            activation_map[i] = inputs[i];
121            for &(target_idx, weight) in &self.genes[i].1 {
122                activation_map[target_idx as usize] += inputs[i] * weight;
123            }
124        }
125
126        // Process hidden layer
127        for i in input_neurons..(input_neurons + hidden_neurons) {
128            // Add bias
129            activation_map[i] += self.genes[i].0;
130            // Apply ReLU (assume relu(x) = max(0,x))
131            activation_map[i] = tanh(activation_map[i]);
132
133            // Propagate hidden activations forward
134            for &(target_idx, weight) in &self.genes[i].1 {
135                activation_map[target_idx as usize] += activation_map[i] * weight;
136            }
137        }
138
139        // Process output layer
140        let mut outputs = Vec::with_capacity(output_neurons);
141        for i in (input_neurons + hidden_neurons)..(input_neurons + hidden_neurons + output_neurons)
142        {
143            // Add bias
144            activation_map[i] += self.genes[i].0;
145            // Apply Sigmoid
146            activation_map[i] = sigmoid(activation_map[i]);
147            outputs.push(activation_map[i]);
148        }
149        outputs
150    }
151    
152    pub fn forward_display(&self, inputs: Vec<f32>) -> Vec<f32> {
153        let input_neurons = self.input;
154        let hidden_neurons = self.hidden;
155        let output_neurons = self.output;
156        let length = self.genes.len();
157
158        let mut activation_map = vec![0.0; length];
159
160        // Set input activations and propagate forward
161        for i in 0..input_neurons {
162            activation_map[i] = inputs[i];
163            for &(target_idx, weight) in &self.genes[i].1 {
164                activation_map[target_idx as usize] += inputs[i] * weight;
165            }
166        }
167
168        // Process hidden layer
169        for i in input_neurons..(input_neurons + hidden_neurons) {
170            // Add bias
171            activation_map[i] += self.genes[i].0;
172            // Apply ReLU (assume relu(x) = max(0,x))
173            activation_map[i] = tanh(activation_map[i]);
174
175            // Propagate hidden activations forward
176            for &(target_idx, weight) in &self.genes[i].1 {
177                activation_map[target_idx as usize] += activation_map[i] * weight;
178            }
179        }
180
181        // Process output layer
182        let mut outputs = Vec::with_capacity(output_neurons);
183        for i in (input_neurons + hidden_neurons)..(input_neurons + hidden_neurons + output_neurons)
184        {
185            // Add bias
186            activation_map[i] += self.genes[i].0;
187            // Apply Sigmoid
188            activation_map[i] = sigmoid(activation_map[i]);
189            outputs.push(activation_map[i]);
190        }
191        activation_map
192    }
193    
194    /// Removes a random synapse from the genome, if any synapses exist
195    fn synapse_remove_random(&mut self) {
196        let mut rng = rand::thread_rng();
197        let connected_neurons: Vec<(usize, usize)> = self
198            .genes
199            .iter()
200            .enumerate()
201            .flat_map(|(i, gene)| gene.1.iter().enumerate().map(move |(j, _)| (i, j)))
202            .collect();
203
204        if let Some(&(neuron, synapse)) = connected_neurons.choose(&mut rng) {
205            self.genes[neuron].1.remove(synapse);
206        } else {
207            println!("No synapses to remove.");
208        }
209    }
210    
211    pub fn synapse_remove_smallest(
212        &mut self,
213    ) {
214        let mut min_abs_weight = f32::MAX;
215        let mut min_synapse_location: (usize, usize) = (usize::MAX, usize::MAX);
216        
217        // Find the (gene_idx, synapse_idx) of the smallest |weight|
218        for (gene_idx, gene) in self.genes.iter().enumerate() {
219            for (synapse_idx, &(_, weight)) in gene.1.iter().enumerate() {
220                let abs_weight = weight.abs();
221                if abs_weight < min_abs_weight {
222                    min_abs_weight = abs_weight;
223                    min_synapse_location = (gene_idx, synapse_idx);
224                }
225            }
226        }
227            
228        // If we found a valid synapse, remove it
229        if min_synapse_location.0 != usize::MAX {
230            let (g_idx, s_idx) = min_synapse_location;
231            self.genes[g_idx].1.remove(s_idx);
232        }
233    }
234    
235    /// Connects two random, unconnected neurons in the forward direction. 
236    fn synapse_connect_random(&mut self) {
237        let mut rng = rand::thread_rng();
238        let non_output = self.input + self.hidden;
239
240        loop {
241            let source = rng.gen_range(0..non_output);
242            let target_candidates = self.synapse_candidates(source);
243            if let Some(&target) = target_candidates.choose(&mut rng) {
244                let weight = sample_normal(INITIAL_WEIGHT_STD_DEVIATION);
245                self.genes[source].1.push((target, weight));
246                break;
247            }
248        }
249    }
250    
251    /// Connects a random neuron from an index lower than the to_index. 
252    /// ie from an output neuron to a hidden or input neuron in the forward direction
253    fn connect_random_from(&mut self, to_index: usize) -> usize {
254        let mut rng = rand::thread_rng();
255        
256        let non_output = self.input + self.hidden;
257        
258        let from_index = rng.gen_range(0..(to_index.clamp(0, non_output)));
259        
260        let synapse_candidates = self.synapse_candidates(from_index);
261        
262        for i in 0..synapse_candidates.len() {
263            let target = synapse_candidates[i];
264            if target == to_index {
265                let weight = sample_normal(INITIAL_WEIGHT_STD_DEVIATION);
266                self.genes[from_index].1.push((to_index, weight));
267            }
268        }
269        from_index
270    }
271    
272    /// Generates candidates for synapse connections with the given criteria
273    /// 1. If source is a output neuron, it returns an empty vec
274    /// 2. Target index candidates must be larger than source index
275    /// 3. Candidates must not already be connected
276    /// 4. If the source is a input neuron, all candidates must be hidden or output neurons
277    fn synapse_candidates(&self, source: usize) -> Vec<usize> {
278        if source >= self.genes.len() - self.output {
279            return Vec::new();
280        }
281
282        self.genes
283            .iter()
284            .enumerate()
285            .skip(source + 1)
286            .filter(|&(i, _)| i >= self.input && !self.synapse_is_connected(source, i))
287            .map(|(i, _)| i)
288            .collect()
289    }
290    
291    /// Returns true if the two selected neurons are connected by synapse
292    fn synapse_is_connected(&self, source: usize, target: usize) -> bool {
293        self.genes[source].1.iter().any(|&(t, _)| t == target)
294    }
295
296    /// Returns count of synapses in the genome
297    pub fn synapse_count(&self) -> usize {
298        self.genes.iter().map(|gene| gene.1.len()).sum()
299    }
300    
301    /// Takes the best % of the population 
302    /// Randomly crossbreeds them (asexual reproduction is possible)
303    /// Returns a new population of the children of the survivors, with mutations
304    pub fn crossbreed_population(
305        mut population: Vec<minet>,
306        survival_rate: f32,
307        target_population: usize, 
308    ) -> Vec<minet> {
309        let population_size = population.len();
310        let surviving_count = (population_size as f32 * survival_rate).round() as usize;
311    
312        let new_target = target_population - surviving_count;
313        
314        // Take the best (survival_rate * population) of the population by fitness
315        population.sort_by(|a, b| b.fitness.partial_cmp(&a.fitness).unwrap());
316        population.truncate(surviving_count);
317        
318        for i in 0..population.len() {
319            population[i].fitness = 0.0;
320        }
321        
322        for _ in 0..new_target{
323            let parent1 = population.choose(&mut thread_rng()).unwrap();
324            let parent2 = population.choose(&mut thread_rng()).unwrap();
325            let mut child = parent1.crossbreed(parent2);
326            population.push(child);
327        }
328        population
329    }
330}
331
/// Draws one sample from a zero-mean Gaussian with the given standard
/// deviation. Used both for fresh synapse weights and for mutation jitter.
/// Panics only if `std_dev` is invalid for `Normal` (e.g. negative or NaN).
fn sample_normal(std_dev: f32) -> f32 {
    let normal = Normal::new(0.0, std_dev).expect("Invalid parameters for Normal distribution");
    let mut rng = thread_rng();
    normal.sample(&mut rng)
336}