evo_rl/graph.rs

//! This module implements neural networks in the form of directed graphs (from `petgraph`).
//! Evolutionary changes are applied to the graph first and then written back to the genome.

use log::*;
use crate::{increment_innovation_number, progenitor_code};
use crate::neuron::Nn;
use crate::enecode::{EneCode, NeuronalEneCode, NeuronType};
use rand::prelude::*;
use rand_distr::{Distribution, Normal};
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;
use std::io::Write;
use std::sync::Arc;
use petgraph::graph::{DiGraph, NodeIndex, EdgeReference};
use petgraph::dot::{Dot, Config};
use petgraph::visit::Bfs;
use nalgebra::DVector;
use thiserror::Error;
use pyo3::prelude::*;
use pyo3::types::{PyDict, IntoPyDict};

/// `NeuralNetwork` is a struct that represents a directed-graph-based
/// feed-forward neural network, initialized from an `EneCode` genome.
///
/// The struct encapsulates the genome (genetic blueprint), graph-based network,
/// node-identity mapping, and network output.
///
/// # Fields
/// * `genome` - The genetic blueprint (`EneCode`) of the neural network.
/// * `graph` - The directed graph (`DiGraph`) from `petgraph` that represents the network.
/// * `node_identity_map` - A HashMap mapping each neuron ID (`String`) to its index (`NodeIndex`) in the graph.
/// * `network_output` - A vector holding the output values of the output neurons.
///
/// # Example Usage
/// ```rust
/// # use evo_rl::doctest::GENOME_EXAMPLE;
/// use evo_rl::graph::NeuralNetwork;
/// use evo_rl::enecode::EneCode;
///
/// // Assume genome is a properly initialized EneCode
/// # let genome = GENOME_EXAMPLE.clone();
/// let mut network = NeuralNetwork::new(genome);
///
/// // Assume input is a properly initialized Vec<f32>
/// # let input: Vec<f32> = vec![0.];
/// network.fwd(input);
///
/// let output = network.fetch_network_output();
/// ```
#[derive(Debug, Clone)]
pub struct NeuralNetwork {
    pub genome: EneCode,
    pub graph: DiGraph<Nn, f32>,
    pub node_identity_map: HashMap<String, NodeIndex>,
    network_output: Vec<f32>,
}

impl ToPyObject for NeuralNetwork {
    fn to_object(&self, py: Python<'_>) -> PyObject {
        let dict = PyDict::new(py);

        let mut python_friendly_map: HashMap<&str, usize> = HashMap::new();

        for key in self.node_identity_map.keys() {
            python_friendly_map.insert(&key, self.node_identity_map[key].index().into());
        }

        let _ = dict.set_item("genome", &self.genome);
        //TODO: Enable NeuralNetwork for #[pyclass]
        //dict.set_item("graph", self.graph);
        let _ = dict.set_item("network_output", &self.network_output);
        let _ = dict.set_item("node_identity_map", python_friendly_map.into_py_dict(py));

        dict.into()
    }
}

impl NeuralNetwork {

    /// Create a new `NeuralNetwork` from an `EneCode` genome.
    pub fn new(genome: EneCode) -> Self {
        let mut nn = NeuralNetwork {
            genome: genome.clone(),
            graph: DiGraph::new(),
            node_identity_map: HashMap::new(),
            network_output: Vec::new(),
        };
        nn.initialize();
        nn
    }

    /// Initialize the neural network graph from the genome.
    /// Adds neurons as nodes and synaptic connections as edges.
    fn initialize(&mut self) {

        // Add all neuron nodes
        for neuron_id in &self.genome.neuron_id[..] {
            let nge = NeuronalEneCode::new_from_enecode(neuron_id.as_str(), &self.genome);
            let arc_nge = Arc::new(nge);
            let neuron = Nn::from(arc_nge.clone());
            let node = self.graph.add_node(neuron);
            self.node_identity_map.entry(neuron_id.clone()).or_insert(node);
        }

        // Build edges
        for daughter in &self.genome.neuron_id[..] {
            let nge = NeuronalEneCode::new_from_enecode(daughter.as_str(), &self.genome);

            // Recombination can produce a topology gene that refers to a
            // non-existent parent; skip those connections.
            for parent in nge.topology.inputs.keys() {
                if self.node_identity_map.contains_key(parent) {
                    self.graph.add_edge(self.node_identity_map[parent],
                                        self.node_identity_map[daughter],
                                        nge.topology.inputs[parent]);
                }
            }
        }
    }

    /// Returns a pretty-printed JSON serialization of the genome.
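    ///
    /// A minimal usage sketch, reusing the crate's `GENOME_EXAMPLE` doctest fixture
    /// as in the struct-level example above:
    ///
    /// ```rust
    /// # use evo_rl::doctest::GENOME_EXAMPLE;
    /// # use evo_rl::graph::NeuralNetwork;
    /// let network = NeuralNetwork::new(GENOME_EXAMPLE.clone());
    /// let json = network.serialize_genome();
    /// assert!(!json.is_empty());
    /// ```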
    pub fn serialize_genome(&self) -> String {
        serde_json::to_string_pretty(&self.genome).unwrap()
    }

    /// Crossover recombination of this network's genome with a partner's.
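    ///
    /// A minimal sketch of recombining two parent networks (mirrors
    /// `test_recombine_enecode`; assumes `GENOME_EXAMPLE2` is exported from the
    /// doctest fixtures alongside `GENOME_EXAMPLE`):
    ///
    /// ```rust,ignore
    /// # use evo_rl::doctest::{GENOME_EXAMPLE, GENOME_EXAMPLE2};
    /// # use evo_rl::graph::NeuralNetwork;
    /// use rand::prelude::*;
    ///
    /// let mut rng = StdRng::from_seed([0; 32]);
    /// let parent_a = NeuralNetwork::new(GENOME_EXAMPLE.clone());
    /// let parent_b = NeuralNetwork::new(GENOME_EXAMPLE2.clone());
    /// let offspring_genome = parent_a.recombine_enecode(&mut rng, &parent_b).unwrap();
    /// let offspring = NeuralNetwork::new(offspring_genome);
    /// ```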
    pub fn recombine_enecode<R: Rng>(&self, rng: &mut R, partner: &NeuralNetwork) -> Result<EneCode, GraphConstructionError> {
        if let Ok(offspring_enecode) = self.genome.recombine(rng, &partner.genome) {
            Ok(offspring_enecode)
        } else {
            Err(GraphConstructionError::EnecodeRecombinationError)
        }
    }

    /// Runs one round of mutation on this network: synaptic weights, neuron
    /// properties, and topology are mutated in place, and the genome is then
    /// refreshed from the mutated graph.
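    ///
    /// A minimal sketch; the rates below are illustrative values, not defaults
    /// taken from the crate:
    ///
    /// ```rust
    /// # use evo_rl::doctest::GENOME_EXAMPLE;
    /// # use evo_rl::graph::NeuralNetwork;
    /// # let mut network = NeuralNetwork::new(GENOME_EXAMPLE.clone());
    /// // (mutation_rate, mutation_sd, topology_mutation_rate)
    /// network.mutate(0.1, 0.2, 0.05);
    /// ```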
    pub fn mutate(&mut self, mutation_rate: f32, mutation_sd: f32, topology_mutation_rate: f32) {
        let mut rng = rand::thread_rng();
        self.mutate_synapses(&mut rng, mutation_rate, mutation_sd);
        self.mutate_nn(&mut rng, mutation_rate, mutation_sd);
        self.mutate_topology(&mut rng, topology_mutation_rate);
        let new_enecode = self.read_current_enecode();
        self.update_genome(new_enecode);
    }

    // Reads the current graph state back into an EneCode
    fn read_current_enecode(&self) -> EneCode {
        EneCode::from(self)
    }

    // Replaces the stored genome with the updated one
    fn update_genome(&mut self, updated_genome: EneCode) {
        self.genome = updated_genome;
    }

    /// Mutates intrinsic properties of each neuron (`Nn`) in the graph.
    fn mutate_nn<R: Rng>(&mut self, rng: &mut R, mutation_rate: f32, sd: f32) {
        for nn in self.node_identity_map.keys() {
            let node = self.node_identity_map[nn];
            self.graph[node].mutate(rng, mutation_rate, sd);
        }
    }

    /// Mutates connections in the network given the current mutation rate
    fn mutate_synapses<R: Rng>(&mut self, rng: &mut R, epsilon: f32, sd: f32) {
        // synaptic mutation
        let normal = Normal::new(0., sd).unwrap();
        for edge_index in self.graph.edge_indices() {
            if rng.gen::<f32>() < epsilon {
                let new_weight: f32 = self.graph[edge_index] + normal.sample(rng);
                self.graph[edge_index] = new_weight;
            }
        }
    }

    /// Mutates the topology of the network: adds or removes connections and
    /// duplicates or removes hidden neurons.
    fn mutate_topology<R: Rng>(&mut self, rng: &mut R, epsilon: f32) {

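        // Each structural mutation below fires independently with probability
        // epsilon^2, the square of the topology mutation rate.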
        let input_units = self.fetch_neuron_list_by_type(NeuronType::In);

        for inp in input_units {

            if rng.gen::<f32>() < epsilon.powf(2.0) {
                if let Some(neuron_id) = self.node_identity_map.keys().choose(rng) {
                    let downstream_node = self.node_identity_map[neuron_id];
                    self.add_new_edge(inp, downstream_node);
                }
                continue;
            }

            if rng.gen::<f32>() < epsilon.powf(2.0) {
                if let Some(neuron_id) = self.node_identity_map.keys().choose(rng) {
                    let downstream_node = self.node_identity_map[neuron_id];
                    self.remove_edge(inp, downstream_node);
                }
                continue;
            }
        }

        let hidden_units = self.fetch_neuron_list_by_type(NeuronType::Hidden);

        for nn in hidden_units {
            if rng.gen::<f32>() < epsilon.powf(2.0) {
                self.duplicate_neuron(nn);
                break;
            }

            // Don't remove progenitors!
            if rng.gen::<f32>() < epsilon.powf(2.0) {
                let progenitor_code = progenitor_code(&*self.graph[nn].id);

                if &*self.graph[nn].id != progenitor_code {
                    self.remove_neuron(nn);
                }

                break;
            }

            if rng.gen::<f32>() < epsilon.powf(2.0) {
                if let Some(neuron_id) = self.node_identity_map.keys().choose(rng) {
                    let downstream_node = self.node_identity_map[neuron_id];
                    self.add_new_edge(nn, downstream_node);
                }
                continue;
            }

            if rng.gen::<f32>() < epsilon.powf(2.0) {
                if let Some(neuron_id) = self.node_identity_map.keys().choose(rng) {
                    let downstream_node = self.node_identity_map[neuron_id];
                    self.remove_edge(nn, downstream_node);
                }
                continue;
            }
        }
    }

    /// Adds a zero-weight edge between two neurons if none exists in either direction.
    fn add_new_edge(&mut self, n1_node: NodeIndex, n2_node: NodeIndex) {

        // cannot connect a neuron to itself
        if n1_node == n2_node { return };

        // don't add edges that terminate on input neurons
        match self.graph[n2_node].neuron_type {
            NeuronType::In => return,
            _ => {},
        }

        // first see if the opposite-direction edge is present
        match self.graph.find_edge(n2_node, n1_node) {
            Some(_idx) => return,
            None => {},
        };

        // then add an edge in the right direction if none exists
        match self.graph.find_edge(n1_node, n2_node) {
            Some(_idx) => return,
            None => self.graph.add_edge(n1_node, n2_node, 0.),
        };
    }

    /// Removes the edge between two neurons, if one exists.
    fn remove_edge(&mut self, n1_node: NodeIndex, n2_node: NodeIndex) {
        // cannot remove a self-edge
        if n1_node == n2_node { return };

        // don't remove edges that originate from input neurons
        match self.graph[n1_node].neuron_type {
            NeuronType::In => return,
            _ => {},
        }

        if let Some(idx) = self.graph.find_edge(n1_node, n2_node) {
            if self.graph.remove_edge(idx).is_some() {
                debug!("Removing edge between {} and {}", self.graph[n1_node].id, self.graph[n2_node].id);
            }
        }
    }

    /// Duplicates the neuron at the given node index and adds the copy as its child.
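    ///
    /// The daughter neuron receives an incremented innovation code derived from the
    /// parent id (e.g. `N1` -> `N1-0001`), a zeroed bias, and a single zero-weight
    /// connection from the parent.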
    fn duplicate_neuron(&mut self, neuron: NodeIndex) {
        let node_children = self.graph.neighbors_directed(neuron, petgraph::Direction::Outgoing);

        let mut node_children_id: Vec<&str> = Vec::new();
        for cnode in node_children {
            match self.graph[cnode].neuron_type {
                NeuronType::Hidden => node_children_id.push(&self.graph[cnode].id),
                _ => continue,
            }
        }

        let mut neuron_daughter = self.graph[neuron].clone();
        let daughter_innovation_number = increment_innovation_number(&self.graph[neuron].id, node_children_id);
        let daughter_id_key = daughter_innovation_number.clone();

        // initialize with zero weights and bias, having a connection to the parent
        neuron_daughter.id = daughter_innovation_number;
        neuron_daughter.inputs = vec![self.graph[neuron].id.to_string()];
        neuron_daughter.bias = 0.;
        neuron_daughter.synaptic_weights = DVector::from_vec(vec![0.]);

        let daughter_node = self.graph.add_node(neuron_daughter);
        self.node_identity_map.entry(daughter_id_key.to_string()).or_insert(daughter_node);

        self.graph.add_edge(neuron, daughter_node, 0.);
    }

    /// Removes a neuron from the graph and rebuilds the node identity map.
    fn remove_neuron(&mut self, neuron: NodeIndex) {
        let neuron_id = self.graph[neuron].id.to_string();
        self.graph.remove_node(neuron);

        let value = self.node_identity_map.remove(&neuron_id);

        match value {
            Some(idx) => debug!("Successfully removed node {} at index {:?}", neuron_id, idx),
            None => error!("Node for {} was not there to be removed!", neuron_id),
        }

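        // petgraph's remove_node swaps the last node into the removed slot, which
        // invalidates that node's previous index; rebuild the identity map from scratch.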
        for node_index in self.graph.node_indices() {
            let node_entry = self.graph[node_index].id.to_string();
            self.node_identity_map.insert(node_entry, node_index);
        }
    }

    /// Transfers ownership of the network (consumes and returns `self`).
    pub fn transfer(self) -> Self {
        self
    }

    /// Helper function that collects neurons of a particular type, sorted by id.
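    ///
    /// Sorting by id gives a deterministic ordering, which `fwd` relies on when
    /// mapping input values to input neurons by position.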
    fn fetch_neuron_list_by_type(&self, neurontype: NeuronType) -> Vec<NodeIndex> {

        let mut neuron_ids: Vec<String> = self.graph.node_indices()
            .filter(|&n| self.graph[n].neuron_type == neurontype)
            .map(|n| self.graph[n].id.to_string())
            .collect();

        neuron_ids.sort();

        neuron_ids.iter().map(|id| self.node_identity_map[id]).collect()
    }

    /// Performs forward propagation at an individual node.
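    ///
    /// Computes the dot product of the parents' output values with the incoming
    /// synaptic weights and feeds it to the node's `propagate`; nodes without
    /// parents (e.g. input neurons) are left unchanged.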
    fn propagate_node(&mut self, node: NodeIndex) {
        let node_parents = self.graph.neighbors_directed(node, petgraph::Direction::Incoming);

        let mut dot_product: f32 = 0.;
        let mut n_parents = 0;

        for pnode in node_parents {

            n_parents += 1;

            // grab the current synaptic weight
            let edge = self.graph.find_edge(pnode, node);

            let synaptic_value: f32 = match edge {
                Some(syn) => *self.graph.edge_weight(syn).expect("Connection was not initialized!"),
                None => panic!("Improper Edge")
            };

            dot_product += synaptic_value * self.graph[pnode].output_value();
        }

        if n_parents > 0 { self.graph[node].propagate(dot_product) };
    }

    /// Forward propagate through the neural network.
    /// This function takes a vector of input values and populates the network output.
    pub fn fwd(&mut self, input: Vec<f32>) {
        // For all input neurons, set values to input
        let input_nodes = self.fetch_neuron_list_by_type(NeuronType::In);
        assert_eq!(input.len(), input_nodes.len());

        for (i, &node) in input_nodes.iter().enumerate() {
            self.graph[node].propagate(input[i]);
        }

        // Create a Bfs iterator starting from the first input node
        let init_node = input_nodes[0];
        let mut bfs = Bfs::new(&self.graph, init_node);

        // Iterate over the nodes in breadth-first order, skipping output nodes
        while let Some(nx) = bfs.next(&self.graph) {
            if self.graph[nx].neuron_type == NeuronType::Out { continue };
            self.propagate_node(nx);
        }
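
        // Output neurons were skipped in the traversal above; propagate them last,
        // once all of their upstream nodes have been updated.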
        // Create a vector to store the result
        let mut network_output: Vec<f32> = Vec::new();

        let output_neurons = self.fetch_neuron_list_by_type(NeuronType::Out);

        for nx in output_neurons {
            self.propagate_node(nx);
            network_output.push(self.graph[nx].output_value());
        }

        self.network_output = network_output;
    }

    /// Fetch the output of the network as a vector of floating-point numbers.
    pub fn fetch_network_output(&self) -> Vec<f32> {
        self.network_output.clone()
    }

    /// Write the graph as a .dot file for visualization/inspection.
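    ///
    /// A minimal sketch (the output path is illustrative):
    ///
    /// ```rust,no_run
    /// # use evo_rl::doctest::GENOME_EXAMPLE;
    /// # use evo_rl::graph::NeuralNetwork;
    /// use std::path::Path;
    ///
    /// let network = NeuralNetwork::new(GENOME_EXAMPLE.clone());
    /// network.write_dot(Path::new("network.dot"));
    /// ```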
    pub fn write_dot(&self, file_path: &Path) {
        let node_label = |_g: &DiGraph<Nn, f32>, node_ref: (NodeIndex, &Nn)| {
            format!("label=\"{}\"", node_ref.1.id)
        };

        let edge_attr = |_g: &DiGraph<Nn, f32>, edge_ref: EdgeReference<'_, f32>| -> String {
            format!("label=\"{}\"", edge_ref.weight())
        };

        let dot = Dot::with_attr_getters(&self.graph, &[Config::NodeIndexLabel], &edge_attr, &node_label);
        let mut file = File::create(file_path).unwrap();

        writeln!(&mut file, "{:?}", dot).unwrap();
    }
}

#[derive(Debug, Error)]
pub enum GraphConstructionError {
    #[error("Enecode level error during recombination.")]
    EnecodeRecombinationError
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{doctest::{GENOME_EXAMPLE, GENOME_EXAMPLE2, XOR_GENOME}, setup_logger};

    #[test]
    fn test_initialize() {
        let genome = GENOME_EXAMPLE.clone();

        // Create an EneCode and use it to initialize a NeuralNetwork
        let mut network_example = NeuralNetwork::new(genome);
        network_example.initialize();

        // Validate that the graph is built correctly
        let mut bfs = Bfs::new(&network_example.graph, network_example.node_identity_map["input_1"]);

        let mut traversal_order: Vec<String> = Vec::new();

        while let Some(nx) = bfs.next(&network_example.graph) {
            traversal_order.push(network_example.graph[nx].id.to_string())
        }

        assert_eq!(vec!["input_1", "N1", "output_1"], traversal_order);
    }

    #[test]
    fn test_fwd_fetch_network_output() {
        let genome = GENOME_EXAMPLE.clone();
        let mut network_example = NeuralNetwork::new(genome);

        // Test the forward pass and verify that the network_output is as expected
        network_example.fwd(vec![0.]);

        let network_out = network_example.fetch_network_output();
        assert_eq!(network_out[0], 0.);

        network_example.fwd(vec![2.]);

        let network_out = network_example.fetch_network_output();
        assert!(network_out[0] > 0.);
    }

    #[test]
    fn test_mutate_synapses() {
        let genome = GENOME_EXAMPLE.clone();
        let mut network_example = NeuralNetwork::new(genome);

        let gt = GENOME_EXAMPLE.clone();
        let n1gene = gt.topology_gene(&String::from("N1"));
        let weight_before_mut: f32 = n1gene.inputs["input_1"];

        let epsilon: f32 = 1.;

        let seed = [0; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        network_example.mutate_synapses(&mut rng, epsilon, 0.1);

        let in1_n1_edge = network_example.graph.find_edge(network_example.node_identity_map["input_1"], network_example.node_identity_map["N1"]);

        let synaptic_value: f32 = match in1_n1_edge {
            Some(syn) => *network_example.graph.edge_weight(syn).expect("Edge not found!!"),
            None => panic!("No weight at edge index")
        };

        assert_ne!(synaptic_value, weight_before_mut);
    }

    #[test]
    fn test_mutate_topology() {
        let genome = GENOME_EXAMPLE.clone();
        let mut network_example = NeuralNetwork::new(genome);

        let epsilon: f32 = 1.;

        let seed = [0; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        network_example.mutate_topology(&mut rng, epsilon);

        let added_node = network_example.node_identity_map["N1-0001"];

        let parent_nodes = network_example.graph.neighbors_directed(added_node, petgraph::Direction::Incoming);

        let mut parent_node_vec: Vec<String> = Vec::new();
        for pnode in parent_nodes {
            parent_node_vec.push(network_example.graph[pnode].id.to_string());
        }

        assert_eq!(parent_node_vec.len(), 1);
        assert_eq!(parent_node_vec[0], String::from("N1"));
    }

    #[test]
    fn test_mutate() {
        let genome = GENOME_EXAMPLE.clone();
        let mut network_example = NeuralNetwork::new(genome);

        let gt = GENOME_EXAMPLE.clone();
        let n1gene = gt.topology_gene(&String::from("N1"));
        let weight_before_mut: f32 = n1gene.inputs["input_1"];
        let bias_before_mut: f32 = n1gene.genetic_bias;

        let epsilon: f32 = 1.;

        network_example.mutate(epsilon, 0.1, 0.);

        let in1_n1_edge = network_example.graph.find_edge(network_example.node_identity_map["input_1"], network_example.node_identity_map["N1"]);

        let synaptic_value: f32 = match in1_n1_edge {
            Some(syn) => *network_example.graph.edge_weight(syn).expect("Edge not found!!"),
            None => panic!("No weight at edge index")
        };

        let bias_value: f32 = network_example.graph[network_example.node_identity_map["N1"]].bias;

        assert_ne!(synaptic_value, weight_before_mut);
        assert_ne!(bias_value, bias_before_mut);
    }

    #[test]
    fn test_recombine_enecode() {
        setup_logger();

        let seed = [0; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        let ene1 = GENOME_EXAMPLE.clone();
        let network1 = NeuralNetwork::new(ene1);

        let ene2 = GENOME_EXAMPLE2.clone();
        let network2 = NeuralNetwork::new(ene2);

        let recombined_enecode = network1.recombine_enecode(&mut rng, &network2).unwrap();

        let mut recombined: NeuralNetwork = NeuralNetwork::new(recombined_enecode);

        info!("Offspring genome: {:#?}", recombined.genome.topology);
        recombined.fwd(vec![1.]);

        let test_output = recombined.fetch_network_output();
        assert_ne!(test_output[0], 0.);

        let recombined_nodes: Vec<_> = recombined.node_identity_map.keys().map(|k| String::from(k)).collect();

        assert!(recombined_nodes.len() == 4);
    }

    #[test]
    fn test_duplicate_neuron() {
        setup_logger();

        let ene1 = GENOME_EXAMPLE.clone();
        let mut network1 = NeuralNetwork::new(ene1);

        network1.duplicate_neuron(network1.node_identity_map["N1"]);

        let added_node = network1.node_identity_map["N1-0001"];

        let parent_nodes = network1.graph.neighbors_directed(added_node, petgraph::Direction::Incoming);

        let mut parent_node_vec: Vec<String> = Vec::new();
        for pnode in parent_nodes {
            parent_node_vec.push(network1.graph[pnode].id.to_string())
        }

        assert_eq!(parent_node_vec.len(), 1);
        assert_eq!(parent_node_vec[0], String::from("N1"));
    }

    #[test]
    fn test_add_new_edge() {
        setup_logger();

        let xor = XOR_GENOME.clone();
        let mut network = NeuralNetwork::new(xor);

        let a = network.node_identity_map["A"];
        let b = network.node_identity_map["B"];

        network.add_new_edge(a, b);

        let bnode = network.node_identity_map["B"];

        let parent_nodes = network.graph.neighbors_directed(bnode, petgraph::Direction::Incoming);
        let mut parent_node_vec: Vec<String> = Vec::new();
        for pnode in parent_nodes {
            parent_node_vec.push(network.graph[pnode].id.to_string())
        }

        let filt_list: Vec<&String> = parent_node_vec.iter().filter(|&x| x == "A").collect();

        assert_eq!(filt_list[0], "A");
    }

    #[test]
    fn test_remove_neuron() {
        setup_logger();

        let xor = XOR_GENOME.clone();
        let mut network = NeuralNetwork::new(xor);

        let b = network.node_identity_map["B"];
        network.remove_neuron(b);
        assert!(!network.node_identity_map.contains_key("B"));

        let mut graph_nodes: Vec<String> = Vec::new();

        for node in network.graph.node_indices() {
            graph_nodes.push(network.graph[node].id.to_string());
        }

        assert!(!graph_nodes.contains(&String::from("B")));
    }
}