evo_rl/enecode.rs

//! Module specifying the genetic encoding for neural networks and neuronal properties. The
//! genetic encoding is used to reconstruct neural network graphs and to recombine
//! offspring.

use log::*;
use pyo3::ToPyObject;
use rand::Rng;
use rand::seq::IteratorRandom;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use std::path::PathBuf;
use std::fs::File;
use std::io::Read;
use thiserror::Error;
use pyo3::prelude::*;
use pyo3::types::PyDict;
use serde::{Serialize, Deserialize};

use crate::{graph::NeuralNetwork, sort_genes_by_neuron_type};
use crate::enecode::topology::TopologyGene;
use crate::enecode::properties::NeuronalPropertiesGene;
use crate::enecode::meta::MetaLearningGene;

pub mod topology;
pub mod properties;
pub mod meta;

/// `EneCode` encapsulates the genetic blueprint for constructing an entire neural network.
///
/// This struct holds all the information required to instantiate a neural network with
/// distinct neurons, synaptic connections, and meta-learning parameters. It consists of a
/// collection of genes that provide the blueprint for each individual neuron's topology,
/// its neuronal properties, and meta-learning rules.
///
/// # Fields
/// * `neuron_id` - A vector containing the unique identifiers for each neuron in the genome.
///                 These IDs are used to find the associated topology gene for each neuron.
///
/// * `topology` - A list of `TopologyGene` structs that describe the synaptic connections,
///                neuron types, and other topological features for each neuron in the network.
///
/// * `neuronal_props` - An instance of `NeuronalPropertiesGene` that provides global neuronal
///                      properties like time constants, homeostatic forces, and activation function
///                      scaling factors.
///
/// * `meta_learning` - An instance of `MetaLearningGene` that provides meta-learning parameters
///                     such as learning rate and learning thresholds for synaptic adjustments.
///
/// # Example Usage
/// ```rust
/// use evo_rl::enecode::EneCode;
/// use evo_rl::enecode::topology::TopologyGene;
/// use evo_rl::enecode::properties::NeuronalPropertiesGene;
/// use evo_rl::enecode::meta::MetaLearningGene;
/// use evo_rl::enecode::NeuronType;
/// # use std::collections::HashMap;
/// # use std::sync::Arc;
///
/// // Initialization (example)
/// let genome = EneCode::new_from_genome(
///     vec![
///         TopologyGene {
///             innovation_number: Arc::from("N1"),
///             pin: NeuronType::In,
///             inputs: HashMap::new(),
///             genetic_bias: 0.1,
///             active: true
///         },
///         // ... more TopologyGene
///     ],
///     NeuronalPropertiesGene {
///         innovation_number: Arc::from("NP01"),
///         tau: 0.9,
///         homeostatic_force: 0.1,
///         tanh_alpha: 2.0,
///     },
///     MetaLearningGene {
///         innovation_number: Arc::from("MTL01"),
///         learning_rate: 0.01,
///         learning_threshold: 0.5,
///     });
/// ```

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[pyclass]
pub struct EneCode {
    pub neuron_id: Vec<String>, // equivalent to innovation_number in TopologyGene
    pub topology: Vec<TopologyGene>,
    pub neuronal_props: NeuronalPropertiesGene,
    pub meta_learning: MetaLearningGene,
}

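/// Converts the genome into a Python dictionary keyed by gene family
/// (`neuron_id`, `topology`, `neuronal_props`, `meta_learning`).
///
/// A minimal usage sketch, marked `ignore` because it needs an embedded Python interpreter;
/// `Python::with_gil` is standard pyo3 API and `GENOME_EXAMPLE` is the doctest fixture used
/// elsewhere in this module.
///
/// ```rust,ignore
/// use pyo3::prelude::*;
/// use pyo3::ToPyObject;
/// # use evo_rl::doctest::GENOME_EXAMPLE;
///
/// let genome = GENOME_EXAMPLE.clone();
/// Python::with_gil(|py| {
///     let obj = genome.to_object(py);
///     // `obj` is a Python dict with the four gene-family entries.
/// });
/// ```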
impl ToPyObject for EneCode {
    fn to_object(&self, py: Python<'_>) -> PyObject {
        let dict = PyDict::new(py);
        dict.set_item("neuron_id", &self.neuron_id).expect("Failed to set neuron_id");
        dict.set_item("topology", &self.topology).expect("Failed to set topology");
        dict.set_item("neuronal_props", &self.neuronal_props).expect("Failed to set neuronal_props");
        dict.set_item("meta_learning", &self.meta_learning).expect("Failed to set meta_learning");

        dict.into()
    }
}

/// Creates a genome from a neural network after recombination and mutation.
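///
/// A hedged round-trip sketch, marked `ignore` because it assumes `NeuralNetwork` is reachable
/// at `evo_rl::graph::NeuralNetwork` and that the `GENOME_EXAMPLE` fixture is exposed via
/// `evo_rl::doctest`; it mirrors the `test_enecode_from_neural_network` unit test below.
///
/// ```rust,ignore
/// use evo_rl::enecode::EneCode;
/// use evo_rl::graph::NeuralNetwork;
/// # use evo_rl::doctest::GENOME_EXAMPLE;
///
/// // Build a network from a genome, then read the genome back out of the graph.
/// let original = GENOME_EXAMPLE.clone();
/// let network = NeuralNetwork::new(original.clone());
/// assert_eq!(EneCode::from(&network), original);
/// ```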
impl From<&NeuralNetwork> for EneCode {
    fn from(network: &NeuralNetwork) -> Self {
        let neuron_id: Vec<String> = network.node_identity_map.keys().map(|id| String::from(id)).collect();

        let mut topology: Vec<TopologyGene> = Vec::new();
        for id in neuron_id.iter() {
            // Identify parent nodes and build a HashMap of weights
            let node = network.node_identity_map[id];
            let node_parents = network.graph.neighbors_directed(node, petgraph::Direction::Incoming);

            let mut input_map: HashMap<String, f32> = HashMap::new();

            for parent in node_parents {
                let parent_id = network.graph[parent].id.clone();
                let edge_id = network.graph.find_edge(parent, node);

                let edge_weight: f32 = match edge_id {
                    Some(w) => *network.graph.edge_weight(w).unwrap(),
                    None => panic!("Edge ID was not found"),
                };

                input_map.insert(String::from(&*parent_id), edge_weight);
            }

            topology.push(
                TopologyGene {
                    innovation_number: network.graph[node].id.clone(),
                    inputs: input_map,
                    pin: network.graph[node].neuron_type.clone(),
                    genetic_bias: network.graph[node].bias,
                    active: true,
                }
            );
        }

        let neuronal_props = network.genome.neuronal_props.clone();
        let meta_learning = network.genome.meta_learning.clone();

        EneCode::new_from_genome(topology, neuronal_props, meta_learning)
    }
}

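/// Loads a genome from a JSON checkpoint file on disk.
///
/// A minimal sketch, marked `ignore` because it assumes a serialized genome exists at the
/// hypothetical path `checkpoint.json`.
///
/// ```rust,ignore
/// use std::path::PathBuf;
/// use evo_rl::enecode::EneCode;
///
/// let checkpoint = PathBuf::from("checkpoint.json");
/// let genome = EneCode::try_from(&checkpoint).expect("checkpoint should deserialize");
/// ```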
impl TryFrom<&PathBuf> for EneCode {

    type Error = serde_json::Error;

    fn try_from(checkpoint: &PathBuf) -> Result<Self, Self::Error> {
        let mut file = File::open(checkpoint).expect("Failed to open checkpoint file");
        let mut content = String::new();

        file.read_to_string(&mut content).expect("Failed to read checkpoint file");

        serde_json::from_str(&content)
    }
}

impl EneCode {
    //TODO: Constructor for Vec<TopologyGene> that has parameters for number of inputs/outputs
    //TODO: Default constructors for all *gene structs

    /// Constructor function for a basic genome with a defined number of inputs and outputs.
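    ///
    /// # Example
    ///
    /// A minimal doctest: two inputs, four hidden neurons, one output, and default neuronal
    /// properties (`module = None`).
    ///
    /// ```rust
    /// use evo_rl::enecode::EneCode;
    ///
    /// let genome = EneCode::new(2, 4, 1, None);
    /// // One topology gene per neuron: 2 inputs + 4 hidden + 1 output.
    /// assert_eq!(genome.neuron_id.len(), 7);
    /// ```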
    pub fn new(num_inputs: usize, num_hidden: usize, num_outputs: usize, module: Option<&str>) -> Self {

        // Generate topology with a default rule for hidden progenitors.
        let topology_s: Vec<TopologyGene> = EneCode::generate_new_topology(num_inputs, num_outputs, num_hidden);

        // Generate owned strings of the neuron ids
        let neuron_id: Vec<String> = topology_s.iter().map(|x| String::from(&*x.innovation_number)).collect();

        let neuronal_props: NeuronalPropertiesGene = match module {
            Some(x) => NeuronalPropertiesGene::new(x),
            None => NeuronalPropertiesGene::default()
        };

        Self {
            neuron_id,
            topology: topology_s,
            neuronal_props,
            meta_learning: MetaLearningGene::default()
        }
    }

    /// Generates an appropriate `Vec<TopologyGene>` for a given number of input, output, and
    /// hidden neurons. The default is a single fully connected feed-forward hidden layer.
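    ///
    /// # Example
    ///
    /// A small doctest mirroring the unit tests below: 2 inputs, 1 output, and 2 hidden
    /// neurons yield 5 topology genes.
    ///
    /// ```rust
    /// use evo_rl::enecode::EneCode;
    ///
    /// let topology = EneCode::generate_new_topology(2, 1, 2);
    /// assert_eq!(topology.len(), 5);
    /// ```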
    pub fn generate_new_topology(input_size: usize, output_size: usize, num_hidden: usize) -> Vec<TopologyGene> {

        let mut topology: Vec<TopologyGene> = Vec::new();

        for input_idx in 0..input_size {
            let neuron_id = format!("i{:02}", input_idx);

            debug!("adding neuron {}", neuron_id);

            topology.push(
                TopologyGene {
                    innovation_number: Arc::from(neuron_id),
                    pin: NeuronType::In,
                    inputs: HashMap::new(),
                    genetic_bias: 0.,
                    active: true,
                }
            );
        }

        for hidden_idx in 0..num_hidden {
            // Hidden neurons receive a zero-weight connection from every input neuron
            let input_weight_map: HashMap<String, f32> = (0..input_size)
                .map(|x| (format!("i{:02}", x), 0.))
                .collect();

            let neuron_id = format!("h{:02}", hidden_idx);
            debug!("adding neuron {}", neuron_id);

            topology.push(
                TopologyGene {
                    innovation_number: Arc::from(neuron_id),
                    pin: NeuronType::Hidden,
                    inputs: input_weight_map,
                    genetic_bias: 0.,
                    active: true,
                }
            );
        }

        for output_idx in 0..output_size {
            // Output neurons receive a zero-weight connection from every hidden neuron
            let input_weight_map: HashMap<String, f32> = (0..num_hidden)
                .map(|x| (format!("h{:02}", x), 0.))
                .collect();

            let neuron_id = format!("x{:02}", output_idx);
            debug!("adding neuron {}", neuron_id);

            topology.push(
                TopologyGene {
                    innovation_number: Arc::from(neuron_id),
                    pin: NeuronType::Out,
                    inputs: input_weight_map,
                    genetic_bias: 0.,
                    active: true,
                }
            );
        }

        topology
    }

    /// Constructor function for `EneCode` based on an established genome; sorts the genes into
    /// canonical order based on `NeuronType` and innovation number.
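    ///
    /// # Example
    ///
    /// A sketch that rebuilds a genome from its own parts, assuming the `GENOME_EXAMPLE`
    /// doctest fixture used elsewhere in this module.
    ///
    /// ```rust
    /// # use evo_rl::doctest::GENOME_EXAMPLE;
    /// use evo_rl::enecode::EneCode;
    ///
    /// let genome = GENOME_EXAMPLE.clone();
    /// let rebuilt = EneCode::new_from_genome(
    ///     genome.topology.clone(),
    ///     genome.neuronal_props.clone(),
    ///     genome.meta_learning.clone(),
    /// );
    /// assert_eq!(rebuilt.neuron_id.len(), genome.neuron_id.len());
    /// ```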
    pub fn new_from_genome(topology: Vec<TopologyGene>, neuronal_props: NeuronalPropertiesGene, meta_learning: MetaLearningGene) -> Self {

        let topology_s: Vec<TopologyGene> = sort_genes_by_neuron_type(topology);
        let neuron_id: Vec<String> = topology_s.iter().map(|tg| tg.innovation_number.to_string()).collect();

        EneCode {
            neuron_id,
            topology: topology_s,
            neuronal_props,
            meta_learning
        }
    }

    /// Fetches the topology gene corresponding to a given neuron ID.
    ///
    /// # Arguments
    /// * `neuron_id` - The unique identifier for the neuron.
    ///
    /// # Returns
    /// A reference to the `TopologyGene` associated with the neuron ID.
    ///
    /// # Panics
    /// Panics if the neuron ID is not present in the topology genome.
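    ///
    /// # Example
    ///
    /// A short doctest against the `GENOME_EXAMPLE` fixture, which contains a neuron `"N1"`.
    ///
    /// ```rust
    /// # use evo_rl::doctest::GENOME_EXAMPLE;
    ///
    /// let gene = GENOME_EXAMPLE.topology_gene("N1");
    /// assert_eq!(&*gene.innovation_number, "N1");
    /// ```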
    pub fn topology_gene(&self, neuron_id: &str) -> &TopologyGene {
        self.topology.iter()
            .find(|&g| &*g.innovation_number == neuron_id)
            .expect("Innovation Number Not Found in Topology Genome!")
    }

    /// Performs genetic recombination during mating.
    ///
    /// # Arguments
    /// * `rng` - a random number generator (e.g. `thread_rng`)
    /// * `other_genome` - the `EneCode` of the partner to recombine with
    ///
    /// # Returns
    /// A `Result<EneCode, RecombinationError>`; recombination fails if the genomes share no
    /// innovation numbers.
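    ///
    /// # Example
    ///
    /// A doctest mirroring the recombination unit tests below: two clones of the
    /// `GENOME_EXAMPLE` fixture recombined with a seeded RNG.
    ///
    /// ```rust
    /// # use evo_rl::doctest::GENOME_EXAMPLE;
    /// use rand::SeedableRng;
    /// use rand::rngs::StdRng;
    ///
    /// let mut rng = StdRng::from_seed([0; 32]);
    /// let parent_a = GENOME_EXAMPLE.clone();
    /// let parent_b = GENOME_EXAMPLE.clone();
    ///
    /// let offspring = parent_a.recombine(&mut rng, &parent_b).expect("compatible genomes");
    /// assert_eq!(offspring.neuron_id.len(), parent_a.neuron_id.len());
    /// ```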
    pub fn recombine<R: Rng>(&self, rng: &mut R, other_genome: &EneCode) -> Result<EneCode, RecombinationError> {
        // Determine the number of crossover points by seeing how many genes have matching
        // innovation numbers
        let self_innovation_nums: HashSet<&str> = self.neuron_id.iter().map(|x| &x[..]).collect();
        let other_innovation_nums: HashSet<&str> = other_genome.neuron_id.iter().map(|x| &x[..]).collect();
        let homology_genes: Vec<&str> = self_innovation_nums.intersection(&other_innovation_nums).copied().collect();

        let crossover_topology_vec: Vec<TopologyGene> = self.topology.iter()
            .filter(|x| homology_genes.contains(&&*x.innovation_number))
            .cloned()
            .collect();

        let sorted_crossover_topology: Vec<_> = sort_genes_by_neuron_type(crossover_topology_vec);
        let sorted_homology_genes: Vec<&str> = sorted_crossover_topology.iter().map(|tg| &*tg.innovation_number).collect();

        // Determine the number of crossover points, or return a recombination error if none exist
        if sorted_homology_genes.is_empty() {
            return Err(RecombinationError::CrossoverMatchError);
        }

        let max_crossover_points = if sorted_homology_genes.len() == 1 {
            1
        } else {
            sorted_homology_genes.len() / 2
        };

        let n_crossover = if max_crossover_points < 2 { 1 } else { rng.gen_range(1..=max_crossover_points) };

        // Determine the location of each crossover point
        let mut crossover_points: Vec<usize> = (0..sorted_homology_genes.len() - 1).choose_multiple(rng, n_crossover);
        crossover_points.sort();
        debug!("Crossover points {:#?}", crossover_points);

        let mut recombined_offspring_topology: Vec<TopologyGene> = Vec::new();

        // For each crossover point, swap at the matching innovation number.
        // Clone each genome and reverse it for popping.
        let mut own_copy: Vec<TopologyGene> = self.topology.clone();
        own_copy.reverse();

        let mut others_copy: Vec<TopologyGene> = other_genome.topology.clone();
        others_copy.reverse();

        let mut use_self = true;

        for point in crossover_points {
            let innovation_number = sorted_homology_genes[point];

            let mut self_genes: Vec<TopologyGene> = Vec::new();
            while let Some(sg) = own_copy.pop() {
                if &*sg.innovation_number == innovation_number {
                    self_genes.push(sg);
                    break;
                }
                self_genes.push(sg);
            }

            let mut other_genes: Vec<TopologyGene> = Vec::new();
            while let Some(og) = others_copy.pop() {
                if &*og.innovation_number == innovation_number {
                    other_genes.push(og);
                    break;
                }
                other_genes.push(og);
            }

            // If the innovation number isn't found then there is no corresponding crossover point
            if others_copy.is_empty() {
                return Err(RecombinationError::CrossoverMatchError);
            }

            if use_self {
                recombined_offspring_topology.extend(self_genes.drain(..));
            } else {
                recombined_offspring_topology.extend(other_genes.drain(..));
            }

            use_self = !use_self;
        }

        // Add any remaining genes left over from the last recombination point onwards
        if use_self {
            recombined_offspring_topology.extend(own_copy.drain(..));
        } else {
            recombined_offspring_topology.extend(others_copy.drain(..));
        }

        Ok(EneCode::new_from_genome(recombined_offspring_topology, self.neuronal_props.clone(), self.meta_learning.clone()))
    }
}

/// `NeuronalEneCode` is a struct that encapsulates all genetic information for a single neuron.
///
/// This struct combines the topological, neuronal properties, and meta-learning genes
/// for an individual neuron in a neural network. It is used to instantiate a
/// single `Nn` (neuron) in the network graph.
///
/// # Fields
/// * `neuron_id` - The unique identifier for the neuron.
/// * `topology` - The topological gene, which includes information like innovation number,
///                inputs, outputs, weights, bias, and activation status.
/// * `properties` - The neuronal properties gene, which includes time constants and other
///                  neuron-specific parameters.
/// * `meta` - The meta-learning gene, which includes learning rates and thresholds.
///
/// # Example
/// ```rust
/// # use evo_rl::doctest::GENOME_EXAMPLE;
/// use evo_rl::enecode::NeuronalEneCode;
/// use evo_rl::enecode::EneCode;
///
/// // Assume `genome` is a properly initialized EneCode
/// # let genome = GENOME_EXAMPLE.clone();
/// let neuron_id = "N1";
/// let neuronal_ene_code = NeuronalEneCode::new_from_enecode(neuron_id, &genome);
/// ```
#[derive(Debug, Clone, PartialEq)]
pub struct NeuronalEneCode<'a> {
    pub neuron_id: Arc<str>,
    pub topology: &'a TopologyGene,
    pub properties: &'a NeuronalPropertiesGene,
    pub meta: &'a MetaLearningGene,
}

/// Generates a more specific genetic handle for use in initializing a single neuron
impl<'a> NeuronalEneCode<'a> {
    pub fn new_from_enecode(neuron_id: &str, genome: &'a EneCode) -> Self {
        let topology = genome.topology_gene(neuron_id);
        NeuronalEneCode {
            neuron_id: neuron_id.into(),
            topology,
            properties: &genome.neuronal_props,
            meta: &genome.meta_learning,
        }
    }
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum NeuronType {
    In,
    Out,
    Hidden,
}

#[derive(Debug, Error)]
pub enum RecombinationError {
    #[error("Incompatible Genomes: Non-matching innovation number chosen for crossover!")]
    CrossoverMatchError
}

#[cfg(test)]
mod tests {
    use super::*;
    use rand::SeedableRng;
    use rand::rngs::StdRng;
    use assert_matches::assert_matches;
    use crate::doctest::{XOR_GENOME, GENOME_EXAMPLE, GENOME_EXAMPLE2, XOR_GENOME_MINIMAL};
    use crate::setup_logger;

    #[test]
    fn test_generate_new_topology() {
        setup_logger();

        let topology = EneCode::generate_new_topology(1, 1, 1);
        assert_eq!(topology.len(), 3);

        let h00: &TopologyGene = topology.iter().find(|&t| &*t.innovation_number == "h00").unwrap();
        let h00_input_keys: Vec<&str> = h00.inputs.iter().map(|(x, _)| x.as_str()).collect();
        assert_eq!(vec!["i00"], h00_input_keys);

        let x00: &TopologyGene = topology.iter().find(|&t| &*t.innovation_number == "x00").unwrap();
        let x00_input_keys: Vec<&str> = x00.inputs.iter().map(|(x, _)| x.as_str()).collect();
        assert_eq!(vec!["h00"], x00_input_keys);

        let topology2 = EneCode::generate_new_topology(2, 2, 2);
        assert_eq!(topology2.len(), 6);

        let h01: &TopologyGene = topology2.iter().find(|&t| &*t.innovation_number == "h01").unwrap();
        let mut h01_input_keys: Vec<&str> = h01.inputs.iter().map(|(x, _)| x.as_str()).collect();
        h01_input_keys.sort();

        assert_eq!(vec!["i00", "i01"], h01_input_keys);

        let x01: &TopologyGene = topology2.iter().find(|&t| &*t.innovation_number == "x01").unwrap();
        let mut x01_input_keys: Vec<&str> = x01.inputs.iter().map(|(x, _)| x.as_str()).collect();
        x01_input_keys.sort();

        assert_eq!(vec!["h00", "h01"], x01_input_keys);

        let topology3 = EneCode::generate_new_topology(6, 4, 3);
        let t3inputs: Vec<&TopologyGene> = topology3.iter().filter(|&t| t.pin == NeuronType::In).collect();
        assert_eq!(t3inputs.len(), 6);

        let t3hidden: Vec<&TopologyGene> = topology3.iter().filter(|&t| t.pin == NeuronType::Hidden).collect();
        assert_eq!(t3hidden.len(), 3);

        let t3output: Vec<&TopologyGene> = topology3.iter().filter(|&t| t.pin == NeuronType::Out).collect();
        assert_eq!(t3output.len(), 4);
    }

    #[test]
    fn test_new_from_enecode() {
        // Create an EneCode instance and use it to initialize a NeuronalEneCode

        let neuronal_ene_code = NeuronalEneCode::new_from_enecode("N1", &GENOME_EXAMPLE);

        let mut input_map = HashMap::new();
        input_map.insert(String::from("input_1"), 1.0_f32);

        let expected_nec: NeuronalEneCode = NeuronalEneCode {
            neuron_id: Arc::from("N1"),
            topology: &TopologyGene {
                innovation_number: Arc::from("N1"),
                pin: NeuronType::Hidden,
                inputs: input_map,
                genetic_bias: 0.0,
                active: true,
            },
            properties: &GENOME_EXAMPLE.neuronal_props,
            meta: &GENOME_EXAMPLE.meta_learning,
        };

        // Validate that the properties have been copied over correctly
        assert_eq!(neuronal_ene_code, expected_nec);
    }

    #[test]
    fn test_enecode_from_neural_network() {
        let genome = GENOME_EXAMPLE.clone();
        let genome_comparison = GENOME_EXAMPLE.clone();
        let network_example = NeuralNetwork::new(genome);

        let test_enecode = EneCode::from(&network_example);

        assert_eq!(test_enecode, genome_comparison);
    }

    #[test]
    fn test_topology_gene() {
        let topology_gene_n1 = GENOME_EXAMPLE.topology_gene("N1");
        assert_eq!(String::from("N1"), topology_gene_n1.innovation_number.to_string());
    }

    #[test]
    fn test_recombine_same_everything_short_genome() {
        let seed = [0; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        let ene1 = GENOME_EXAMPLE.clone();
        let ene2 = GENOME_EXAMPLE.clone();

        let recombined = ene1.recombine(&mut rng, &ene2).unwrap();

        assert_eq!(recombined.neuron_id.len(), ene1.neuron_id.len());
    }

    #[test]
    fn test_recombine_same_everything_long_genome() {
        let seed = [0; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        let ene1 = XOR_GENOME.clone();
        let ene2 = XOR_GENOME.clone();

        let recombined = ene1.recombine(&mut rng, &ene2).unwrap();

        assert_eq!(recombined.neuron_id.len(), ene1.neuron_id.len());
    }

    #[test]
    fn test_recombine_missing_gene_long_genome() {
        let seed = [0; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        let ene1 = XOR_GENOME.clone();
        let ene2 = XOR_GENOME_MINIMAL.clone();

        let recombined = ene1.recombine(&mut rng, &ene2).unwrap();
        let crossover_genes: Vec<&String> = recombined.neuron_id.iter().filter(|&id| id == "A").collect();

        assert_eq!(crossover_genes.len(), 1);
        assert_eq!(crossover_genes[0], "A");
    }

    #[test]
    fn test_recombine_same_topology_different_genetic_bias() {
        setup_logger();

        let seed = [17; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        let ene1 = XOR_GENOME.clone();
        let ene2_base = XOR_GENOME.clone();

        let new_topology_genome: Vec<TopologyGene> = ene2_base.topology.iter().map(|tg|
                TopologyGene {
                    innovation_number: tg.innovation_number.clone(),
                    pin: tg.pin.clone(),
                    inputs: tg.inputs.clone(),
                    genetic_bias: 5.,
                    active: tg.active
                }
            ).collect();

        let ene2: EneCode = EneCode::new_from_genome(new_topology_genome, ene2_base.neuronal_props, ene2_base.meta_learning);

        let recombined = ene1.recombine(&mut rng, &ene2).unwrap();
        let recombined_genetic_bias: Vec<_> = recombined.topology.iter().map(|tg| tg.genetic_bias).collect();

        info!("Recombined bias vector {:#?}", recombined_genetic_bias);
        assert_ne!(recombined_genetic_bias, vec![0., 0., 0., 0.]);
        assert_ne!(recombined_genetic_bias, vec![5., 5., 5., 5.]);
        assert_eq!(recombined_genetic_bias.len(), ene1.neuron_id.len());
    }

    #[test]
    fn test_recombine_different_topology_compatible_genomes() {
        let seed = [0; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        let ene1 = GENOME_EXAMPLE.clone();
        let ene2 = GENOME_EXAMPLE2.clone();

        let recombined = ene1.recombine(&mut rng, &ene2).unwrap();

        assert_eq!(recombined.neuron_id.len(), 4);
    }

    #[test]
    fn test_recombine_incompatible_genomes() {
        let seed = [0; 32]; // Fixed seed for determinism
        let mut rng = StdRng::from_seed(seed);

        let ene1 = GENOME_EXAMPLE.clone();
        let ene2 = XOR_GENOME.clone();

        let recombined = ene1.recombine(&mut rng, &ene2);

        assert_matches!(recombined, Err(RecombinationError::CrossoverMatchError));
    }
}