use rand::Rng;
use rand::distributions::{Distribution, Uniform};
use rand::prelude::*;
use log::*;
use thiserror::Error;
use std::sync::Arc;
use std::path::PathBuf;
use std::io::Result as FileResult;
use crate::rng_box;

use crate::agent_wrapper::*;
use crate::{graph::NeuralNetwork, enecode::EneCode};

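/// Run-level configuration for a `Population`: project naming and output location,
/// RNG seeding, epoch length, and per-epoch scaling of the mutation parameters.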
pub struct PopulationConfig {
    project_name: Arc<str>,
    project_directory: Arc<PathBuf>,
    rng_seed: Option<u8>,
    epoch_size: usize,
    mutation_rate_scale_per_epoch: f32,
    mutation_effect_scale_per_epoch: f32,
    visualize_best_agent: bool,
}

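/// Error raised when an agent is assigned a negative fitness value.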
#[derive(Debug, Error)]
pub enum FitnessValueError {
    #[error("Negative fitness values are not allowed!")]
    NegativeFitnessError,
}

impl PopulationConfig {
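    /// Builds a `PopulationConfig`. When `home_directory` is `None`, output such as
    /// the best agent's dot files is written to the current working directory.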
    pub fn new(project_name: Arc<str>,
               home_directory: Option<Arc<PathBuf>>,
               epoch_size: usize,
               mutation_rate_scale_per_epoch: f32,
               mutation_effect_scale_per_epoch: f32,
               visualize_best_agent: bool,
               rng_seed: Option<u8>) -> Self {

        let project_directory: Arc<PathBuf> = match home_directory {
            Some(dir) => dir,
            None => PathBuf::from(".").into(),
        };

        PopulationConfig {
            project_name,
            project_directory,
            rng_seed,
            epoch_size,
            mutation_rate_scale_per_epoch,
            mutation_effect_scale_per_epoch,
            visualize_best_agent,
        }
    }
}

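/// A population of agents evolved through truncation selection, stochastic universal
/// sampling, recombination, and mutation.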
pub struct Population {
    pub agents: Vec<Agent>,
    pub size: usize,
    pub topology_mutation_rate: f32,
    pub mutation_rate: f32,
    pub mutation_effect_sd: f32,
    pub generation: usize,
    pub population_fitness: f32,
    survival_rate: f32,
}

impl Population {

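    /// Creates `population_size` agents from `genome_base`, mutating each one so that
    /// the starting population is not uniform.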
    pub fn new(genome_base: EneCode, population_size: usize, survival_rate: f32, mutation_rate: f32, topology_mutation_rate: f32) -> Self {
        let mut agent_vector: Vec<Agent> = Vec::new();

        for _idx in 0..population_size {
            let mut agent = Agent::new(genome_base.clone());

            // Mutate each freshly cloned agent (rate 1.0, effect SD 10.0, no topology change) to seed initial diversity.
            agent.mutate(1., 10., 0.);
            agent_vector.push(agent);
        }

        Population {
            agents: agent_vector,
            topology_mutation_rate,
            mutation_rate,
            mutation_effect_sd: 5.,
            size: population_size,
            generation: 0,
            population_fitness: 0.,
            survival_rate,
        }
    }

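    /// Selects `n_select` parent indices: the population is first truncated to its fittest
    /// `survival_rate` fraction, then sampled with stochastic universal sampling.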
    fn selection(&self, rng_seed: Option<u8>, n_select: usize) -> Vec<usize> {
        let truncated_population = self.truncate_population();
        self.stochastic_universal_sampling(rng_seed, truncated_population, n_select)
    }

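    /// Fitness-proportionate selection over `sample` (indices into `self.agents`) using
    /// evenly spaced pointers; returns `n_select` indices, possibly with repeats.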
    fn stochastic_universal_sampling(&self, rng_seed: Option<u8>, sample: Vec<usize>, n_select: usize) -> Vec<usize> {
        let sample_fitness: Vec<f32> = sample.iter().map(|&idx| self.agents[idx].fitness).collect();
        let total_population_fitness: f32 = sample_fitness.iter().sum();
        let point_spacing = total_population_fitness / (n_select as f32);
        let u = Uniform::from(0_f32..point_spacing);

        let mut selection: Vec<usize> = Vec::new();

        let mut rng = rng_box(rng_seed);
        let mut roulette_pointer = u.sample(&mut rng);

        for _p in 0..n_select {
            let mut idx: usize = 0;
            while sample_fitness[0..idx].iter().sum::<f32>() < roulette_pointer {
                idx += 1;

                if idx == sample_fitness.len() {
                    break;
                }
            }
            // The pointer falls within the cumulative fitness interval of the agent at position idx - 1.
            selection.push(sample[idx - 1]);
            roulette_pointer += point_spacing;
        }

        selection
    }

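    /// Returns agent indices sorted by descending fitness, truncated to the top
    /// `survival_rate` fraction of the population.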
    fn truncate_population(&self) -> Vec<usize> {
        let n_survival = (self.agents.len() as f32) * self.survival_rate;
        let agent_fitness: Vec<f32> = self.agents.iter().map(|a| a.fitness).collect();
        let mut fitness_pairing: Vec<_> = (0..self.agents.len()).zip(agent_fitness).collect();
        fitness_pairing.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());
        let sorted_data: Vec<usize> = fitness_pairing.into_iter().map(|(a, _)| a).collect();

        sorted_data[..n_survival.round() as usize].to_vec()
    }

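    /// Produces `self.size` offspring by repeatedly recombining pairs of parents drawn
    /// from `parental_ids`; panics if recombination fails too many times.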
    fn generate_offspring(&self, rng_seed: Option<u8>, parental_ids: Vec<usize>) -> Vec<Agent> {
        let mut offspring: Vec<Agent> = Vec::new();

        let num_parents = parental_ids.len();

        let mate_attempt_limit = self.size as f32 * 1.5;

        let mut n_mate_attempts = 0;

        let mut rng = rng_box(rng_seed);

        while offspring.len() < self.size {
            let a1 = rng.gen_range(0..num_parents);
            let parent_1 = parental_ids[a1];

            // Exclude the chosen parent's id so an agent is not recombined with itself.
            let partner_list: Vec<usize> = parental_ids.iter().copied().filter(|&p| p != parent_1).collect();
            let a2 = rng.gen_range(0..partner_list.len());
            let parent_2 = partner_list[a2];

            let offspring_ec = self.agents[parent_1].nn.recombine_enecode(&mut rng, &self.agents[parent_2].nn);

            match offspring_ec {
                Ok(ec) => {
                    let agent = Agent::new(ec);
                    offspring.push(agent);
                }
                Err(e) => debug!("Recombination failed: {:#?}", e),
            }

            n_mate_attempts += 1;

            if n_mate_attempts > mate_attempt_limit as i32 {
                panic!("Offspring generation exceeded the mate attempt limit: too many failed recombinations.");
            }
        }

        offspring
    }

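    /// Runs one generation: select parents, generate and mutate offspring, replace the
    /// current population, and rescale mutation parameters at the end of each epoch.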
    pub fn evolve_step(&mut self, pop_config: &PopulationConfig) {
        let selection = self.selection(pop_config.rng_seed, self.size);

        let mut offspring = self.generate_offspring(pop_config.rng_seed, selection);

        for agent in offspring.iter_mut() {
            agent.mutate(self.mutation_rate, self.mutation_effect_sd, self.topology_mutation_rate);
        }

        self.agents = offspring;

        self.generation += 1;

        if self.generation % pop_config.epoch_size == 0 {
            self.mutation_rate *= pop_config.mutation_rate_scale_per_epoch;
            self.topology_mutation_rate *= pop_config.mutation_rate_scale_per_epoch;
            self.mutation_effect_sd *= pop_config.mutation_effect_scale_per_epoch;
        }
    }

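    /// Recomputes `population_fitness` as the mean fitness over all agents.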
    pub fn update_population_fitness(&mut self) {
        let agent_fitness_vector: Vec<f32> = self.agents.iter().map(|a| a.fitness).collect();
        self.population_fitness = agent_fitness_vector.iter().sum::<f32>() / self.size as f32;
    }

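    /// Logs a summary of the current generation and, every 10 generations when enabled
    /// in the config, writes a Graphviz dot file for the fittest agent's network.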
    pub fn report(&self, pop_config: &PopulationConfig) {
        let agent_fitness_vector: Vec<f32> = self.agents.iter().map(|a| a.fitness).collect();
        let (best_agent_idx, population_max) = agent_fitness_vector
            .into_iter()
            .enumerate()
            .fold((0, f32::MIN), |(idx_max, val_max), (idx, val)| {
                if val > val_max { (idx, val) } else { (idx_max, val_max) }
            });

        if (self.generation % 10 == 0) && pop_config.visualize_best_agent {
            let dot_file = format!("{}_{:04}.dot", pop_config.project_name, self.generation);
            let agent_path = pop_config.project_directory.join(dot_file);
            info!("Writing agent dot with path {:?}", agent_path);
            self.agents[best_agent_idx].nn.write_dot(&agent_path);
        }

        info!("Observing N={} population with fitness {} on generation {} with max of {} (agent {})", self.agents.len(), self.population_fitness, self.generation, population_max, best_agent_idx);
    }

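    /// Writes the genome of the agent at index `idx` to `file_path`.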
    pub fn write_agent_genome(&self, idx: usize, file_path: PathBuf) -> FileResult<()> {
        self.agents[idx].write_genome(file_path)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::graph::NeuralNetwork;
    use crate::agent_wrapper::Agent;
    use crate::population::FitnessValueError;

    use crate::doctest::{GENOME_EXAMPLE, XOR_GENOME, XOR_GENOME_MINIMAL};
    use crate::setup_logger;

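    /// Test fitness function for XOR: starts from `fitness_begin`, subtracts the squared
    /// error on each of the four input pairs and a per-node complexity penalty, floored at zero.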
    struct XorEvaluation {
        pub fitness_begin: f32,
    }

    impl XorEvaluation {
        pub fn new() -> Self {
            XorEvaluation {
                fitness_begin: 6.0,
            }
        }

        pub fn evaluate_agent(&self, agent: &mut Agent) -> Result<(), FitnessValueError> {
            let mut fitness_evaluation = self.fitness_begin;
            let complexity = agent.nn.node_identity_map.len() as f32;
            let complexity_penalty = 0.01 * complexity;

            for bit1 in 0..2 {
                for bit2 in 0..2 {
                    agent.fwd(vec![bit1 as f32, bit2 as f32]);
                    let network_output = agent.nn.fetch_network_output();

                    let xor_true = (bit1 > 0) ^ (bit2 > 0);
                    let xor_true_float: f32 = if xor_true { 1. } else { 0. };

                    fitness_evaluation -= (xor_true_float - network_output[0]).powf(2.);
                }
            }

            let fitness_value = if fitness_evaluation > complexity_penalty {
                fitness_evaluation - complexity_penalty
            } else {
                0.
            };

            if fitness_value < 0. {
                Err(FitnessValueError::NegativeFitnessError)
            } else {
                debug!("Updating agent with fitness value {}", fitness_value);
                agent.update_fitness(fitness_value);
                Ok(())
            }
        }
    }

    #[test]
    fn test_create_population() {
        let genome = GENOME_EXAMPLE.clone();
        let population_test = Population::new(genome, 125, 0.8, 0.1, 0.);
        assert_eq!(population_test.agents.len(), 125);
    }

    #[test]
    fn test_truncate_population() {
        let genome = GENOME_EXAMPLE.clone();
        let mut population_test = Population::new(genome, 10, 0.8, 0.1, 0.);

        let agent_fitness_vector = vec![0., 1., 2., 3., 4., 5., 6., 7., 8., 9.];
        for (agent, fitness) in population_test.agents.iter_mut().zip(agent_fitness_vector.iter()) {
            agent.fitness = *fitness;
        }

        let sorted_fitness_indices = population_test.truncate_population();

        assert_eq!(sorted_fitness_indices, vec![9, 8, 7, 6, 5, 4, 3, 2]);
    }

    #[test]
    fn test_stochastic_universal_sampling() {
        let seed = Some(17);
        let genome = GENOME_EXAMPLE.clone();
        let mut population_test = Population::new(genome, 3, 0.8, 0.1, 0.);
        let agent_fitness_vector = vec![5., 3., 2.];
        for (agent, fitness) in population_test.agents.iter_mut().zip(agent_fitness_vector.iter()) {
            agent.fitness = *fitness;
        }
        let sample: Vec<usize> = vec![0, 1, 2];

        let sus: Vec<usize> = population_test.stochastic_universal_sampling(seed, sample, 10);
        assert_eq!(vec![0, 0, 0, 0, 0, 1, 1, 1, 2, 2], sus);
    }

    #[test]
    fn test_generate_offspring() {
        let genome = GENOME_EXAMPLE.clone();
        let population_test = Population::new(genome, 10, 0.8, 0.1, 0.);
        let parent_id_vector = vec![0, 1, 3, 5];

        let offspring_vec = population_test.generate_offspring(Some(17), parent_id_vector);

        assert_eq!(offspring_vec.len(), 10);
    }

    #[test]
    fn test_evolve_xor_predefined_topology() {
        setup_logger();

        let genome = XOR_GENOME.clone();
        let mut population = Population::new(genome, 200, 0.1, 0.4, 0.01);

        let ef = XorEvaluation::new();

        let project_name: &str = "XOR_Predefined_Test";
        let config = PopulationConfig::new(Arc::from(project_name), None, 50, 0.50, 0.50, false, Some(13));

        while (population.generation < 400) && (population.population_fitness < 5.8) {
            for agent in population.agents.iter_mut() {
                ef.evaluate_agent(agent).expect("fitness evaluation failed");
            }

            population.update_population_fitness();
            population.report(&config);
            population.evolve_step(&config);
        }

        assert!(population.population_fitness >= 5.2);
    }

    #[test]
    fn test_evolve_xor_minimal_topology() {
        setup_logger();

        let genome = XOR_GENOME_MINIMAL.clone();
        let mut population = Population::new(genome, 200, 0.2, 0.4, 0.4);

        let ef = XorEvaluation::new();

        let project_name: &str = "XOR_Test";
        let project_directory: PathBuf = PathBuf::from("agents/XORtest");

        let config = PopulationConfig::new(Arc::from(project_name), Some(Arc::from(project_directory)), 200, 0.50, 0.50, false, Some(237));

        while (population.generation < 1000) && (population.population_fitness < 5.8) {
            for agent in population.agents.iter_mut() {
                ef.evaluate_agent(agent).expect("fitness evaluation failed");
            }

            population.update_population_fitness();
            population.report(&config);
            population.evolve_step(&config);
        }

        assert!(population.population_fitness >= 5.2);
    }
}