use crate::*;
#[derive(Debug, Clone)]
/// A single member of a [`Population`]: a neural network paired with the
/// fitness score it earned during the last evaluation pass.
pub struct Agent {
    // The agent's neural network (genome + phenotype).
    pub network: Network,
    // Fitness assigned by an `Evaluator`; higher is better (populations are
    // sorted descending on this). Reset to 0.0 for freshly mutated offspring.
    pub fitness: f64,
}
#[profiling::all_functions]
impl Agent {
    /// Runs one forward pass: feeds `sensors` through the underlying network
    /// (with the network's second flag set to `false`) and returns the outputs.
    pub fn activate(&mut self, sensors: Vec<f64>) -> Vec<f64> {
        self.network.activate(sensors, false)
    }
    /// Produces a mutated offspring of this agent. The child's fitness is
    /// reset to zero; it must be re-evaluated before it means anything.
    pub fn mutate(&self) -> Agent {
        let offspring_network = self.network.mutate();
        Self {
            network: offspring_network,
            fitness: 0.0,
        }
    }
    /// Returns the network's parameters flattened into a single vector.
    pub fn get_params(&self) -> Vec<f64> {
        self.network.get_params_list()
    }
    /// Overwrites the network's parameters with `params` (the same flat
    /// layout produced by [`Agent::get_params`]).
    pub fn apply_params(&mut self, params: Vec<f64>) {
        self.network.apply_params(params)
    }
}
#[derive(Debug, Clone)]
/// A generation-based collection of [`Agent`]s evolved by mutation and
/// truncation selection.
pub struct Population {
    // Current generation's agents; kept sorted best-first after `evaluate`.
    pub agents: Vec<Agent>,
    // Target population size that `evolve` refills up to each generation.
    pub pop_amt: usize,
    // Number of `evolve` steps performed so far.
    pub generations: usize,
    // Clone of the best agent from the most recent `evaluate` call, if any.
    pub champion: Option<Agent>,
}
#[profiling::all_functions]
impl Population {
    /// Creates a population of `agent_amount` agents whose networks have the
    /// given input/output sizes and no connections (each agent gets its own
    /// freshly constructed `Network`).
    pub fn new_without_connections(
        agent_amount: usize,
        input_neuron_amount: usize,
        output_neuron_amount: usize,
    ) -> Population {
        Population {
            agents: (0..agent_amount)
                .map(|_| Agent {
                    network: Network::new(input_neuron_amount, output_neuron_amount, None, None),
                    fitness: 0.0,
                })
                .collect(),
            pop_amt: agent_amount,
            generations: 0,
            champion: None,
        }
    }
    /// Creates a population of `agent_amount` agents, each starting from a
    /// clone of the supplied `network`.
    pub fn new_with_base_network(network: Network, agent_amount: usize) -> Population {
        Population {
            agents: (0..agent_amount)
                .map(|_| Agent {
                    network: network.clone(),
                    fitness: 0.0,
                })
                .collect(),
            pop_amt: agent_amount,
            generations: 0,
            champion: None,
        }
    }
    /// Creates a fully connected population: builds one network with the given
    /// layer layout, connects all layers (`all_connect`), and clones it into
    /// `agent_amount` agents.
    pub fn new_initialized(
        agent_amount: usize,
        input_neuron_amount: usize,
        output_neuron_amount: usize,
        extra_layers: Option<Vec<usize>>,
    ) -> Population {
        let mut network = Network::new(
            input_neuron_amount,
            output_neuron_amount,
            extra_layers,
            None,
        );
        network.all_connect();
        Population {
            agents: vec![
                Agent {
                    network: network.clone(),
                    fitness: 0.0,
                };
                agent_amount
            ],
            pop_amt: agent_amount,
            generations: 0,
            champion: None,
        }
    }
    /// Re-initializes every agent's weights with `initmethod`.
    pub fn scramble(&mut self, initmethod: InitializationMethod) {
        for agent in self.agents.iter_mut() {
            agent.network.initialize_weights(initmethod);
        }
    }
    /// Mean fitness over all agents. NOTE(review): returns NaN for an empty
    /// population (0.0 / 0) — callers printing this should expect that.
    pub fn average_fitness(&self) -> f64 {
        self.agents.iter().map(|a| a.fitness).sum::<f64>() / self.agents.len() as f64
    }
    /// Scores every agent with `evaluator`, sorts the population best-first,
    /// and records a clone of the best agent as `champion` (or `None` if the
    /// population is empty).
    pub fn evaluate(&mut self, evaluator: &Evaluator) {
        // Score in place. The previous version cloned every agent into a new
        // Vec and then cloned each clone again via `to_owned()` — two full
        // deep copies of the population per evaluation, for no benefit.
        for agent in self.agents.iter_mut() {
            agent.fitness = evaluator.evaluate(agent);
        }
        // Sort descending directly instead of ascending + reverse.
        // `total_cmp` gives a total order on f64 (NaN sorts deterministically)
        // where the old `partial_cmp().unwrap_or(Equal)` treated any NaN
        // comparison as a tie.
        self.agents
            .sort_by(|a, b| b.fitness.total_cmp(&a.fitness));
        // `first()` avoids the index panic the old `self.agents[0]` had on an
        // empty population.
        self.champion = self.agents.first().cloned();
    }
    /// Truncation selection: keeps the top `keep_top` agents verbatim, then
    /// refills the population up to `pop_amt` with mutated offspring of
    /// randomly chosen survivors.
    pub fn evolve(&mut self, keep_top: usize) {
        let mut rng = rand::thread_rng();
        let keep_top = keep_top.min(self.agents.len());
        // Guard: `gen_range(0..0)` panics, which the old code hit whenever
        // `keep_top` was 0 or the population was empty.
        if keep_top == 0 {
            self.generations += 1;
            return;
        }
        let mut new_agents: Vec<Agent> = self.agents[0..keep_top].to_vec();
        while new_agents.len() < self.pop_amt {
            let parent_index = rng.gen_range(0..keep_top);
            new_agents.push(self.agents[parent_index].mutate());
        }
        self.agents = new_agents;
        self.generations += 1;
    }
    /// Runs `loops` rounds of evaluate → evolve, printing per-generation
    /// stats (generation number, champion fitness, average fitness).
    pub fn loop_train(&mut self, evaluator: &Evaluator, keep_top: usize, loops: usize) {
        for _ in 0..loops {
            self.evaluate(evaluator);
            let avg_fit = self.average_fitness();
            // Read the champion's fitness by reference; the old code cloned
            // the entire champion Agent (network included) just for one f64.
            let top_fit = self
                .champion
                .as_ref()
                .expect("evaluate() sets champion for a non-empty population")
                .fitness;
            self.evolve(keep_top);
            println!(
                "Generation: {} | Top fitness: {} | Average fitness: {}",
                self.generations, top_fit, avg_fit
            );
        }
    }
}