use std::fmt::Error;
use rand::prelude::*;
use serde::{Serialize, Deserialize};
/// A minimal feed-forward network in which every neuron stores a single
/// scalar weight — there are no per-input weights, no biases, and no
/// activation function (see `run` for the propagation rule).
/// Serializable via serde; `Clone` supports `batch_mutate`-style copying.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct NeuralNetwork {
// Per-layer neuron weights, indexed as hidden_layers[layer][neuron].
hidden_layers: Vec<Vec<f64>>,
// One weight per output neuron.
output_weights: Vec<f64>
}
impl NeuralNetwork {
    /// Creates a network with every weight initialised to `default_wheight`.
    ///
    /// `hidden_layers` is the number of hidden layers, each holding
    /// `hidden_neurons_per_layer` neurons; `outputs` is the number of
    /// output weights. (Note: each neuron holds a single scalar weight.)
    pub fn new(default_wheight: f64, hidden_layers: usize, hidden_neurons_per_layer: usize, outputs: usize) -> NeuralNetwork {
        NeuralNetwork {
            hidden_layers: vec![vec![default_wheight; hidden_neurons_per_layer]; hidden_layers],
            output_weights: vec![default_wheight; outputs],
        }
    }

    /// Builds a network from existing weight vectors.
    ///
    /// Both arguments are already owned, so they are moved straight into
    /// the struct — the previous element-by-element deep copy was redundant.
    pub fn new_from(hidden_layers: Vec<Vec<f64>>, output_weights: Vec<f64>) -> NeuralNetwork {
        NeuralNetwork { hidden_layers, output_weights }
    }

    /// Feeds `inputs` through the network and returns the raw outputs.
    ///
    /// Propagation rule: each neuron receives the SUM of the previous
    /// layer's values scaled by its own single weight (no activation
    /// function is applied).
    ///
    /// Returns an all-zero vector of length `output_weights.len()` when
    /// the network has no hidden layers, instead of panicking.
    ///
    /// Takes `&self` now — the method never mutated the network, and a
    /// `&mut` borrow coerces to `&`, so existing callers are unaffected.
    #[inline]
    pub fn run(&self, inputs: &[f64]) -> Vec<f64> {
        // Guard: with no hidden layers there is nothing to propagate
        // through (the original underflowed on `temp.len() - 1`).
        if self.hidden_layers.is_empty() {
            return vec![0.0; self.output_weights.len()];
        }
        // Size each scratch layer after its OWN weight layer. The original
        // sized every layer after layer 0, which panicked on ragged
        // networks built via `new_from`.
        let mut temp: Vec<Vec<f64>> = self
            .hidden_layers
            .iter()
            .map(|layer| vec![0.0; layer.len()])
            .collect();
        // First layer. BUG FIX: the original iterated
        // `0..self.hidden_layers.len()` (the layer COUNT) while indexing
        // neurons of layer 0, panicking or skipping neurons whenever the
        // two differed.
        let input_sum: f64 = inputs.iter().sum();
        for (neuron, weight) in temp[0].iter_mut().zip(self.hidden_layers[0].iter()) {
            *neuron = input_sum * weight;
        }
        // Subsequent layers: sum the previous layer once, then scale by
        // each neuron's weight (same result as the original's accumulation
        // loop, without the O(prev * next) inner loop).
        for layer in 1..temp.len() {
            let prev_sum: f64 = temp[layer - 1].iter().sum();
            for (neuron, weight) in temp[layer].iter_mut().zip(self.hidden_layers[layer].iter()) {
                *neuron = prev_sum * weight;
            }
        }
        // Output layer follows the same rule.
        let last_sum: f64 = temp
            .last()
            .expect("guarded above: at least one hidden layer")
            .iter()
            .sum();
        self.output_weights.iter().map(|&weight| last_sum * weight).collect()
    }

    /// Sets the weight of the neuron at `(layer, index)`.
    ///
    /// Returns `None` on success, or `Some(message)` when the coordinates
    /// are out of bounds. (The `Option<String>` error convention is kept
    /// for backward compatibility; a `Result` would be more idiomatic.)
    pub fn set_wheight(&mut self, wheight: f64, neuron: (usize, usize)) -> Option<String> {
        if neuron.0 >= self.hidden_layers.len() {
            return Some("Error setting wheight of a neuron, the neuron is out of bounds on the x axis.".to_owned());
        }
        // BUG FIX: the original compared `neuron.1` against the layer
        // COUNT (`self.hidden_layers.len()`) instead of the neuron count
        // of the selected layer — `get_wheight` already did this correctly.
        if neuron.1 >= self.hidden_layers[neuron.0].len() {
            return Some("Error setting wheight of a neuron, the neuron is out of bounds on the y axis.".to_owned());
        }
        self.hidden_layers[neuron.0][neuron.1] = wheight;
        None
    }

    /// Returns the weight of the neuron at `(layer, index)`, or
    /// `Err(std::fmt::Error)` when either coordinate is out of bounds.
    /// (The odd `fmt::Error` type is kept so existing callers still match.)
    pub fn get_wheight(&self, neuron: (usize, usize)) -> Result<f64, Error> {
        self.hidden_layers
            .get(neuron.0)
            .and_then(|layer| layer.get(neuron.1))
            .copied()
            .ok_or(Error)
    }

    /// Adds a uniform random offset in `[-mutation_rate, mutation_rate]`
    /// to every hidden weight, and to the output weights as well when
    /// `outputs` is true.
    ///
    /// # Panics
    /// Panics if `mutation_rate` is negative (`gen_range` rejects an
    /// inverted range).
    pub fn mutate(&mut self, mutation_rate: f64, outputs: bool) {
        let mut rng: ThreadRng = thread_rng();
        for layer in self.hidden_layers.iter_mut() {
            for neuron in layer.iter_mut() {
                *neuron += rng.gen_range(-mutation_rate..=mutation_rate);
            }
        }
        if outputs {
            for neuron in self.output_weights.iter_mut() {
                *neuron += rng.gen_range(-mutation_rate..=mutation_rate);
            }
        }
    }

    /// Creates `amount` identical networks, each built with
    /// `NeuralNetwork::new` and the given dimensions.
    /// (The redundant `reserve_exact` after `with_capacity` is gone;
    /// `collect` sizes the vector from the range's exact length.)
    pub fn batch_new(amount: usize, default_wheight: f64, hidden_layers: usize, hidden_neurons_per_layer: usize, outputs: usize) -> Vec<NeuralNetwork> {
        (0..amount)
            .map(|_| NeuralNetwork::new(default_wheight, hidden_layers, hidden_neurons_per_layer, outputs))
            .collect()
    }

    /// Returns a deep copy of all hidden-layer weights,
    /// indexed as `weights[layer][neuron]`.
    pub fn get_weights(&self) -> Vec<Vec<f64>> {
        // Vec<Vec<f64>>::clone already performs the element-wise deep copy
        // the original spelled out by hand.
        self.hidden_layers.clone()
    }

    /// Returns a copy of the output-layer weights.
    pub fn get_output_weights(&self) -> Vec<f64> {
        self.output_weights.clone()
    }
}
/// Runs every network in `networks` on the same `inputs`, returning each
/// network's output vector in the same order as the slice.
pub fn batch_run(networks: &mut [NeuralNetwork], inputs: &[f64]) -> Vec<Vec<f64>> {
    networks
        .iter_mut()
        .map(|network| network.run(inputs))
        .collect()
}
/// Produces `amount` mutated copies of `network`.
///
/// Each copy is cloned from the source network and then mutated with
/// `mutation_rate`; `outputs` controls whether the output weights are
/// mutated as well. The source network itself is never modified.
pub fn batch_mutate(amount: usize, mutation_rate: f64, network: &NeuralNetwork, outputs: bool) -> Vec<NeuralNetwork> {
    let mut networks: Vec<NeuralNetwork> = Vec::with_capacity(amount);
    for _ in 0..amount {
        // Mutate a local clone before pushing, instead of pushing first
        // and mutating through an index.
        let mut candidate = network.clone();
        candidate.mutate(mutation_rate, outputs);
        networks.push(candidate);
    }
    networks
}