// smarty_pants/neural_network.rs

/*!
This `module` gives access to `NeuralNetwork` related things and most importantly
it gives access to the `NeuralNetwork` `Type` itself. It also contains some
convenience functions such as `batch_run()` and `batch_mutate()`.
*/

use std::fmt::Error;

use rand::prelude::*;
use serde::{Deserialize, Serialize};

11/**
12The `NeuralNetwork` type from which all learning functions come from, stores data for the
13network and gives access to functions which can be used to access, run, and mutate the
14network.
15
16```
17use smarty_pants::neural_network::NeuralNetwork;
18
19let mut brain:NeuralNetwork = NeuralNetwork::new(1.0,10,10,3);
20
21brain.set_wheight(10.0,(5,7));
22assert!(brain.get_wheight((5,7)).unwrap() == 10.0);
23
24let output:Vec<f64> = brain.run(&vec![1.0,2.0,3.0,4.0,5.0]);
25```
26*/
27//#[derive(Serialize, Deserialize, Debug, Clone)]
28#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
29//#[derive(Clone)]
30pub struct NeuralNetwork {
31    /// A 2D Vector of all hidden `neuron`, every `neuron` is a `f64` that is its `wheight`
32    hidden_layers: Vec<Vec<f64>>,
33    /// The `wheight` of the outputs.
34    output_weights: Vec<f64>
35}
36
impl NeuralNetwork {
38    /// Creates a new `NeuralNetwork` using the specified arguments.
39    pub fn new(default_wheight:f64 ,hidden_layers:usize ,hidden_neurons_per_layer:usize ,outputs:usize) -> NeuralNetwork {
40        NeuralNetwork {
41            hidden_layers: vec![vec![default_wheight;hidden_neurons_per_layer];hidden_layers],
42            output_weights: vec![default_wheight;outputs]
43        }
44    }
45
46    /// Creates a new `NeuralNetwork` using the inputs as the `weights`
47    pub fn new_from(hidden_layers:Vec<Vec<f64>>, output_weights:Vec<f64>) -> NeuralNetwork {
48        NeuralNetwork {
49            hidden_layers: {
50                let mut layers:Vec<Vec<f64>> = Vec::with_capacity(hidden_layers.len());
51                for layer in hidden_layers.iter() {
52                    layers.push(Vec::with_capacity(layer.len()));
53                    for neuron in layer.iter() {
54                        layers.last_mut().unwrap().push(*neuron);
55                    }
56                }
57                layers
58            },
59            output_weights
60        }
61    }
62
63    #[inline]
64    /// Runs the NeuralNetwork using the provided arguments, then returns the output
65    pub fn run(&mut self, inputs:&[f64]) -> Vec<f64> {
66        // Create a place to store the temporary values.
67        let mut temp:Vec<Vec<f64>> = vec![vec![0.0;self.hidden_layers[0].len()];self.hidden_layers.len()];
68
69        // For each input pass the value to the first `hidden_layer` and multiply by the `neurons` wheight
70        for neuron in inputs.iter() {
71            for temp_neuron in 0..self.hidden_layers.len() {
72                temp[0][temp_neuron] += neuron*(self.hidden_layers[0][temp_neuron]);
73            }
74        }
75
76        // For each `neuron` in each `hidden_layer` push the values forwards towards the last layer
77        for layer in 0..(temp.len()-1) {
78            for neuron in 0..temp[layer].len() {
79                for next_neuron in 0..temp[layer+1].len() {
80                    // Set `next_neuron` to the `current neuron`, passing the value forwards through the layers
81                    temp[layer+1][next_neuron] += temp[layer][neuron]*self.hidden_layers[layer+1][next_neuron];
82                }
83            }
84        }
85
86        // Now that the values have reached the last layer transfer the values from the last layer to each output. Then return the outputs
87        let mut outputs:Vec<f64> = vec![0.0f64;self.output_weights.len()];
88        for neuron in temp[temp.len()-1].iter() {
89            // This is allowed becuase I couldn't find a way to implement clippy's suggestion without doing something ridiculous.
90            #[allow(clippy::needless_range_loop)]
91            for output in 0..self.output_weights.len() {
92                outputs[output] += neuron*self.output_weights[output];
93            }
94        }
95        outputs
96    }
97
98    /**
99    Sets the wheight of a single `neuron` in the hidden layers.
100    If the specified neuron is out of bounds then it will return an error in the form of a `Option<String>`
101    This will contain text that be either outputted or ignored and simply checked if it exists.
102    ```
103    use smarty_pants::neural_network::NeuralNetwork;
104
105    let mut brain:NeuralNetwork = NeuralNetwork::new(1.0,10,10,3);
106
107    match brain.set_wheight(64f64 ,(16usize,23usize)) {
108        None => println!("No error"),
109        Some(e) => println!("Error: {}", e)
110    }
111    ```
112    */
113    pub fn set_wheight(&mut self, wheight:f64 ,neuron:(usize,usize)) -> Option<String> {
114        if neuron.0 < self.hidden_layers.len() {
115            if neuron.1 < self.hidden_layers.len() {
116                self.hidden_layers[neuron.0][neuron.1] = wheight;
117                None
118            } else {
119                Some("Error setting wheight of a neuron, the neuron is out of bounds on the y axis.".to_owned())
120            }
121        } else {
122            Some("Error setting wheight of a neuron, the neuron is out of bounds on the x axis.".to_owned())
123        }
124    }
125
126    /**
127    Gets the wheight of a single `neuron` in the `hidden_layers`
128    Returns an error if the specified `neuron` is greater than the bounds on the `hidden_layers`
129    ```
130    use smarty_pants::neural_network::NeuralNetwork;
131
132    let mut brain:NeuralNetwork = NeuralNetwork::new(1.0,10,10,3);
133
134    match brain.get_wheight((16usize,23usize)) {
135        Ok(_) => println!("No error"),
136        Err(e) => println!("Error")
137    }
138    ```
139    */
140    pub fn get_wheight(&self, neuron:(usize,usize)) -> Result<f64, Error> {
141        if neuron.0 < self.hidden_layers.len() {
142            if neuron.1 < self.hidden_layers[neuron.0].len() {
143                Ok(self.hidden_layers[neuron.0][neuron.1])
144            } else {
145                Err(Error)
146            }
147        } else {
148            Err(Error)
149        }
150    }
151
152    /// Mutates every `wheight` in the `NeuralNetwork` by a random amount that is a maximum of `max`
153    /// in both the possitive and negative directions. It does this through addition and subtraction.
154    /// if `outputs` is true then it will also mutate the `output_weights`
155    pub fn mutate(&mut self, mutation_rate:f64, outputs:bool) {
156        let mut rng:ThreadRng = thread_rng();
157        for layer in self.hidden_layers.iter_mut() {
158            for neuron in layer.iter_mut() {
159                *neuron += rng.gen_range(-mutation_rate..=mutation_rate);
160            }
161        }
162
163        if outputs {
164            for neuron in self.output_weights.iter_mut() {
165                *neuron += rng.gen_range(-mutation_rate..=mutation_rate);
166            }
167        }
168    }
169
170    /// Returns a `Vector` containing `amount` number of neural networks all with the same starting values.
171    /// This function does this by repeatedly calling `NeuralNetwork::new()` so it isn't any more efficent, its simply
172    /// here for convenience.
173    pub fn batch_new(amount:usize ,default_wheight:f64 ,hidden_layers:usize ,hidden_neurons_per_layer:usize ,outputs:usize) -> Vec<NeuralNetwork> {
174        let mut networks: Vec<NeuralNetwork> = Vec::with_capacity(amount);
175        networks.reserve_exact(amount);
176        for _ in 0..amount {
177            networks.push(NeuralNetwork::new(default_wheight, hidden_layers, hidden_neurons_per_layer, outputs));
178        }
179        networks
180    }
181
182    /// Returns the `hidden_layers` `weights` of the network.
183    pub fn get_weights(&self) -> Vec<Vec<f64>> {
184        let mut weights:Vec<Vec<f64>> = Vec::with_capacity(self.hidden_layers.len());
185        for layer in self.hidden_layers.iter() {
186            let mut layer_weights:Vec<f64> = Vec::with_capacity(layer.len());
187            for wheight in layer.iter() {
188                layer_weights.push(*wheight);
189            }
190            weights.push(layer_weights);
191        }
192        weights
193    }
194
195    /// Returns the `output_weights` of the network.
196    pub fn get_output_weights(&self) -> Vec<f64> {
197        self.output_weights.clone()
198    }
}

201/// Returns a `Vector` of `Vector`s that makes up the output of all `NeuralNetworks` given to this function.
202/// This function does this by repeatedly calling `NeuralNetwork::run()` so it isn't any more efficent, its simply
203/// here for convenience.
204pub fn batch_run(networks:&mut [NeuralNetwork], inputs:&[f64]) -> Vec<Vec<f64>> {
205    let mut output: Vec<Vec<f64>> = Vec::with_capacity(networks.len());
206    for network in networks.iter_mut() {
207        output.push(network.run(inputs));
208    }
209    output
210}
211
212/// Turns one inputted `NeuralNetwork` into `amount` number of mutated `NeuralNetwork`s mutated by mutation.
213/// If `outputs` is true then it will also mutate the output weights. It does this through cloning and
214/// calling `NeuralNetwork::mutate(mutation)` so it isn't any more efficent, its simply here for convenience.
215pub fn batch_mutate(amount:usize, mutation_rate:f64, network:&NeuralNetwork, outputs:bool) -> Vec<NeuralNetwork> {
216    let mut networks: Vec<NeuralNetwork> = Vec::with_capacity(amount);
217    for i in 0..amount {
218        networks.push(network.clone());
219        networks[i].mutate(mutation_rate, outputs);
220    }
221    networks
222}