use self::neuron::Neuron;
use rand::Rng;

#[cfg(feature = "parallelization")]
use rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
#[cfg(feature = "serialization")]
use serde::{Serialize, Deserialize};

mod neuron;

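/// A simple feed-forward neural network with `I` inputs and `O` outputs,
/// built up layer by layer with the builder-style methods below.
///
/// Illustrative usage, mirroring the tests at the bottom of this file
/// (marked `ignore` since the crate path is not spelled out here):
///
/// ```ignore
/// let mut net: NeuralNetwork<2, 1> = NeuralNetwork::new()
///     .random_layer(4, ActivationFunction::ReLU)
///     .add_layer(1, ActivationFunction::Linear);
/// let out = net.run(&[0.5, -0.5]);
/// ```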
#[cfg_attr(feature = "serialization", derive(Serialize, Deserialize))]
pub struct NeuralNetwork<const I: usize, const O: usize> {
    layers: Vec<Vec<Neuron>>,
    #[cfg_attr(feature = "serialization", serde(skip))]
    last_edit: Option<Edit>,
    longest_layer: usize,
    #[cfg_attr(feature = "serialization", serde(skip))]
    buffers: (Vec<f32>, Vec<f32>),
}

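/// Snapshot of a neuron taken just before the most recent `random_edit`,
/// so that `reverse_edit` can restore it.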
struct Edit {
    old: Neuron,
    layer: usize,
    row: usize,
}

impl<const I: usize, const O: usize> Default for NeuralNetwork<I, O> {
    fn default() -> Self {
        Self::new()
    }
}

impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
    pub fn new() -> Self {
        Self {
            layers: Vec::new(),
            last_edit: None,
            // Start with room for the input vector so the scratch buffers can
            // never be outgrown, even if every layer is narrower than the input.
            longest_layer: I,
            buffers: (Vec::with_capacity(I), Vec::with_capacity(I)),
        }
    }

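    /// Appends a layer of `n` identical neurons (built with `Neuron::new`)
    /// using the given activation function.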
    pub fn add_layer(mut self, n: usize, func: ActivationFunction) -> Self {
        let n_inputs = self.get_layer_inputs();
        self.layers.push(vec![Neuron::new(n_inputs, 0.0, func); n]);
        self.check_max_layer(n);
        self
    }

    fn check_max_layer(&mut self, n: usize) {
        if n > self.longest_layer {
            self.longest_layer = n;
            self.buffers = (Vec::with_capacity(n), Vec::with_capacity(n))
        }
    }

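    /// Appends a layer of `n` randomly initialized neurons using the given
    /// activation function.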
    pub fn random_layer(mut self, n: usize, func: ActivationFunction) -> Self {
        let mut layer: Vec<Neuron> = vec![];
        for _ in 0..n {
            layer.push(Neuron::random(self.get_layer_inputs(), func))
        }
        self.layers.push(layer);
        self.check_max_layer(n);
        self
    }

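    /// Nudges one randomly chosen neuron by a small random amount (a weight
    /// with 95% probability, the bias otherwise) and records its previous
    /// state so the change can be undone with `reverse_edit`.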
    pub fn random_edit(&mut self) {
        let mut rng = rand::thread_rng();
        let layer = rng.gen_range(0..self.layers.len());
        let row = rng.gen_range(0..self.layers[layer].len());
        let mut change: f32 = rng.gen::<f32>() / 10.0;

        if rng.gen_bool(0.5) {
            change *= -1.0;
        }

        let neuron = &mut self.layers[layer][row];
        self.last_edit = Some(Edit {
            old: neuron.clone(),
            layer,
            row,
        });

        if rng.gen_bool(0.95) {
            let index = rng.gen_range(0..neuron.get_weights_len());
            neuron.change_weight(index, change);
        } else {
            neuron.change_bias(change);
        }
    }

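    /// Restores the neuron changed by the most recent `random_edit`, if any.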
    pub fn reverse_edit(&mut self) {
        if let Some(edit) = &self.last_edit {
            self.layers[edit.layer][edit.row] = edit.old.clone();
        }
    }

    fn get_layer_inputs(&self) -> usize {
        if self.layers.is_empty() {
            return I;
        }
        self.layers[self.layers.len() - 1].len()
    }

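    /// Sets the weights of the most recently added layer, one `Vec<f32>` per
    /// neuron. Panics if no layer has been added yet.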
    pub fn with_weights(mut self, weights: Vec<Vec<f32>>) -> Self {
        match self.layers.last_mut() {
            None => panic!("tried to add weights before layers!"),
            Some(layer) => layer
                .iter_mut()
                .zip(weights)
                .for_each(|(neuron, weight)| neuron.set_weights(weight)),
        }
        self
    }

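    /// Sets the biases of the most recently added layer, one value per neuron.
    /// Panics if no layer has been added yet.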
    pub fn with_bias(mut self, biases: Vec<f32>) -> Self {
        match self.layers.last_mut() {
            None => panic!("tried to add biases before layers!"),
            Some(layer) => layer
                .iter_mut()
                .zip(biases)
                .for_each(|(neuron, bias)| neuron.set_bias(bias)),
        }
        self
    }

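    /// Runs a forward pass over `input`, reusing the internal scratch buffers
    /// (which is why this takes `&mut self`).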
    #[inline]
    pub fn run(&mut self, input: &[f32; I]) -> [f32; O] {
        let mut data = &mut self.buffers.0;
        let mut temp = &mut self.buffers.1;

        // SAFETY: the buffers always have at least `longest_layer` capacity and
        // every element is overwritten before it is read.
        #[allow(clippy::uninit_vec)]
        unsafe {
            data.set_len(input.len())
        }

        data[..input.len()].copy_from_slice(&input[..]);

        for layer in &self.layers {
            #[allow(clippy::uninit_vec)]
            unsafe {
                temp.set_len(layer.len())
            }

            for (i, neuron) in layer.iter().enumerate() {
                temp[i] = neuron.compute(data);
            }

            // Ping-pong the buffers: the layer just written becomes the input
            // of the next one.
            (data, temp) = (temp, data);
        }

        let mut out = [0.0; O];
        out[..O].copy_from_slice(&data[..O]);
        out
    }

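    /// Same forward pass as `run`, but with freshly allocated buffers so it
    /// only needs `&self`; this is what `par_run` uses from multiple threads.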
    #[inline]
    pub fn unbuffered_run(&self, input: &[f32; I]) -> [f32; O] {
        let mut data = Vec::with_capacity(self.longest_layer);

        // SAFETY: `longest_layer` is at least as large as the input and every
        // layer, and each element is overwritten before it is read.
        #[allow(clippy::uninit_vec)]
        unsafe {
            data.set_len(input.len())
        }

        data[..input.len()].copy_from_slice(&input[..]);

        let mut temp = Vec::with_capacity(self.longest_layer);
        for layer in &self.layers {
            #[allow(clippy::uninit_vec)]
            unsafe {
                temp.set_len(layer.len())
            }

            for (i, neuron) in layer.iter().enumerate() {
                temp[i] = neuron.compute(&data);
            }

            (data, temp) = (temp, data);
        }

        let mut out = [0.0; O];
        out[..O].copy_from_slice(&data[..O]);
        out
    }

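    /// Runs `unbuffered_run` over every input in parallel via rayon.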
    #[cfg(feature = "parallelization")]
    pub fn par_run(&self, inputs: &[[f32; I]]) -> Vec<[f32; O]> {
        inputs
            .par_iter()
            .map(|input| self.unbuffered_run(input))
            .collect()
    }
}

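/// Activation function applied by each neuron in a layer.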
#[cfg_attr(feature = "serialization", derive(Serialize, Deserialize))]
#[derive(Debug, Default, Clone, Copy)]
pub enum ActivationFunction {
    #[default]
    ReLU,
    Linear,
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn minimal_test() {
        let mut net = NeuralNetwork::new()
            .add_layer(10, ActivationFunction::ReLU)
            .add_layer(5, ActivationFunction::Linear);
        assert_eq!(net.run(&[1.0]), [10.0; 5])
    }

    #[test]
    fn better_test() {
        let mut net: NeuralNetwork<2, 1> = NeuralNetwork::new()
            .add_layer(4, ActivationFunction::ReLU)
            .with_weights(vec![
                vec![1.1, -0.93],
                vec![-0.9, -0.96],
                vec![1.2, 0.81],
                vec![-0.91, 0.95],
            ])
            .with_bias(vec![0.048, 0.12, 0.083, -0.02])
            .add_layer(1, ActivationFunction::Linear)
            .with_weights(vec![vec![-1.4, 1.3, 1.4, -1.3]]);

        assert!(net.run(&[3.0, 3.0])[0] > 0.0);
        assert!(net.run(&[-3.0, -3.0])[0] > 0.0);
        assert!(net.run(&[3.0, -3.0])[0] < 0.0);
        assert!(net.run(&[-3.0, 3.0])[0] < 0.0);
    }
}