pub struct NeuralNetwork<const I: usize, const O: usize> { /* private fields */ }
A simple neural network with I inputs and O outputs.
Implementations
impl<const I: usize, const O: usize> NeuralNetwork<I, O>
pub fn new() -> Self
Creates a new, empty network; layers are then added with add_layer.
Examples found in repository
examples/bench.rs (line 5)
3 fn main() {
4 // let's check how fast this thing gets
5 let mut net: NeuralNetwork<1, 1> = NeuralNetwork::new()
6 .add_layer(5, ActivationFunction::ReLU)
7 .add_layer(5, ActivationFunction::ReLU)
8 .add_layer(5, ActivationFunction::ReLU)
9 .add_layer(5, ActivationFunction::ReLU)
10 .add_layer(5, ActivationFunction::ReLU)
11 .add_layer(5, ActivationFunction::ReLU)
12 .add_layer(1, ActivationFunction::Linear);
13
14 let mut sum = 0.0;
15 for i in 0..10_000_000 {
16 sum += net.run(&[i as f32])[0];
17 }
18
19 println!("{sum}");
20 }
More examples
examples/training.rs (line 5)
3 fn main() {
4 // create a neural network
5 let mut net = NeuralNetwork::new().add_layer(1, ActivationFunction::Linear);
6
7 // create training data
8 let mut inputs = vec![];
9 let mut outputs = vec![];
10 for i in -50..50 {
11 inputs.push([i as f32]);
12 outputs.push([i as f32 * 3.0]);
13 }
14 let data = DataSet { inputs, outputs };
15
16 let trainer = BasicTrainer::new(data);
17
18 // train the model
19 for _ in 0..10 {
20 trainer.train(&mut net, 10);
21 // lower is better
22 println!("{}", trainer.get_total_error(&net))
23 }
24
25 // show that this actually works!
26 println!("########");
27 for i in -5..5 {
28 println!("{}", &net.run(&[i as f32 + 0.5])[0]);
29 }
30 }
examples/circle.rs (line 5)
3 fn main() {
4 // this network is completely overkill, but it does the job
5 let mut net: NeuralNetwork<2, 1> = NeuralNetwork::new()
6 .add_layer(3, ActivationFunction::ReLU)
7 .add_layer(3, ActivationFunction::ReLU)
8 // this layer reduces everything to one output!
9 .add_layer(1, ActivationFunction::Linear);
10
11 let mut inputs = vec![];
12 let mut output = vec![];
13 for x in 0..=100 {
14 for y in 0..=100 {
15 inputs.push([x as f32, y as f32]);
16 // we want this to be a classifier, so we ask for a result greater than zero or less than zero
17 output.push(if (x as f32).abs() + (y as f32).abs() < 30.0 {
18 [1.0]
19 } else {
20 [-1.0]
21 })
22 }
23 }
24
25 let data = DataSet {
26 inputs,
27 outputs: output,
28 };
29
30 let trainer = BasicTrainer::new(data);
31 for _ in 0..50 {
32 trainer.train(&mut net, 10);
33 println!("{}", trainer.get_total_error(&net))
34 }
35 }
pub fn add_layer(self, n: usize, func: ActivationFunction) -> Self
Adds a layer with n neurons and the specified activation function.
Examples found in repository
examples/bench.rs (line 6), examples/training.rs (line 5), examples/circle.rs (line 6) (full listings shown under new above)
pub fn random_layer(self, n: usize, func: ActivationFunction) -> Self
Adds a layer with n neurons and randomized weights and biases to the model.
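As an illustration, a randomly initialized network could be built like this; a minimal sketch, where the layer sizes and activations are arbitrary choices (crate imports omitted, as in the examples above):

fn main() {
    // a 2-input, 1-output network whose layers start from
    // randomized weights and biases
    let mut net: NeuralNetwork<2, 1> = NeuralNetwork::new()
        .random_layer(4, ActivationFunction::ReLU)
        .random_layer(1, ActivationFunction::Linear);

    // the untrained output is arbitrary, but the network is ready to run or train
    println!("{}", net.run(&[0.5, -0.5])[0]);
}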
pub fn random_edit(&mut self)
Randomly edits some neuron in the network; see the hill-climbing sketch after reverse_edit below.
pub fn reverse_edit(&mut self)
Reverses the last random edit.
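Together, random_edit and reverse_edit enable a simple hill-climbing loop: mutate the network, keep the change when the error drops, and undo it otherwise. A minimal sketch, reusing net, trainer, and get_total_error from the training example above:

// `net` and `trainer` are set up as in examples/training.rs
let mut best = trainer.get_total_error(&net);
for _ in 0..1_000 {
    net.random_edit();
    let err = trainer.get_total_error(&net);
    if err < best {
        best = err; // keep the improving edit
    } else {
        net.reverse_edit(); // undo the harmful edit
    }
}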
pub fn with_weights(self, weights: Vec<Vec<f32>>) -> Self
Adds custom weights to the last layer of the model; see the sketch after with_bias below.
pub fn with_bias(self, biases: Vec<f32>) -> Self
Adds custom biases to the last layer of the model.
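A sketch of hand-wiring a single linear neuron with both builders, so that the network computes f(x) = 3x + 1. The layout is an assumption, not confirmed by these docs: the outer Vec of with_weights is taken to hold one entry per neuron, each inner Vec one weight per input, and with_bias one bias per neuron:

fn main() {
    // one linear output neuron: weight 3.0, bias 1.0 (layout assumed as above)
    let mut net: NeuralNetwork<1, 1> = NeuralNetwork::new()
        .add_layer(1, ActivationFunction::Linear)
        .with_weights(vec![vec![3.0]])
        .with_bias(vec![1.0]);

    // if the assumed layout is right, this prints 7 (3.0 * 2.0 + 1.0)
    println!("{}", net.run(&[2.0])[0]);
}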
pub fn run(&mut self, input: &[f32; I]) -> [f32; O]
Runs the model on the given input.
Examples found in repository
examples/bench.rs (line 16), examples/training.rs (line 28) (full listings shown under new above)
Trait Implementations
Auto Trait Implementations
impl<const I: usize, const O: usize> Freeze for NeuralNetwork<I, O>
impl<const I: usize, const O: usize> RefUnwindSafe for NeuralNetwork<I, O>
impl<const I: usize, const O: usize> Send for NeuralNetwork<I, O>
impl<const I: usize, const O: usize> Sync for NeuralNetwork<I, O>
impl<const I: usize, const O: usize> Unpin for NeuralNetwork<I, O>
impl<const I: usize, const O: usize> UnwindSafe for NeuralNetwork<I, O>
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.