pub struct NeuralNetwork { /* private fields */ }
Expand description
Neural network.
Implementations§
Source§impl NeuralNetwork
impl NeuralNetwork
Sourcepub fn new() -> Self
pub fn new() -> Self
Examples found in repository?
examples/example.rs (line 4)
3fn main() {
4 let mut nn = NeuralNetwork::new();
5
6 // Define a network with an input layer, a hidden layer, and an output layer
7 nn.add_layer(3, ActivationFunction::None, 0.0); // Input layer with 3 neurons
8 nn.add_layer(5, ActivationFunction::LeakyReLU, 0.1); // Hidden layer with 5 neurons
9 nn.add_layer(2, ActivationFunction::Linear, 0.1); // Output layer with 2 neurons
10
11 // Define 10 predictable input-target pairs
12 // Targets are linear combinations of inputs:
13 // output1 = (x1 + x2 + x3) / 3
14 // output2 = (x1 - x2 + x3) / 3
15 let training_data = [
16 ([0.1, 0.2, 0.3], [0.2, 0.0667]),
17 ([0.4, 0.5, 0.6], [0.5, 0.1333]),
18 ([0.2, 0.1, 0.4], [0.2333, 0.1667]),
19 ([0.9, 0.8, 0.7], [0.8, 0.2667]),
20 ([0.3, 0.4, 0.5], [0.4, 0.1333]),
21 ([0.6, 0.5, 0.4], [0.5, 0.1667]),
22 ([0.4, 0.3, 0.2], [0.3, 0.1]),
23 ([0.7, 0.8, 0.9], [0.8, 0.2667]),
24 ([0.2, 0.3, 0.1], [0.2, 0.0]),
25 ([0.8, 0.7, 0.6], [0.7, 0.2333]),
26 ];
27
28 let epochs = 1000;
29
30 for epoch in 1..=epochs {
31 let mut total_error = 0.0;
32 for (inputs, targets) in &training_data {
33 let error = nn.train(inputs, targets, 0.01);
34 total_error += error;
35 }
36 let avg_error = total_error / training_data.len() as f32;
37 if epoch % 100 == 0 {
38 println!("Epoch {}: Average Error = {:.6}", epoch, avg_error);
39 }
40 }
41
42 // Make predictions on the training data
43 for (inputs, targets) in &training_data {
44 let output = nn.predict(inputs);
45 println!(
46 "Input: {:?} | Predicted Output: {:?} | Target: {:?}",
47 inputs, output, targets
48 );
49 }
50}
Sourcepub fn add_layer(
&mut self,
width: usize,
activation: ActivationFunction,
bias: f32,
)
pub fn add_layer( &mut self, width: usize, activation: ActivationFunction, bias: f32, )
Adds a layer to the neural network.
Examples found in repository?
examples/example.rs (line 7)
3fn main() {
4 let mut nn = NeuralNetwork::new();
5
6 // Define a network with an input layer, a hidden layer, and an output layer
7 nn.add_layer(3, ActivationFunction::None, 0.0); // Input layer with 3 neurons
8 nn.add_layer(5, ActivationFunction::LeakyReLU, 0.1); // Hidden layer with 5 neurons
9 nn.add_layer(2, ActivationFunction::Linear, 0.1); // Output layer with 2 neurons
10
11 // Define 10 predictable input-target pairs
12 // Targets are linear combinations of inputs:
13 // output1 = (x1 + x2 + x3) / 3
14 // output2 = (x1 - x2 + x3) / 3
15 let training_data = [
16 ([0.1, 0.2, 0.3], [0.2, 0.0667]),
17 ([0.4, 0.5, 0.6], [0.5, 0.1333]),
18 ([0.2, 0.1, 0.4], [0.2333, 0.1667]),
19 ([0.9, 0.8, 0.7], [0.8, 0.2667]),
20 ([0.3, 0.4, 0.5], [0.4, 0.1333]),
21 ([0.6, 0.5, 0.4], [0.5, 0.1667]),
22 ([0.4, 0.3, 0.2], [0.3, 0.1]),
23 ([0.7, 0.8, 0.9], [0.8, 0.2667]),
24 ([0.2, 0.3, 0.1], [0.2, 0.0]),
25 ([0.8, 0.7, 0.6], [0.7, 0.2333]),
26 ];
27
28 let epochs = 1000;
29
30 for epoch in 1..=epochs {
31 let mut total_error = 0.0;
32 for (inputs, targets) in &training_data {
33 let error = nn.train(inputs, targets, 0.01);
34 total_error += error;
35 }
36 let avg_error = total_error / training_data.len() as f32;
37 if epoch % 100 == 0 {
38 println!("Epoch {}: Average Error = {:.6}", epoch, avg_error);
39 }
40 }
41
42 // Make predictions on the training data
43 for (inputs, targets) in &training_data {
44 let output = nn.predict(inputs);
45 println!(
46 "Input: {:?} | Predicted Output: {:?} | Target: {:?}",
47 inputs, output, targets
48 );
49 }
50}
Sourcepub fn train(&mut self, inputs: &[f32], targets: &[f32], rate: f32) -> f32
pub fn train(&mut self, inputs: &[f32], targets: &[f32], rate: f32) -> f32
Trains the neural network with the given inputs and targets.
Examples found in repository?
examples/example.rs (line 33)
3fn main() {
4 let mut nn = NeuralNetwork::new();
5
6 // Define a network with an input layer, a hidden layer, and an output layer
7 nn.add_layer(3, ActivationFunction::None, 0.0); // Input layer with 3 neurons
8 nn.add_layer(5, ActivationFunction::LeakyReLU, 0.1); // Hidden layer with 5 neurons
9 nn.add_layer(2, ActivationFunction::Linear, 0.1); // Output layer with 2 neurons
10
11 // Define 10 predictable input-target pairs
12 // Targets are linear combinations of inputs:
13 // output1 = (x1 + x2 + x3) / 3
14 // output2 = (x1 - x2 + x3) / 3
15 let training_data = [
16 ([0.1, 0.2, 0.3], [0.2, 0.0667]),
17 ([0.4, 0.5, 0.6], [0.5, 0.1333]),
18 ([0.2, 0.1, 0.4], [0.2333, 0.1667]),
19 ([0.9, 0.8, 0.7], [0.8, 0.2667]),
20 ([0.3, 0.4, 0.5], [0.4, 0.1333]),
21 ([0.6, 0.5, 0.4], [0.5, 0.1667]),
22 ([0.4, 0.3, 0.2], [0.3, 0.1]),
23 ([0.7, 0.8, 0.9], [0.8, 0.2667]),
24 ([0.2, 0.3, 0.1], [0.2, 0.0]),
25 ([0.8, 0.7, 0.6], [0.7, 0.2333]),
26 ];
27
28 let epochs = 1000;
29
30 for epoch in 1..=epochs {
31 let mut total_error = 0.0;
32 for (inputs, targets) in &training_data {
33 let error = nn.train(inputs, targets, 0.01);
34 total_error += error;
35 }
36 let avg_error = total_error / training_data.len() as f32;
37 if epoch % 100 == 0 {
38 println!("Epoch {}: Average Error = {:.6}", epoch, avg_error);
39 }
40 }
41
42 // Make predictions on the training data
43 for (inputs, targets) in &training_data {
44 let output = nn.predict(inputs);
45 println!(
46 "Input: {:?} | Predicted Output: {:?} | Target: {:?}",
47 inputs, output, targets
48 );
49 }
50}
Sourcepub fn predict(&mut self, inputs: &[f32]) -> &[f32]
pub fn predict(&mut self, inputs: &[f32]) -> &[f32]
Predicts the output for the given inputs.
Examples found in repository?
examples/example.rs (line 44)
3fn main() {
4 let mut nn = NeuralNetwork::new();
5
6 // Define a network with an input layer, a hidden layer, and an output layer
7 nn.add_layer(3, ActivationFunction::None, 0.0); // Input layer with 3 neurons
8 nn.add_layer(5, ActivationFunction::LeakyReLU, 0.1); // Hidden layer with 5 neurons
9 nn.add_layer(2, ActivationFunction::Linear, 0.1); // Output layer with 2 neurons
10
11 // Define 10 predictable input-target pairs
12 // Targets are linear combinations of inputs:
13 // output1 = (x1 + x2 + x3) / 3
14 // output2 = (x1 - x2 + x3) / 3
15 let training_data = [
16 ([0.1, 0.2, 0.3], [0.2, 0.0667]),
17 ([0.4, 0.5, 0.6], [0.5, 0.1333]),
18 ([0.2, 0.1, 0.4], [0.2333, 0.1667]),
19 ([0.9, 0.8, 0.7], [0.8, 0.2667]),
20 ([0.3, 0.4, 0.5], [0.4, 0.1333]),
21 ([0.6, 0.5, 0.4], [0.5, 0.1667]),
22 ([0.4, 0.3, 0.2], [0.3, 0.1]),
23 ([0.7, 0.8, 0.9], [0.8, 0.2667]),
24 ([0.2, 0.3, 0.1], [0.2, 0.0]),
25 ([0.8, 0.7, 0.6], [0.7, 0.2333]),
26 ];
27
28 let epochs = 1000;
29
30 for epoch in 1..=epochs {
31 let mut total_error = 0.0;
32 for (inputs, targets) in &training_data {
33 let error = nn.train(inputs, targets, 0.01);
34 total_error += error;
35 }
36 let avg_error = total_error / training_data.len() as f32;
37 if epoch % 100 == 0 {
38 println!("Epoch {}: Average Error = {:.6}", epoch, avg_error);
39 }
40 }
41
42 // Make predictions on the training data
43 for (inputs, targets) in &training_data {
44 let output = nn.predict(inputs);
45 println!(
46 "Input: {:?} | Predicted Output: {:?} | Target: {:?}",
47 inputs, output, targets
48 );
49 }
50}
Auto Trait Implementations§
impl Freeze for NeuralNetwork
impl RefUnwindSafe for NeuralNetwork
impl Send for NeuralNetwork
impl Sync for NeuralNetwork
impl Unpin for NeuralNetwork
impl UnwindSafe for NeuralNetwork
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more