Struct NeuralNetwork

pub struct NeuralNetwork { /* private fields */ }
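
Judging from the constructor and the repository examples, NeuralNetwork appears to be a small fully connected feed-forward network trained by per-sample backpropagation: forward runs one pass over an input vector, get_outputs reads the resulting output layer, and backwards applies a weight update from an error vector. The Serialize/Deserialize implementations below, together with from_file, save_to_file, and the xor_model.json path used in the examples, suggest models are persisted through serde.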

Implementations

impl NeuralNetwork

pub fn new(layer_sizes: Vec<usize>, learning_rate: f32) -> Self
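
The examples suggest layer_sizes lists the width of every layer from input to output (vec![2, 4, 1] is described in examples/xor.rs as 2 inputs, one hidden layer of 4 neurons, and 1 output), and learning_rate is the step size used when backwards updates the weights.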

Examples found in repository:
examples/xor.rs (line 5)
3fn main() {
4    // Create a neural network with 2 inputs, one hidden layer of 4 neurons, and 1 output
5    let mut nn = NeuralNetwork::new(vec![2, 4, 1], 0.1);
6
7    // Training data for XOR
8    let training_data = vec![
9        (vec![0.0, 0.0], vec![0.0]),
10        (vec![0.0, 1.0], vec![1.0]),
11        (vec![1.0, 0.0], vec![1.0]),
12        (vec![1.0, 1.0], vec![0.0]),
13    ];
14
15    // Train the network
16    for _ in 0..100000 {
17        for (inputs, expected) in &training_data {
18            nn.forward(inputs.clone());
19            let outputs = nn.get_outputs();
20            let errors = vec![expected[0] - outputs[0]];
21            nn.backwards(errors);
22        }
23    }
24
25    // Test the network
26    for (inputs, expected) in &training_data {
27        nn.forward(inputs.clone());
28        let outputs = nn.get_outputs();
29        println!(
30            "Input: {:?}, Expected: {:?}, Got: {:.4}",
31            inputs, expected[0], outputs[0]
32        );
33    }
34}
More examples:
examples/xor_file.rs (line 13)
4fn main() {
5    let file_path = "xor_model.json";
6
7    // Try to load the neural network from a file, or create a new one if the file does not exist
8    let mut nn = if let Ok(nn) = NeuralNetwork::from_file(file_path) {
9        println!("Loaded neural network from file.");
10        nn
11    } else {
12        println!("Creating a new neural network.");
13        NeuralNetwork::new(vec![2, 4, 1], 0.1)
14    };
15
16    // Training data for XOR
17    let training_data = vec![
18        (vec![0.0, 0.0], vec![0.0]),
19        (vec![0.0, 1.0], vec![1.0]),
20        (vec![1.0, 0.0], vec![1.0]),
21        (vec![1.0, 1.0], vec![0.0]),
22    ];
23
24    // Train the network
25    for _ in 0..1000000 {
26        for (inputs, expected) in &training_data {
27            nn.forward(inputs.clone());
28            let outputs = nn.get_outputs();
29            let errors = vec![expected[0] - outputs[0]];
30            nn.backwards(errors);
31        }
32    }
33
34    // Test the network
35    for (inputs, expected) in &training_data {
36        nn.forward(inputs.clone());
37        let outputs = nn.get_outputs();
38        println!(
39            "Input: {:?}, Expected: {:?}, Got: {:.4}",
40            inputs, expected[0], outputs[0]
41        );
42    }
43
44    // Save the trained neural network to a file
45    if let Err(e) = nn.save_to_file(file_path) {
46        eprintln!("Failed to save neural network to file: {}", e);
47    } else {
48        println!("Neural network saved to file.");
49    }
50}
examples/sine_wave.rs (line 15)
12fn main() {
13    // Create a network with 1 input, two hidden layers, and 1 output
14    // Larger architecture to handle the complexity of sine function
15    let mut nn = NeuralNetwork::new(vec![1, 32, 32, 1], 0.005);
16
17    // Generate training data: sin(x) for x in [0, 2π]
18    let training_data: Vec<(Vec<f32>, Vec<f32>)> = (0..200)
19        .map(|i| {
20            let x = (i as f32) * 2.0 * PI / 200.0;
21            let normalized_x = normalize(x, 0.0, 2.0 * PI);
22            let normalized_sin = normalize(x.sin(), -1.0, 1.0);
23            (vec![normalized_x], vec![normalized_sin])
24        })
25        .collect();
26
27    // Train the network
28    println!("Training...");
29    for epoch in 0..20000 {
30        let mut total_error = 0.0;
31        for (input, expected) in &training_data {
32            nn.forward(input.clone());
33            let output = nn.get_outputs();
34            let error = expected[0] - output[0];
35            total_error += error * error;
36            nn.backwards(vec![error]);
37        }
38
39        if epoch % 1000 == 0 {
40            println!(
41                "Epoch {}: MSE = {:.6}",
42                epoch,
43                total_error / training_data.len() as f32
44            );
45        }
46    }
47
48    // Test the network
49    println!("\nTesting...");
50    let test_points = vec![0.0, PI / 4.0, PI / 2.0, PI, 3.0 * PI / 2.0, 2.0 * PI];
51    for x in test_points {
52        let normalized_x = normalize(x, 0.0, 2.0 * PI);
53        nn.forward(vec![normalized_x]);
54        let predicted = denormalize(nn.get_outputs()[0], -1.0, 1.0);
55        println!(
56            "x = {:.3}, sin(x) = {:.3}, predicted = {:.3}, error = {:.3}",
57            x,
58            x.sin(),
59            predicted,
60            (x.sin() - predicted).abs()
61        );
62    }
63}
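
The sine_wave example relies on normalize and denormalize helpers defined earlier in examples/sine_wave.rs, outside the snippet shown here. Their exact definitions are not part of this page; a minimal sketch, assuming a plain min-max mapping to and from the [0, 1] range, might look like this:

fn normalize(value: f32, min: f32, max: f32) -> f32 {
    // Assumed behaviour: map value from [min, max] into [0, 1].
    (value - min) / (max - min)
}

fn denormalize(value: f32, min: f32, max: f32) -> f32 {
    // Assumed behaviour: map value from [0, 1] back into [min, max].
    value * (max - min) + min
}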

pub fn from_file(path: &str) -> Result<Self>
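
Loads a previously saved network from path. The signature uses the crate's Result alias; in examples/xor_file.rs a failed load (for example, a missing file) is handled by falling back to NeuralNetwork::new.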

Examples found in repository:
examples/xor_file.rs (line 8); see the full listing under new above.

pub fn save_to_file(&self, path: &str) -> Result<()>
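
Writes the network to path. Given the Serialize implementation below and the xor_model.json path used in the example, the on-disk format is presumably JSON produced through serde, though the exact format is not documented here.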

Examples found in repository:
examples/xor_file.rs (line 45); see the full listing under new above.

pub fn forward(&mut self, inputs: Vec<f32>)
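
Runs a forward pass over inputs and stores the resulting activations internally; every example pairs a forward call with get_outputs to read the result. The length of inputs presumably has to match the first entry of layer_sizes.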

Examples found in repository:
examples/xor.rs (line 18), examples/xor_file.rs (line 27), and examples/sine_wave.rs (line 32); see the full listings under new above.

pub fn backwards(&mut self, errors: Vec<f32>)
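
Applies one backpropagation/weight-update step from errors. In every repository example the error vector is built as expected minus actual, one entry per output of the preceding forward call. A small sketch following that convention for a network with several outputs (nn and expected are assumed to be in scope):

let outputs = nn.get_outputs();
// One error entry per output neuron: expected - actual.
let errors: Vec<f32> = expected
    .iter()
    .zip(outputs.iter())
    .map(|(e, o)| e - o)
    .collect();
nn.backwards(errors);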

Examples found in repository:
examples/xor.rs (line 21), examples/xor_file.rs (line 30), and examples/sine_wave.rs (line 36); see the full listings under new above.

pub fn get_outputs(&self) -> Vec<f32>
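
Returns the output-layer values produced by the most recent call to forward.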

Examples found in repository:
examples/xor.rs (line 19), examples/xor_file.rs (line 28), and examples/sine_wave.rs (line 33); see the full listings under new above.

Trait Implementations

impl<'de> Deserialize<'de> for NeuralNetwork

fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where __D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer.

impl Serialize for NeuralNetwork

fn serialize<__S>(&self, __serializer: __S) -> Result<__S::Ok, __S::Error>
where __S: Serializer,

Serialize this value into the given Serde serializer.

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

fn vzip(self) -> V

impl<T> DeserializeOwned for T
where T: for<'de> Deserialize<'de>,