pub struct NeuralNetwork { /* private fields */ }
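A fully connected feed-forward network trained by plain backpropagation, as the method set below suggests: forward runs and caches a pass, errors scores it, and backpropagate applies one update step. A minimal sketch of the lifecycle, assuming the crate exports the Sigmoid activation used by every repository example:

let mut nn = NeuralNetwork::new(vec![2, 4, 1], 0.1, Box::new(Sigmoid));
let outputs = nn.forward(&vec![0.0, 1.0]); // one f32 per input neuron; returns the output layer
let err = nn.errors(&vec![1.0]);           // error of the pass above against the expected output
nn.backpropagate(&vec![1.0]);              // one update step at learning rate 0.1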
Implementations§

impl NeuralNetwork
pub fn new(
    layer_sizes: Vec<usize>,
    learning_rate: f32,
    activation: Box<dyn Activation>,
) -> Self
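Creates a network with one layer per entry in layer_sizes: the first entry is the input width, the last is the output width, and anything in between is a hidden layer (so vec![2, 4, 1] is 2 inputs, one hidden layer of 4 neurons, and 1 output). learning_rate scales each weight update made by backpropagate, and activation is the activation function the network applies. Whether a single activation covers every layer is not documented here, but the single parameter and the examples' uniform Box::new(Sigmoid) suggest it does.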
Examples found in repository
examples/rps.rs (lines 91-95)
fn new(model_file: &str) -> Self {
    // Try to load an existing model or create a new one
    let nn = NeuralNetwork::new(
        vec![HISTORY_LENGTH * 3, 32, 32, 16, 3],
        0.1,
        Box::new(Sigmoid),
    );

    MovePredictor {
        nn,
        player_history: Vec::new(),
        initialized: false,
        model_file: model_file.to_string(),
    }
}

examples/xor.rs (line 5)
fn main() {
    // Create a neural network with 2 inputs, one hidden layer of 4 neurons, and 1 output
    let mut nn = NeuralNetwork::new(vec![2, 4, 1], 0.1, Box::new(Sigmoid));

    // Training data for XOR
    let training_data = vec![
        (vec![0.0, 0.0], vec![0.0]),
        (vec![0.0, 1.0], vec![1.0]),
        (vec![1.0, 0.0], vec![1.0]),
        (vec![1.0, 1.0], vec![0.0]),
    ];

    // Train the network
    for _ in 0..1000000 {
        for (inputs, expected) in &training_data {
            let _outputs = nn.forward(inputs);
            nn.backpropagate(expected);
        }
    }

    // Test the network
    for (inputs, expected) in &training_data {
        let outputs = nn.forward(inputs);
        println!(
            "Input: {:?}, Expected: {:?}, Got: {:.4}",
            inputs, expected[0], outputs[0]
        );
    }
}

examples/xor_file.rs (line 5)
fn main() {
    // Try to load the neural network from a file, or create a new one if the file does not exist
    let mut nn = NeuralNetwork::new(vec![2, 4, 1], 0.1, Box::new(Sigmoid));

    // Training data for XOR
    let training_data = vec![
        (vec![0.0, 0.0], vec![0.0]),
        (vec![0.0, 1.0], vec![1.0]),
        (vec![1.0, 0.0], vec![1.0]),
        (vec![1.0, 1.0], vec![0.0]),
    ];

    // Train the network
    for _ in 0..1000000 {
        for (inputs, expected) in &training_data {
            let _outputs = nn.forward(inputs);
            nn.backpropagate(expected);
        }
    }

    // Test the network
    for (inputs, expected) in &training_data {
        let outputs = nn.forward(inputs);
        println!(
            "Input: {:?}, Expected: {:?}, Got: {:.4}",
            inputs, expected[0], outputs[0]
        );
    }
}

examples/square_function.rs (line 14)
fn main() {
    // Create a neural network with 1 input, two hidden layers of 4 neurons, and 1 output
    let mut nn = NeuralNetwork::new(vec![1, 4, 4, 1], 0.01, Box::new(Sigmoid));

    // Generate training data: f(x) = x^2 for x in [-1, 1]
    let training_data: Vec<(Vec<f32>, Vec<f32>)> = (-100..=100)
        .map(|i| {
            let x = i as f32 / 100.0;
            let y = x * x;
            (vec![normalize(x, -1.0, 1.0)], vec![normalize(y, 0.0, 1.0)])
        })
        .collect();

    // Train the network
    println!("Training...");
    for epoch in 0..1000000 {
        let mut error = 0.0;
        for (input, expected) in &training_data {
            let _output = nn.forward(input);
            // Accumulate per-sample error so the epoch average below is meaningful
            error += nn.errors(expected);
            nn.backpropagate(expected);
        }

        if epoch % 10000 == 0 {
            println!(
                "Epoch {}: MSE = {:.6}",
                epoch,
                error / training_data.len() as f32
            );
        }
    }

    // Test the network
    println!("\nTesting...");
    let test_points = vec![-1.0, -0.5, 0.0, 0.5, 1.0, 1.0 / PI];
    for x in test_points {
        let predicted = denormalize(nn.forward(&vec![normalize(x, -1.0, 1.0)])[0], 0.0, 1.0);
        println!(
            "x = {:.3}, x^2 = {:.3}, predicted = {:.3}, error = {:.3}",
            x,
            x * x,
            predicted,
            ((x * x) - predicted).abs()
        );
    }
}

examples/sine_wave.rs (line 15)
fn main() {
    // Create a network with 1 input, two hidden layers, and 1 output
    // Larger architecture to handle the complexity of the sine function
    let mut nn = NeuralNetwork::new(vec![1, 4, 4, 1], 0.001, Box::new(Sigmoid));

    // Generate training data: sin(x) for x in [0, 2π]
    let training_data: Vec<(Vec<f32>, Vec<f32>)> = (0..200)
        .map(|i| {
            let x = (i as f32) * 2.0 * PI / 200.0;
            let normalized_x = normalize(x, 0.0, 2.0 * PI);
            let normalized_sin = normalize(x.sin(), -1.0, 1.0);
            (vec![normalized_x], vec![normalized_sin])
        })
        .collect();

    // Train the network
    println!("Training...");
    for epoch in 0..1000000 {
        let mut total_error = 0.0;
        for (input, expected) in &training_data {
            let _outputs = nn.forward(input);
            // Score the forward pass before the weights change
            total_error += nn.errors(expected);
            nn.backpropagate(expected);
        }

        if epoch % 1000 == 0 {
            println!(
                "Epoch {}: MSE = {:.6}",
                epoch,
                total_error / training_data.len() as f32
            );
        }
    }

    // Test the network
    println!("\nTesting...");
    let test_points = vec![0.0, PI / 4.0, PI / 2.0, PI, 3.0 * PI / 2.0, 2.0 * PI];
    for x in test_points {
        let normalized_x = normalize(x, 0.0, 2.0 * PI);
        let predicted = denormalize(nn.forward(&vec![normalized_x])[0], -1.0, 1.0);
        println!(
            "x = {:.3}, sin(x) = {:.3}, predicted = {:.3}, error = {:.3}",
            x,
            x.sin(),
            predicted,
            (x.sin() - predicted).abs()
        );
    }
}
pub fn forward(&mut self, inputs: &Vec<f32>) -> Vec<f32>
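Runs a forward pass over inputs, which should hold one f32 per input neuron, and returns the activations of the output layer. The &mut self receiver suggests the per-layer activations are cached on the network so that a following errors or backpropagate call can use them; every repository example pairs forward with backpropagate in exactly that order.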
Examples found in repository
examples/tic_tac_toe.rs (line 191)
fn get_move(&mut self, board: &Board) -> usize {
    let empty_cells = board.get_empty_cells();

    if empty_cells.len() == 1 {
        return empty_cells[0];
    }

    let input = board.to_nn_input(self.symbol);

    let output = self.nn.forward(&input);

    let mut valid_moves: Vec<(usize, f32)> =
        empty_cells.iter().map(|&idx| (idx, output[idx])).collect();

    valid_moves.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());

    // Explore a random move 10% of the time; otherwise take the highest-scoring cell
    let mut rng = rand::rng();
    if rng.random::<f32>() < 0.1 {
        *empty_cells.choose(&mut rng).unwrap()
    } else {
        valid_moves[0].0
    }
}

fn record_game(&mut self, board_states: &Vec<(Board, usize)>, winner: GameState) {
    for (board, move_idx) in board_states {
        let current_player =
            if board.cells.iter().filter(|&&c| c != Cell::Empty).count() % 2 == 0 {
                Cell::X
            } else {
                Cell::O
            };

        if current_player != self.symbol {
            continue;
        }

        let input = board.to_nn_input(self.symbol);

        let mut target = vec![0.0; 9];

        let move_value = match winner {
            GameState::XWins => {
                if self.symbol == Cell::X {
                    1.0
                } else {
                    0.0
                }
            }
            GameState::OWins => {
                if self.symbol == Cell::O {
                    1.0
                } else {
                    0.0
                }
            }
            GameState::Draw => 0.5,
            _ => 0.1,
        };

        target[*move_idx] = move_value;

        self.training_data.push((input, target));
    }
}

fn train_incremental(&mut self, epochs: usize) {
    if self.training_data.len() < 3 {
        return;
    }

    let mut rng = rand::rng();
    let data_size = self.training_data.len();
    let mut indices: Vec<usize> = (0..data_size).collect();
    indices.shuffle(&mut rng);

    let training_size = data_size.min(50);
    let indices = indices[0..training_size].to_vec();

    for _ in 0..epochs {
        for &idx in &indices {
            let (input, target) = &self.training_data[idx];

            let _outputs = self.nn.forward(input);

            self.nn.backpropagate(target);
        }
    }

    // Keep only the most recent 1000 samples
    if self.training_data.len() > 1000 {
        self.training_data = self.training_data[self.training_data.len() - 1000..].to_vec();
    }
}
examples/rps.rs (line 132)
fn train(&mut self) {
    if !self.initialized || self.player_history.len() < HISTORY_LENGTH + 1 {
        return;
    }

    // Train on sequences in the history
    for i in 0..self.player_history.len() - HISTORY_LENGTH {
        let inputs = self.history_to_input(&self.player_history[i..i + HISTORY_LENGTH]);
        let target = self.player_history[i + HISTORY_LENGTH].to_input_vec();

        // Train multiple times on each sequence to reinforce learning
        for _ in 0..TRAINING_ITERATIONS {
            let _outputs = self.nn.forward(&inputs);
            self.nn.backpropagate(&target);
        }
    }
}

fn predict_next_move(&mut self) -> Move {
    if !self.initialized || self.player_history.len() < HISTORY_LENGTH {
        return Move::random();
    }

    // Get the last HISTORY_LENGTH moves
    let recent_history = &self.player_history[self.player_history.len() - HISTORY_LENGTH..];
    let inputs = self.history_to_input(recent_history);

    // Forward pass through the neural network
    let outputs = self.nn.forward(&inputs);

    // Find the move with the highest probability
    let mut max_idx = 0;
    let mut max_val = outputs[0];

    for (i, &val) in outputs.iter().enumerate().skip(1) {
        if val > max_val {
            max_val = val;
            max_idx = i;
        }
    }

    // Return the move corresponding to the highest output
    Move::from_index(max_idx)
}
examples/xor.rs (line 18), examples/xor_file.rs (line 18), examples/square_function.rs (line 31), examples/sine_wave.rs (line 32): full listings shown under new above.

pub fn errors(&self, expected: &Vec<f32>) -> f32
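Returns a scalar error for the most recent forward pass, measured against expected. The reduction is not documented here; the repository examples accumulate this value per sample and divide by the dataset size to report an MSE, which suggests a squared error summed over the output neurons.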
Examples found in repository
examples/square_function.rs (line 32), examples/sine_wave.rs (line 34): full listings shown under new above.

pub fn backpropagate(&mut self, expected: &Vec<f32>)
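Applies one backpropagation step toward expected, adjusting weights and biases by the learning rate given to new. It relies on the activations cached by the preceding forward call, so the two are paired per sample. The canonical per-sample step used throughout the examples:

let _outputs = nn.forward(&inputs); // cache activations for this sample
nn.backpropagate(&expected);        // one gradient step toward the expected output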
Examples found in repository
examples/rps.rs (line 136) and examples/tic_tac_toe.rs (line 267): listings shown under forward above.
examples/xor.rs (line 19), examples/xor_file.rs (line 19), examples/square_function.rs (line 33), examples/sine_wave.rs (line 33): full listings shown under new above.

Trait Implementations§
impl Clone for NeuralNetwork

fn clone(&self) -> NeuralNetwork
Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.

Auto Trait Implementations§
impl Freeze for NeuralNetwork
impl !RefUnwindSafe for NeuralNetwork
impl !Send for NeuralNetwork
impl !Sync for NeuralNetwork
impl Unpin for NeuralNetwork
impl !UnwindSafe for NeuralNetwork
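NeuralNetwork is neither Send nor Sync, most likely because the boxed dyn Activation carries no Send + Sync bounds; keep each network on the thread that created it.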
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.