pub struct Layer { /* private fields */ }
Expand description
A layer in a neural network.
A layer is a collection of Neurons. It calculates the output of each neuron in the layer.
The output of the layer is the collection of the outputs of the neurons.
Implementations§
Source§impl Layer
impl Layer
Source§
pub fn new(n_inputs: u32, n_outputs: u32, activation: Activation) -> Layer
pub fn new(n_inputs: u32, n_outputs: u32, activation: Activation) -> Layer
Create a new Layer whose neurons each take n_inputs inputs, with n_outputs neurons in total.
The layer is a collection of neurons. The number of neurons is n_outputs.
Each neuron has n_inputs inputs.
Examples found in repository?
examples/layer.rs (line 19)
14fn main() {
15 let mut target = vec![Expr::new_leaf(15.0, "t1"), Expr::new_leaf(85.0, "t2")];
16 target[0].is_learnable = false;
17 target[1].is_learnable = false;
18
19 let layer = Layer::new(3, 2, Activation::None);
20 println!("Initial values: {:}", layer);
21
22 let mut inputs = vec![
23 Expr::new_leaf(1.0, "x_1"),
24 Expr::new_leaf(2.0, "x_2"),
25 Expr::new_leaf(3.0, "x_3"),
26 ];
27
28 inputs.iter_mut().for_each(|input| {
29 input.is_learnable = false;
30 });
31
32 let mut y = layer.forward(inputs);
33 let mut y1 = y.remove(0);
34 y1.name = "y1".to_string();
35 let mut y2 = y.remove(0);
36 y2.name = "y2".to_string();
37
38 let d1 = y1 - target[0].clone();
39 let mut sqr1 = Expr::new_leaf(2.0, "square_exponent1");
40 sqr1.is_learnable = false;
41
42 let d2 = y2 - target[1].clone();
43 let mut sqr2 = Expr::new_leaf(2.0, "square_exponent2");
44 sqr2.is_learnable = false;
45
46 let mut loss = d1.pow(sqr1, "diff1") + d2.pow(sqr2, "diff2");
47
48 let t1 = loss.find("t1").unwrap();
49 let t2 = loss.find("t2").unwrap();
50 let y1 = loss.find("y1").unwrap();
51 let y2 = loss.find("y2").unwrap();
52
53 println!("Initial targets: {:.2}, {:.2}", t1.result, t2.result);
54 println!("Predicted: {:.2}, {:.2}", y1.result, y2.result);
55 println!("Initial loss: {:.2}", loss.result);
56
57 println!("\nTraining:");
58 let learning_rate = 0.004;
59 for i in 1..=100 {
60 loss.learn(learning_rate);
61 loss.recalculate();
62
63 let t1 = loss.find("t1").unwrap();
64 let t2 = loss.find("t2").unwrap();
65 let y1 = loss.find("y1").unwrap();
66 let y2 = loss.find("y2").unwrap();
67
68 println!(
69 "Iteration {:3}, loss: {:9.4} / predicted: {:5.2}, {:5.2} (targets: {:5.2}, {:5.2})",
70 i, loss.result, y1.result, y2.result, t1.result, t2.result
71 );
72 }
73
74 println!("Final values: {:}", layer);
75}
Source§
pub fn forward(&self, x: Vec<Expr>) -> Vec<Expr>
pub fn forward(&self, x: Vec<Expr>) -> Vec<Expr>
Calculate the output of the layer for the given inputs.
The output of the layer is the collection of the outputs of the neurons.
Examples found in repository?
examples/layer.rs (line 32)
14fn main() {
15 let mut target = vec![Expr::new_leaf(15.0, "t1"), Expr::new_leaf(85.0, "t2")];
16 target[0].is_learnable = false;
17 target[1].is_learnable = false;
18
19 let layer = Layer::new(3, 2, Activation::None);
20 println!("Initial values: {:}", layer);
21
22 let mut inputs = vec![
23 Expr::new_leaf(1.0, "x_1"),
24 Expr::new_leaf(2.0, "x_2"),
25 Expr::new_leaf(3.0, "x_3"),
26 ];
27
28 inputs.iter_mut().for_each(|input| {
29 input.is_learnable = false;
30 });
31
32 let mut y = layer.forward(inputs);
33 let mut y1 = y.remove(0);
34 y1.name = "y1".to_string();
35 let mut y2 = y.remove(0);
36 y2.name = "y2".to_string();
37
38 let d1 = y1 - target[0].clone();
39 let mut sqr1 = Expr::new_leaf(2.0, "square_exponent1");
40 sqr1.is_learnable = false;
41
42 let d2 = y2 - target[1].clone();
43 let mut sqr2 = Expr::new_leaf(2.0, "square_exponent2");
44 sqr2.is_learnable = false;
45
46 let mut loss = d1.pow(sqr1, "diff1") + d2.pow(sqr2, "diff2");
47
48 let t1 = loss.find("t1").unwrap();
49 let t2 = loss.find("t2").unwrap();
50 let y1 = loss.find("y1").unwrap();
51 let y2 = loss.find("y2").unwrap();
52
53 println!("Initial targets: {:.2}, {:.2}", t1.result, t2.result);
54 println!("Predicted: {:.2}, {:.2}", y1.result, y2.result);
55 println!("Initial loss: {:.2}", loss.result);
56
57 println!("\nTraining:");
58 let learning_rate = 0.004;
59 for i in 1..=100 {
60 loss.learn(learning_rate);
61 loss.recalculate();
62
63 let t1 = loss.find("t1").unwrap();
64 let t2 = loss.find("t2").unwrap();
65 let y1 = loss.find("y1").unwrap();
66 let y2 = loss.find("y2").unwrap();
67
68 println!(
69 "Iteration {:3}, loss: {:9.4} / predicted: {:5.2}, {:5.2} (targets: {:5.2}, {:5.2})",
70 i, loss.result, y1.result, y2.result, t1.result, t2.result
71 );
72 }
73
74 println!("Final values: {:}", layer);
75}
Trait Implementations§
Auto Trait Implementations§
impl Freeze for Layer
impl RefUnwindSafe for Layer
impl Send for Layer
impl Sync for Layer
impl Unpin for Layer
impl UnwindSafe for Layer
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§
fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more