extern crate alpha_micrograd_rust;

use alpha_micrograd_rust::nn::{Activation, Layer};
use alpha_micrograd_rust::value::Expr;

fn main() {
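    // Target values the layer should learn to reproduce; they are constants,
    // so mark them as non-learnable to keep them out of gradient updates.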
    let mut target = vec![Expr::new_leaf(15.0, "t1"), Expr::new_leaf(85.0, "t2")];
    target[0].is_learnable = false;
    target[1].is_learnable = false;

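    // A fully-connected layer with 3 inputs, 2 outputs and no activation.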
    let layer = Layer::new(3, 2, Activation::None);
    println!("Initial values: {:}", layer);

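    // Input values for the layer (one 3-element sample).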
    let mut inputs = vec![
        Expr::new_leaf(1.0, "x_1"),
        Expr::new_leaf(2.0, "x_2"),
        Expr::new_leaf(3.0, "x_3"),
    ];

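    // Inputs are data, not parameters: exclude them from gradient updates too.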
    inputs.iter_mut().for_each(|input| {
        input.is_learnable = false;
    });

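    // Forward pass: the layer produces two outputs. Give them stable names so
    // they can be located inside the loss expression tree later.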
    let mut y = layer.forward(inputs);
    let mut y1 = y.remove(0);
    y1.name = "y1".to_string();
    let mut y2 = y.remove(0);
    y2.name = "y2".to_string();

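    // Squared-error terms: (prediction - target)^2. The exponent is itself a
    // leaf node in the graph, so it must be marked non-learnable as well.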
    let d1 = y1 - target[0].clone();
    let mut sqr1 = Expr::new_leaf(2.0, "square_exponent1");
    sqr1.is_learnable = false;

    let d2 = y2 - target[1].clone();
    let mut sqr2 = Expr::new_leaf(2.0, "square_exponent2");
    sqr2.is_learnable = false;

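    // Total loss: the sum of the two squared differences.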
    let mut loss = d1.pow(sqr1, "diff1") + d2.pow(sqr2, "diff2");

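    // Look up nodes by name in the loss expression tree to report their values.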
    let t1 = loss.find("t1").unwrap();
    let t2 = loss.find("t2").unwrap();
    let y1 = loss.find("y1").unwrap();
    let y2 = loss.find("y2").unwrap();

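    // Report the state before any training.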
    println!("Initial targets: {:.2}, {:.2}", t1.result, t2.result);
    println!("Predicted: {:.2}, {:.2}", y1.result, y2.result);
    println!("Initial loss: {:.2}", loss.result);

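    // Train for 100 iterations with a fixed learning rate.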
    println!("\nTraining:");
    let learning_rate = 0.004;
    for i in 1..=100 {
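        // One training step: update the learnable parameters from the loss
        // gradient, then re-evaluate the whole expression tree.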
        loss.learn(learning_rate);
        loss.recalculate();

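        // Re-fetch the named nodes to report the updated predictions.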
        let t1 = loss.find("t1").unwrap();
        let t2 = loss.find("t2").unwrap();
        let y1 = loss.find("y1").unwrap();
        let y2 = loss.find("y2").unwrap();

        println!(
            "Iteration {:3}, loss: {:9.4} / predicted: {:5.2}, {:5.2} (targets: {:5.2}, {:5.2})",
            i, loss.result, y1.result, y2.result, t1.result, t2.result
        );
    }

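    // The layer's parameters after training.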
    println!("Final values: {:}", layer);
}