scalar_back_propagation/
scalar_back_propagation.rs

//! A basic example of performing back-propagation on a small
//! computation graph with two inputs and two weights.
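//!
//! The graph computes o = tanh(x1*w1 + x2*w2 + b); by the chain
//! rule, the gradient of o with respect to x1 is (1 - o^2) * w1.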

use micrograd_rs::Value;

fn main() {
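    // Inputs. A clone of x1 is kept so that its gradient can still be
    // read after `x1` itself is moved into the multiplication below;
    // clones of a `Value` share the same underlying node.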
    let x1 = Value::from(2.0).with_label("x1");
    let x1_clone = x1.clone();
    let x2 = Value::from(0.0).with_label("x2");

    let w1 = Value::from(-3.0).with_label("w1");
    let w2 = Value::from(1.0).with_label("w2");

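    // The bias is chosen so that n = x1*w1 + x2*w2 + b ≈ 0.8814 and
    // o = tanh(n) ≈ √2/2 ≈ 0.7071, which makes the local tanh
    // gradient 1 - o^2 come out to exactly 0.5.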
    let b = Value::from(6.8813735870195432).with_label("b");

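    // Forward pass: combine inputs, weights, and bias into an
    // expression graph, then squash the pre-activation through tanh.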
    let x1w1 = (x1 * w1).with_label("x1w1");
    let x2w2 = (x2 * w2).with_label("x2w2");

    let x1w1x2w2 = (x1w1 + x2w2).with_label("x1w1x2w2");

    let n = (x1w1x2w2 + b).with_label("n");
    let o = n.tanh().with_label("o");

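    // Reverse-mode pass: walks the graph backwards from o and fills in
    // each node's gradient, read below via x1_clone.gradient().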
    o.backward();

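    // Expected values:
    //   o      = tanh(2.0*-3.0 + 0.0*1.0 + b) ≈ tanh(0.8814) ≈ 0.7071
    //   do/dx1 = (1 - o^2) * w1 = 0.5 * -3.0 = -1.5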
    assert_eq!(0.7071, round_to(o.data(), 4));
    assert_eq!(-1.5, round_to(x1_clone.gradient(), 3));
    println!("{:?}", o);
}

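/// Rounds `value` to `digits` decimal places so that floating-point
/// results can be compared with `assert_eq!`.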
fn round_to(value: f64, digits: i32) -> f64 {
    let factor = 10f64.powi(digits);
    (factor * value).round() / factor
}