gradient_descent/
gradient_descent.rs

//! A basic example of performing gradient descent on a neural network using micrograd-rs.

use micrograd_rs::{Value, MLP};

fn main() {
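    // A multi-layer perceptron: 3 inputs, two hidden layers of 4 neurons each, 1 output.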
    let mlp = MLP::new(3, vec![4, 4, 1]);

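    // Four training inputs, each with three features.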
    let xs = vec![
        vec![2.0, 3.0, -1.0],
        vec![3.0, -1.0, 0.5],
        vec![0.5, 1.0, 1.0],
        vec![1.0, 1.0, -1.0],
    ];

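    // The target output for each input row.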
    let ys = vec![1.0, -1.0, -1.0, 1.0];

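    // Train for 100 iterations of full-batch gradient descent.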
    for _ in 0..100 {
        // Forward pass: run every input through the network. The final layer
        // has a single neuron, so each prediction is the first (and only) output.
        let ypred: Vec<Value> = xs
            .iter()
            .map(|x| mlp.forward(x.iter().map(|x| Value::from(*x)).collect())[0].clone())
            .collect();
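        // Capture the raw f64 predictions now; `ypred` itself is consumed by the loss below.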
        let ypred_floats: Vec<f64> = ypred.iter().map(|v| v.data()).collect();

        // Loss: the sum of squared errors between predictions and ground truth.
        let ygt = ys.iter().map(|y| Value::from(*y));
        let loss: Value = ypred
            .into_iter()
            .zip(ygt)
            .map(|(yp, yg)| (yp - yg).pow(&Value::from(2.0)))
            .sum();

        println!("Loss: {} Predictions: {:?}", loss.data(), ypred_floats);

        // Backward pass: reset stale gradients from the previous iteration,
        // then backpropagate from the loss.
        mlp.parameters().iter().for_each(|p| p.clear_gradient());
        loss.backward();

        // Adjustment: step each parameter against its gradient (learning rate 0.05).
        mlp.parameters().iter().for_each(|p| p.adjust(-0.05));
    }
}
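
Running the example, the printed loss should fall toward zero over the 100 iterations, with the four predictions converging on the targets 1.0, -1.0, -1.0, 1.0; the exact values depend on the initial weights.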