// examples/mnist_loop/looping.rs

1// Copyright (C) 2024 Hallvard Høyland Lavik
2
3use neurons::{activation, feedback, network, objective, optimizer, plot, tensor};
4
5use std::fs::File;
6use std::io::{BufReader, Read, Result};
7use std::sync::Arc;
8
/// Reads a single big-endian `u32` from the stream.
///
/// MNIST IDX files store all header metadata as 4-byte big-endian integers.
///
/// # Errors
/// Returns an error if fewer than four bytes remain in the stream.
fn read(reader: &mut dyn Read) -> Result<u32> {
    let mut bytes = [0u8; 4];
    reader.read_exact(&mut bytes)?;
    let value = u32::from_be_bytes(bytes);
    Ok(value)
}
14
15fn load_mnist(path: &str) -> Result<Vec<tensor::Tensor>> {
16    let mut reader = BufReader::new(File::open(path)?);
17    let mut images: Vec<tensor::Tensor> = Vec::new();
18
19    let _magic_number = read(&mut reader)?;
20    let num_images = read(&mut reader)?;
21    let num_rows = read(&mut reader)?;
22    let num_cols = read(&mut reader)?;
23
24    for _ in 0..num_images {
25        let mut image: Vec<Vec<f32>> = Vec::new();
26        for _ in 0..num_rows {
27            let mut row: Vec<f32> = Vec::new();
28            for _ in 0..num_cols {
29                let mut pixel = [0];
30                reader.read_exact(&mut pixel)?;
31                row.push(pixel[0] as f32 / 255.0);
32            }
33            image.push(row);
34        }
35        images.push(tensor::Tensor::triple(vec![image]).resize(tensor::Shape::Triple(1, 14, 14)));
36    }
37
38    Ok(images)
39}
40
41fn load_labels(file_path: &str, numbers: usize) -> Result<Vec<tensor::Tensor>> {
42    let mut reader = BufReader::new(File::open(file_path)?);
43    let _magic_number = read(&mut reader)?;
44    let num_labels = read(&mut reader)?;
45
46    let mut _labels = vec![0; num_labels as usize];
47    reader.read_exact(&mut _labels)?;
48
49    Ok(_labels
50        .iter()
51        .map(|&x| tensor::Tensor::one_hot(x as usize, numbers))
52        .collect())
53}
54
/// Trains a small looped convolutional network on MNIST and reports results.
///
/// Pipeline: load data -> build network (3 conv layers + maxpool + dense) ->
/// add a feedback loop from layer 2's output back to layer 0's input ->
/// train, plot losses, validate, and run a single prediction.
fn main() {
    // Panics if the dataset files are missing; acceptable for an example binary.
    let x_train = load_mnist("./examples/datasets/mnist/train-images-idx3-ubyte").unwrap();
    let y_train = load_labels("./examples/datasets/mnist/train-labels-idx1-ubyte", 10).unwrap();
    let x_test = load_mnist("./examples/datasets/mnist/t10k-images-idx3-ubyte").unwrap();
    let y_test = load_labels("./examples/datasets/mnist/t10k-labels-idx1-ubyte", 10).unwrap();
    println!(
        "Train: {} images, Test: {} images",
        x_train.len(),
        x_test.len()
    );

    // The network API consumes slices of tensor references; shadow the owned
    // vectors with reference views.
    let x_train: Vec<&tensor::Tensor> = x_train.iter().collect();
    let y_train: Vec<&tensor::Tensor> = y_train.iter().collect();
    let x_test: Vec<&tensor::Tensor> = x_test.iter().collect();
    let y_test: Vec<&tensor::Tensor> = y_test.iter().collect();

    // Input shape matches the 14x14 resize done in `load_mnist`.
    let mut network = network::Network::new(tensor::Shape::Triple(1, 14, 14));

    // Three identical 3x3 single-channel convolutions.
    // NOTE(review): the positional arguments after the kernel size appear to
    // be stride / padding / dilation -- confirm against the network API.
    network.convolution(
        1,
        (3, 3),
        (1, 1),
        (1, 1),
        (1, 1),
        activation::Activation::ReLU,
        None,
    );
    network.convolution(
        1,
        (3, 3),
        (1, 1),
        (1, 1),
        (1, 1),
        activation::Activation::ReLU,
        None,
    );
    network.convolution(
        1,
        (3, 3),
        (1, 1),
        (1, 1),
        (1, 1),
        activation::Activation::ReLU,
        None,
    );
    network.maxpool((2, 2), (2, 2));
    // Final classifier: 10 classes with softmax (matches the one-hot labels).
    network.dense(10, activation::Activation::Softmax, true, None);

    // Feedback connection: route layer 2's output back into layer 0's input.
    network.loopback(
        2,                      // From layer X's output.
        0,                      // To layer Y's input.
        1,                      // Number of loops.
        Arc::new(|_loops| 1.0), // Gradient scaling.
        false,                  // Input-to-input skip-connections.
    );
    network.set_accumulation(
        feedback::Accumulation::Add, // How the skip-connection is accumulated.
        feedback::Accumulation::Add, // How the pre- and post-activations are accumulated.
    );

    // Adam with standard defaults (lr=0.001, beta1=0.9, beta2=0.999, eps=1e-8).
    network.set_optimizer(optimizer::Adam::create(0.001, 0.9, 0.999, 1e-8, None));
    network.set_objective(
        objective::Objective::CrossEntropy, // Objective function
        None,                               // Gradient clipping
    );

    println!("{}", network);

    // Train the network
    // NOTE(review): the positional arguments look like (validation triple,
    // batch size = 32, epochs = 25, early stopping = Some(5)); the `10` in the
    // validation triple is unexplained -- confirm against `Network::learn`.
    let (train_loss, val_loss, val_acc) = network.learn(
        &x_train,
        &y_train,
        Some((&x_test, &y_test, 10)),
        32,
        25,
        Some(5),
    );
    plot::loss(
        &train_loss,
        &val_loss,
        &val_acc,
        "LOOP : MNIST",
        "./output/mnist/looping.png",
    );

    // Validate the network
    // NOTE(review): 1e-6 is presumably a numerical tolerance -- confirm.
    let (val_loss, val_acc) = network.validate(&x_test, &y_test, 1e-6);
    println!(
        "Final validation accuracy: {:.2} % and loss: {:.5}",
        val_acc * 100.0,
        val_loss
    );

    // Use the network
    let prediction = network.predict(x_test.get(0).unwrap());
    println!(
        "Prediction on input: Target: {}. Output: {}.",
        y_test[0].argmax(),
        prediction.argmax()
    );

    // let x = x_test.get(5).unwrap();
    // let y = y_test.get(5).unwrap();
    // plot::heatmap(
    //     &x,
    //     &format!("Target: {}", y.argmax()),
    //     "./output/mnist/input.png",
    // );

    // Plot the pre- and post-activation heatmaps for each (image) layer.
    // let (pre, post, _) = network.forward(x);
    // for (i, (i_pre, i_post)) in pre.iter().zip(post.iter()).enumerate() {
    //     let pre_title = format!("layer_{}_pre", i);
    //     let post_title = format!("layer_{}_post", i);
    //     let pre_file = format!("layer_{}_pre.png", i);
    //     let post_file = format!("layer_{}_post.png", i);
    //     plot::heatmap(&i_pre, &pre_title, &pre_file);
    //     plot::heatmap(&i_post, &post_title, &post_file);
    // }
}