// fashion_loop/looping.rs

1// Copyright (C) 2024 Hallvard Høyland Lavik
2
3use neurons::{activation, feedback, network, objective, optimizer, plot, tensor};
4
5use std::fs::File;
6use std::io::{BufReader, Read, Result};
7use std::sync::Arc;
8
/// Reads the next four bytes from `reader` and decodes them as a
/// big-endian `u32` (the integer encoding used by IDX headers).
fn read(reader: &mut dyn Read) -> Result<u32> {
    let mut bytes = [0u8; 4];
    reader.read_exact(&mut bytes)?;
    let value = u32::from_be_bytes(bytes);
    Ok(value)
}
14
15fn load_mnist(path: &str) -> Result<Vec<tensor::Tensor>> {
16    let mut reader = BufReader::new(File::open(path)?);
17    let mut images: Vec<tensor::Tensor> = Vec::new();
18
19    let _magic_number = read(&mut reader)?;
20    let num_images = read(&mut reader)?;
21    let num_rows = read(&mut reader)?;
22    let num_cols = read(&mut reader)?;
23
24    for _ in 0..num_images {
25        let mut image: Vec<Vec<f32>> = Vec::new();
26        for _ in 0..num_rows {
27            let mut row: Vec<f32> = Vec::new();
28            for _ in 0..num_cols {
29                let mut pixel = [0];
30                reader.read_exact(&mut pixel)?;
31                row.push(pixel[0] as f32 / 255.0);
32            }
33            image.push(row);
34        }
35        images.push(tensor::Tensor::triple(vec![image]).resize(tensor::Shape::Triple(1, 14, 14)));
36    }
37
38    Ok(images)
39}
40
41fn load_labels(file_path: &str, numbers: usize) -> Result<Vec<tensor::Tensor>> {
42    let mut reader = BufReader::new(File::open(file_path)?);
43    let _magic_number = read(&mut reader)?;
44    let num_labels = read(&mut reader)?;
45
46    let mut _labels = vec![0; num_labels as usize];
47    reader.read_exact(&mut _labels)?;
48
49    Ok(_labels
50        .iter()
51        .map(|&x| tensor::Tensor::one_hot(x as usize, numbers))
52        .collect())
53}
54
/// Trains a small convolutional network with a feedback loop-back connection
/// on Fashion-MNIST, plots the learning curves, and reports final accuracy.
fn main() {
    // Load the Fashion-MNIST train/test splits (images + 10-class one-hot labels).
    let x_train = load_mnist("./examples/datasets/mnist-fashion/train-images-idx3-ubyte").unwrap();
    let y_train = load_labels(
        "./examples/datasets/mnist-fashion/train-labels-idx1-ubyte",
        10,
    )
    .unwrap();
    let x_test = load_mnist("./examples/datasets/mnist-fashion/t10k-images-idx3-ubyte").unwrap();
    let y_test = load_labels(
        "./examples/datasets/mnist-fashion/t10k-labels-idx1-ubyte",
        10,
    )
    .unwrap();
    println!(
        "Train: {} images, Test: {} images",
        x_train.len(),
        x_test.len()
    );

    // The network API consumes slices of tensor references, so shadow the
    // owned vectors with borrowed views.
    let x_train: Vec<&tensor::Tensor> = x_train.iter().collect();
    let y_train: Vec<&tensor::Tensor> = y_train.iter().collect();
    let x_test: Vec<&tensor::Tensor> = x_test.iter().collect();
    let y_test: Vec<&tensor::Tensor> = y_test.iter().collect();

    // Input shape matches the resize in `load_mnist`: 1 channel, 14x14 pixels.
    let mut network = network::Network::new(tensor::Shape::Triple(1, 14, 14));

    // Three identical single-filter 3x3 ReLU convolution layers.
    // NOTE(review): the positional arguments presumably are
    // (filters, kernel, stride, padding, dilation, activation, dropout)
    // — confirm against `network::Network::convolution`.
    network.convolution(
        1,
        (3, 3),
        (1, 1),
        (1, 1),
        (1, 1),
        activation::Activation::ReLU,
        None,
    );
    network.convolution(
        1,
        (3, 3),
        (1, 1),
        (1, 1),
        (1, 1),
        activation::Activation::ReLU,
        None,
    );
    network.convolution(
        1,
        (3, 3),
        (1, 1),
        (1, 1),
        (1, 1),
        activation::Activation::ReLU,
        None,
    );
    network.maxpool((2, 2), (2, 2));
    network.dense(10, activation::Activation::Softmax, true, None);

    // Feedback connection: route layer 2's output back into layer 0's input,
    // making the convolution stack recurrent for the given number of loops.
    network.loopback(
        2,                      // From layer X's output.
        0,                      // To layer Y's input.
        1,                      // Number of loops.
        Arc::new(|_loops| 1.0), // Gradient scaling.
        false,                  // Input-to-input skip-connections.
    );
    network.set_accumulation(
        feedback::Accumulation::Add, // How the skip-connection is accumulated.
        feedback::Accumulation::Add, // How the pre- and post-activations are accumulated.
    );

    // Adam with standard defaults (lr=0.001, beta1=0.9, beta2=0.999, eps=1e-8).
    network.set_optimizer(optimizer::Adam::create(0.001, 0.9, 0.999, 1e-8, None));
    network.set_objective(objective::Objective::CrossEntropy, None);

    // Print the architecture summary before training.
    println!("{}", network);

    // Train the network.
    // NOTE(review): arguments presumably are (inputs, targets,
    // validation-(x, y, interval), batch size, epochs, early-stopping
    // patience) — confirm against `network::Network::learn`.
    let (train_loss, val_loss, val_acc) = network.learn(
        &x_train,
        &y_train,
        Some((&x_test, &y_test, 10)),
        32,
        25,
        Some(5),
    );
    plot::loss(
        &train_loss,
        &val_loss,
        &val_acc,
        "LOOP : Fashion-MNIST",
        "./output/mnist-fashion/looping.png",
    );

    // Validate the network on the full test set.
    let (val_loss, val_acc) = network.validate(&x_test, &y_test, 1e-6);
    println!(
        "Final validation accuracy: {:.2} % and loss: {:.5}",
        val_acc * 100.0,
        val_loss
    );

    // Use the network: predict a single sample and compare to its target.
    let prediction = network.predict(x_test.get(0).unwrap());
    println!(
        "Prediction on input: Target: {}. Output: {}.",
        y_test[0].argmax(),
        prediction.argmax()
    );

    // let x = x_test.get(5).unwrap();
    // let y = y_test.get(5).unwrap();
    // Plot the pre- and post-activation heatmaps for each (image) layer.
    // let (pre, post, _) = network.forward(x);
    // for (i, (i_pre, i_post)) in pre.iter().zip(post.iter()).enumerate() {
    //     let pre_title = format!("layer_{}_pre", i);
    //     let post_title = format!("layer_{}_post", i);
    //     let pre_file = format!("layer_{}_pre.png", i);
    //     let post_file = format!("layer_{}_post.png", i);
    //     plot::heatmap(&i_pre, &pre_title, &pre_file);
    //     plot::heatmap(&i_post, &post_title, &post_file);
    // }
}