neurotic is a library for machine learning in Rust.
§Quickstart
§Defining the network architecture
Start by defining the number of neurons in each layer, along with each layer's activation function. Note that the example provides one activation function for each of the four layers.
use neurotic::{
    activation::ActivationFunction,
    core::NeuralNetwork,
    optimisers::{Adam, NoOptimiser}, // assumed paths; the quickstart below uses both types
    training::{CostFunction, NetworkTrainer},
};
let layer_sizes = &[2, 32, 16, 1]; // 2 input neurons, 32 and 16 hidden-layer neurons, and 1 output neuron.
let activation_functions = &[
    ActivationFunction::Identity,
    ActivationFunction::ReLU,
    ActivationFunction::ReLU,
    ActivationFunction::Identity,
];
let cost_function = CostFunction::MeanSquaredError;
// Create a new instance of NeuralNetwork with the defined structure
let network = NeuralNetwork::new(layer_sizes, activation_functions, cost_function);
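The same constructor accepts any compatible sizes and activations. As a sketch (not part of the original quickstart), a network whose outputs should stay between 0 and 1 could end in a Sigmoid layer, which the activation module also provides:
// Sketch: a variant whose output is squashed into (0, 1) by a Sigmoid output layer.
let bounded_network = NeuralNetwork::new(
    &[2, 16, 1],
    &[
        ActivationFunction::Identity,
        ActivationFunction::ReLU,
        ActivationFunction::Sigmoid,
    ],
    CostFunction::MeanSquaredError,
);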
§Preparing the training data
Load in or generate your training data. Here is a simple example that generates training data for a sum function.
use rand::Rng;
// Returns a vector of tuples, each pairing the inputs with the target outputs.
fn generate_sum_data(size: usize, range: f32) -> Vec<(Vec<f32>, Vec<f32>)> {
    let mut data = Vec::with_capacity(size);
    for _ in 0..size {
        let a = rand::thread_rng().gen_range(0.0..range);
        let b = rand::thread_rng().gen_range(0.0..range);
        let output = a + b;
        data.push((vec![a, b], vec![output]));
    }
    data
}
// Store the generated training data in a variable
let training_data = generate_sum_data(1000, 10.0);
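Alternatively, if you want examples to sanity-check the network after training, hold out part of the generated data. The split below is plain Vec manipulation and assumes nothing beyond the generate_sum_data helper above:
// Alternative: generate extra examples and keep some aside for evaluation.
let mut all_data = generate_sum_data(1200, 10.0);
let test_data = all_data.split_off(1000); // the last 200 examples
let training_data = all_data; // the first 1000 examples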
§Training the network
Set up the training parameters, and train the network using a NetworkTrainer.
let learning_rate = 0.001; // Network's learning rate
let batch_size = 50; // Divide the training data into batches of this size
let epochs = 500; // Number of full passes over the training data
let mut trainer: NetworkTrainer<NoOptimiser> = NetworkTrainer::new(network, learning_rate, batch_size, None);
trainer.train(training_data, epochs);
§Saving or loading a network
Saving the trained network to a file:
trainer.get_network().save("path/to/file").expect("Failed to save network");
Loading a trained network from a file:
let network = NeuralNetwork::load("path/to/file").expect("Failed to load network");
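With a network in hand (freshly trained or loaded), the usual next step is running a forward pass on new inputs. The snippet below is only a sketch: this page does not show the forward-pass method, so feed_forward is a hypothetical name; check NeuralNetwork in the core module for the actual API.
// Sketch only: `feed_forward` is a hypothetical method name, not confirmed by this page.
let input = vec![3.0_f32, 4.0];
let prediction = network.feed_forward(&input);
println!("3 + 4 ~= {:?}", prediction); // a well-trained network should print something near [7.0]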
§Using Optimisers
This example uses the Adam optimiser; refer to the optimisers module on docs.rs for the other optimisers' parameters.
// Defining optimiser parameters and creating new optimiser.
// Each (rows, columns) pair is one layer's weight-matrix shape:
// neurons in the current layer by neurons in the previous layer.
let layer_dimensions = layer_sizes
    .windows(2)
    .map(|w| (w[1], w[0]))
    .collect::<Vec<_>>();
let beta1 = 0.9;
let beta2 = 0.999;
let epsilon = 1e-8;
let adam = Adam::new(&layer_dimensions, beta1, beta2, epsilon);
// Creating a new trainer with the optimiser
let mut trainer: NetworkTrainer<Adam> =
    NetworkTrainer::new(network, learning_rate, batch_size, Some(adam));
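Training then proceeds exactly as before; since the trainer was built with Some(adam), it applies the Adam optimiser when updating the weights.
trainer.train(training_data, epochs);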
§Modules
- activation: Activation functions including ReLU, LeakyReLU, Sigmoid, etc.
- core: Core components of the library.
- optimisers: Network training optimisers, such as the RMSProp and Adam optimisers.
- training: Training modules, such as the network trainer, cost functions, batching, etc.