use super::activations::relu;
use rand::prelude::*;
use rand_distr::*;
/// Signature of a scalar activation function applied element-wise to a layer's outputs.
pub type NNActivation = fn(f32) -> f32;
/// A single fully-connected (dense) layer:
/// `output[i] = activation(biases[i] + dot(weights_row_i, input))`.
#[derive(Clone)]
pub struct NeuralLayer {
    // Boxed fn pointer applied to each output neuron's weighted sum.
    pub activation: Box<NNActivation>,
    // Weight matrix stored flat in row-major order: the weight from input `j`
    // to output `i` lives at `weights[i * input_size + j]` (see `compute`).
    pub weights: Vec<f32>,
    // One bias per output neuron; `biases.len() == output_size`.
    pub biases: Vec<f32>,
    // Number of input values this layer consumes.
    pub input_size: usize,
    // Number of output values this layer produces.
    pub output_size: usize,
    // Flattened weight-matrix size: `input_size * output_size == weights.len()`.
    // NOTE(review): stored as `u32`, so very large layers would overflow — confirm intended.
    pub area: u32,
}
impl NeuralLayer {
    /// Creates a layer with `input_size` inputs and `output_size` outputs.
    ///
    /// Weights and biases are drawn from a standard normal distribution
    /// (mean 0.0, std-dev 1.0). When `activation` is `None`, `relu` is used.
    pub fn new(
        input_size: usize,
        output_size: usize,
        activation: Option<NNActivation>,
    ) -> NeuralLayer {
        let activation = activation.unwrap_or(relu);
        // Flattened weight-matrix size; kept as `u32` to match the public
        // `area` field. Overflows for layers with > u32::MAX weights.
        let area: u32 = input_size as u32 * output_size as u32;
        let mut weights: Vec<f32> = vec![0.0; area as usize];
        let mut biases: Vec<f32> = vec![0.0; output_size];
        // Infinite iterator of N(0, 1) samples. `unwrap` cannot fail here:
        // (0.0, 1.0) is a valid (mean, std-dev) pair for `Normal`.
        let mut random_distrib = Normal::<f32>::new(0.0, 1.0)
            .unwrap()
            .sample_iter(thread_rng());
        weights
            .as_mut_slice()
            .fill_with(|| random_distrib.next().unwrap());
        biases
            .as_mut_slice()
            .fill_with(|| random_distrib.next().unwrap());
        NeuralLayer {
            activation: Box::from(activation),
            weights,
            biases,
            input_size,
            output_size,
            area,
        }
    }

    /// Forward pass: writes `activation(bias + weights · inputs)` for each
    /// output neuron into `outputs`.
    ///
    /// Only the first `input_size` values of `inputs` and the first
    /// `output_size` slots of `outputs` are used; longer slices are allowed.
    ///
    /// # Errors
    /// In debug/test builds, returns `Err` when either slice is shorter than
    /// the layer requires. In release builds the checks are skipped and a
    /// too-short slice panics at the slicing below.
    pub fn compute(&self, mut inputs: &[f32], mut outputs: &mut [f32]) -> Result<(), String> {
        // BUG FIX: the original tested `cfg!(debug)` and `cfg!(tests)`, which
        // are not compiler-set cfg flags and are never true, so the length
        // validation was dead code in every build. The real flags are
        // `debug_assertions` (debug builds) and `test` (test builds).
        if cfg!(debug_assertions) || cfg!(test) {
            if inputs.len() < self.input_size {
                return Err("Source slice is smaller than the input size of this layer".to_owned());
            }
            if outputs.len() < self.output_size {
                return Err(
                    "Destination slice is smaller than the output size of this layer".to_owned(),
                );
            }
        }
        // Narrow both slices so the loop below is bounded by the layer shape.
        inputs = &inputs[0..self.input_size];
        outputs = &mut outputs[0..self.output_size];
        for (i, out) in outputs.iter_mut().enumerate() {
            // Row `i` of the row-major weight matrix starts at this offset.
            let idx_base: usize = i * self.input_size;
            let value = (self.activation)(
                self.biases[i]
                    + inputs
                        .iter()
                        .zip(&self.weights[idx_base..])
                        .map(|(inp, w)| (*inp) * (*w))
                        .sum::<f32>(),
            );
            *out = value;
        }
        Ok(())
    }
}
/// A feed-forward network: the layers are applied in order, each layer's
/// output becoming the next layer's input.
#[derive(Clone)]
pub struct SimpleNeuralNetwork {
    // Layers in forward-pass order. For `compute_values` to work, layer `k`'s
    // `output_size` must equal layer `k + 1`'s `input_size`.
    pub layers: Vec<NeuralLayer>,
}
impl SimpleNeuralNetwork {
    /// Builds a network from consecutive layer sizes: `layer_sizes[k]` inputs
    /// feeding `layer_sizes[k + 1]` outputs, so `n` sizes produce `n - 1`
    /// layers. `activations[k]` picks layer `k`'s activation (`None` = relu).
    ///
    /// # Panics
    /// Panics if `activations` has fewer than `layer_sizes.len() - 1` entries.
    pub fn new_simple(layer_sizes: &[usize], activations: &[Option<NNActivation>]) -> Self {
        SimpleNeuralNetwork {
            // BUG FIX: the original used `.take(layer_sizes.len() - 1)`, which
            // underflows and panics when `layer_sizes` is empty. `windows(2)`
            // yields the same consecutive pairs and is simply empty for
            // slices shorter than 2.
            layers: layer_sizes
                .windows(2)
                .enumerate()
                .map(|(i, pair)| NeuralLayer::new(pair[0], pair[1], activations[i]))
                .collect(),
        }
    }

    /// Like [`Self::new_simple`], but uses the same activation for every layer.
    pub fn new_simple_with_activation(
        layer_sizes: &[usize],
        activation: Option<NNActivation>,
    ) -> Self {
        // One more entry than strictly needed (n sizes -> n - 1 layers);
        // the surplus element is harmless.
        Self::new_simple(layer_sizes, vec![activation; layer_sizes.len()].as_slice())
    }

    /// Input size of the network, i.e. of its first layer.
    ///
    /// # Errors
    /// Returns `Err` when the network has no layers.
    pub fn input_size(&self) -> Result<usize, String> {
        match self.layers.first() {
            None => Err(
                "There are no layers in this network; input size could not be determined"
                    .to_owned(),
            ),
            Some(layer) => Ok(layer.input_size),
        }
    }

    /// Output size of the network, i.e. of its last layer.
    ///
    /// # Errors
    /// Returns `Err` when the network has no layers.
    pub fn output_size(&self) -> Result<usize, String> {
        match self.layers.last() {
            None => Err(
                "There are no layers in this network; output size could not be determined"
                    .to_owned(),
            ),
            Some(layer) => Ok(layer.output_size),
        }
    }

    /// Runs a full forward pass: feeds `inputs` through every layer in order
    /// and copies the final layer's output into `outputs`.
    ///
    /// # Errors
    /// In debug/test builds, returns `Err` when the network is empty or the
    /// slice lengths do not match the network's input/output sizes. In
    /// release builds the checks are skipped and a mismatch panics inside
    /// the layer computation or the final copy.
    pub fn compute_values(&self, inputs: &[f32], outputs: &mut [f32]) -> Result<(), String> {
        // BUG FIX: the original tested `cfg!(debug)` and `cfg!(tests)`, which
        // are not compiler-set cfg flags and are never true, so these
        // validations never ran. The real flags are `debug_assertions`
        // (debug builds) and `test` (test builds).
        if cfg!(debug_assertions) || cfg!(test) {
            if self.layers.is_empty() {
                return Err("There are no layers in this network".to_owned());
            }
            // `unwrap` is safe: the emptiness check above guarantees both
            // `input_size()` and `output_size()` succeed.
            if inputs.len() != self.input_size().unwrap() {
                return Err(
                    "The number of input values does not match the input size of this network"
                        .to_owned(),
                );
            }
            if outputs.len() != self.output_size().unwrap() {
                return Err("The size of the destination array does not match the output size of this network".to_owned());
            }
        }
        // Ping-pong buffer: each layer writes into a fresh vector that then
        // becomes the next layer's input.
        let mut in_values = inputs.to_vec();
        for layer in &self.layers {
            let mut dest = vec![0.0; layer.output_size];
            layer.compute(&in_values, &mut dest)?;
            in_values = dest;
        }
        outputs.copy_from_slice(&in_values);
        Ok(())
    }
}