//! native_neural_network 0.3.1
//!
//! A `no_std` Rust library for native neural networks (`.rnn` format).
use crate::activations::ActivationKind;

/// Errors reported while validating or building a layer plan.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LayerError {
    /// The plan contains no layers.
    EmptyPlan,
    /// A layer has a zero input or output width, or fewer than two sizes
    /// were passed to `build_from_layers`.
    InvalidShape,
    /// A weight/bias range overflows `usize` or does not fit its buffer.
    InvalidRange,
    /// Consecutive layers do not chain (a layer's input width differs from
    /// the previous layer's output width).
    IncompatibleChain,
    /// The caller-supplied output buffer cannot hold all layer specs.
    BufferTooSmall,
    /// Computed weight/bias totals do not match the declared buffer lengths.
    CountMismatch,
}

/// Shape and parameter placement of a single dense layer.
///
/// Weights and biases for all layers live in shared flat buffers; each
/// descriptor records where this layer's slice of those buffers begins.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct LayerDesc {
    /// Number of inputs the layer consumes.
    pub input_size: usize,
    /// Number of outputs the layer produces.
    pub output_size: usize,
    /// Start index of this layer's weights in the shared weight buffer
    /// (the layer occupies `input_size * output_size` elements).
    pub weight_offset: usize,
    /// Start index of this layer's biases in the shared bias buffer
    /// (the layer occupies `output_size` elements).
    pub bias_offset: usize,
    /// Activation function kind applied by this layer.
    pub activation: ActivationKind,
}

impl LayerDesc {
    /// Number of weight elements this layer needs
    /// (`input_size * output_size`), or `None` on `usize` overflow.
    pub fn weight_len(&self) -> Option<usize> {
        self.input_size.checked_mul(self.output_size)
    }

    /// Returns `true` when the layer has a non-zero shape and both its
    /// weight and bias ranges fit entirely inside buffers of the given
    /// lengths. All arithmetic is overflow-checked; any overflow means the
    /// described range is unaddressable and the layer is invalid.
    pub fn validate_ranges(&self, weights_len: usize, biases_len: usize) -> bool {
        if self.input_size == 0 || self.output_size == 0 {
            return false;
        }
        // A range [offset, offset + len) fits iff the end can be computed
        // without overflow and does not exceed the buffer length.
        let fits = |offset: usize, len: usize, buf_len: usize| {
            offset
                .checked_add(len)
                .map_or(false, |end| end <= buf_len)
        };
        match self.weight_len() {
            Some(w_len) => {
                fits(self.weight_offset, w_len, weights_len)
                    && fits(self.bias_offset, self.output_size, biases_len)
            }
            None => false,
        }
    }
}

/// A single layer in a plan.
///
/// Only dense layers exist today; the enum leaves room for other kinds.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LayerSpec {
    /// A dense layer described by its shape and parameter offsets.
    Dense(LayerDesc),
}

impl LayerSpec {
    /// Input width of the layer.
    pub fn input_size(&self) -> usize {
        // Single-variant enum: the pattern is irrefutable.
        let LayerSpec::Dense(desc) = self;
        desc.input_size
    }

    /// Output width of the layer.
    pub fn output_size(&self) -> usize {
        let LayerSpec::Dense(desc) = self;
        desc.output_size
    }

    /// Delegates range validation to the underlying descriptor.
    pub fn validate_ranges(&self, weights_len: usize, biases_len: usize) -> bool {
        let LayerSpec::Dense(desc) = self;
        desc.validate_ranges(weights_len, biases_len)
    }
}

/// Borrowed view of a full network plan with `f32` parameters.
///
/// `layers` describes each layer; `weights` and `biases` are the shared
/// flat buffers that the layers' offsets index into.
pub struct LayerPlan<'a> {
    /// Ordered layer descriptors, first to last.
    pub layers: &'a [LayerSpec],
    /// Flat weight buffer shared by all layers.
    pub weights: &'a [f32],
    /// Flat bias buffer shared by all layers.
    pub biases: &'a [f32],
}

/// Borrowed view of a full network plan with `f64` parameters.
///
/// Identical to [`LayerPlan`] except for the parameter element type.
pub struct LayerPlanF64<'a> {
    /// Ordered layer descriptors, first to last.
    pub layers: &'a [LayerSpec],
    /// Flat weight buffer shared by all layers.
    pub weights: &'a [f64],
    /// Flat bias buffer shared by all layers.
    pub biases: &'a [f64],
}

impl<'a> LayerPlanF64<'a> {
    /// Validates the whole plan: it must be non-empty, every layer must have
    /// non-zero input/output widths, consecutive layers must chain (output
    /// width equals the next layer's input width), and every layer's
    /// weight/bias ranges must fit the provided buffers.
    pub fn validate(&self) -> Result<(), LayerError> {
        if self.layers.is_empty() {
            return Err(LayerError::EmptyPlan);
        }

        let mut prev_out = self.layers[0].input_size();
        if prev_out == 0 {
            return Err(LayerError::InvalidShape);
        }

        for (idx, layer) in self.layers.iter().enumerate() {
            // Chain check only applies from the second layer onward.
            if idx > 0 && layer.input_size() != prev_out {
                return Err(LayerError::IncompatibleChain);
            }
            if layer.output_size() == 0 {
                return Err(LayerError::InvalidShape);
            }
            if !layer.validate_ranges(self.weights.len(), self.biases.len()) {
                return Err(LayerError::InvalidRange);
            }
            prev_out = layer.output_size();
        }

        Ok(())
    }

    /// Input width of the first layer, or `None` for an empty plan.
    pub fn input_size(&self) -> Option<usize> {
        self.layers.first().map(|x| x.input_size())
    }

    /// Output width of the last layer, or `None` for an empty plan.
    pub fn output_size(&self) -> Option<usize> {
        self.layers.last().map(|x| x.output_size())
    }

    /// Widest input or output across all layers (useful for sizing scratch
    /// buffers), or `None` when the plan is empty or all widths are zero.
    pub fn max_width(&self) -> Option<usize> {
        let mut max_w = 0usize;
        for l in self.layers {
            let in_w = l.input_size();
            let out_w = l.output_size();
            if in_w > max_w {
                max_w = in_w;
            }
            if out_w > max_w {
                max_w = out_w;
            }
        }
        if max_w == 0 {
            None
        } else {
            Some(max_w)
        }
    }

    /// Total neuron count: the sum of every layer's input width plus the
    /// final output width. Returns `None` on `usize` overflow or for an
    /// empty plan. Added to mirror [`LayerPlan::total_neurons`] — the two
    /// plan types previously had inconsistent APIs.
    pub fn total_neurons(&self) -> Option<usize> {
        let mut total = 0usize;
        for layer in self.layers {
            total = total.checked_add(layer.input_size())?;
        }
        total.checked_add(self.output_size()?)
    }
}

impl<'a> LayerPlan<'a> {
    pub fn validate(&self) -> Result<(), LayerError> {
        if self.layers.is_empty() {
            return Err(LayerError::EmptyPlan);
        }

        let mut prev_out = self.layers[0].input_size();
        if prev_out == 0 {
            return Err(LayerError::InvalidShape);
        }

        for (idx, layer) in self.layers.iter().enumerate() {
            if idx > 0 && layer.input_size() != prev_out {
                return Err(LayerError::IncompatibleChain);
            }
            if layer.output_size() == 0 {
                return Err(LayerError::InvalidShape);
            }
            if !layer.validate_ranges(self.weights.len(), self.biases.len()) {
                return Err(LayerError::InvalidRange);
            }
            prev_out = layer.output_size();
        }

        Ok(())
    }

    pub fn input_size(&self) -> Option<usize> {
        self.layers.first().map(|x| x.input_size())
    }

    pub fn output_size(&self) -> Option<usize> {
        self.layers.last().map(|x| x.output_size())
    }

    pub fn max_width(&self) -> Option<usize> {
        let mut max_w = 0usize;
        for l in self.layers {
            let in_w = l.input_size();
            let out_w = l.output_size();
            if in_w > max_w {
                max_w = in_w;
            }
            if out_w > max_w {
                max_w = out_w;
            }
        }
        if max_w == 0 {
            None
        } else {
            Some(max_w)
        }
    }

    pub fn total_neurons(&self) -> Option<usize> {
        let mut total = 0usize;
        for layer in self.layers {
            total = total.checked_add(layer.input_size())?;
        }
        total.checked_add(self.output_size()?)
    }
}

/// Builds a chain of dense [`LayerSpec`]s from a list of layer widths
/// (`layers[0]` is the network input width, `layers[last]` the output
/// width), writing one spec per adjacent pair into `out`.
///
/// Hidden layers get `hidden_activation`; the final layer gets
/// `output_activation`. Weight/bias offsets are packed contiguously, and
/// the accumulated totals must equal `weights_len` / `biases_len` exactly.
///
/// Returns the number of specs written, or:
/// - [`LayerError::InvalidShape`] for fewer than two widths or any zero width,
/// - [`LayerError::BufferTooSmall`] when `out` cannot hold every spec,
/// - [`LayerError::InvalidRange`] on `usize` overflow of a size or offset,
/// - [`LayerError::CountMismatch`] when totals disagree with the declared lengths.
pub fn build_from_layers(
    layers: &[usize],
    hidden_activation: ActivationKind,
    output_activation: ActivationKind,
    weights_len: usize,
    biases_len: usize,
    out: &mut [LayerSpec],
) -> Result<usize, LayerError> {
    if layers.len() < 2 {
        return Err(LayerError::InvalidShape);
    }

    let layer_count = layers.len() - 1;
    if out.len() < layer_count {
        return Err(LayerError::BufferTooSmall);
    }

    let (mut w_off, mut b_off) = (0usize, 0usize);

    // Each adjacent pair of widths defines one dense layer.
    for (i, pair) in layers.windows(2).enumerate() {
        let (input_size, output_size) = (pair[0], pair[1]);
        if input_size == 0 || output_size == 0 {
            return Err(LayerError::InvalidShape);
        }

        let weight_len = input_size
            .checked_mul(output_size)
            .ok_or(LayerError::InvalidRange)?;
        // Only the last layer uses the output activation.
        let activation = if i + 1 == layer_count {
            output_activation
        } else {
            hidden_activation
        };

        out[i] = LayerSpec::Dense(LayerDesc {
            input_size,
            output_size,
            weight_offset: w_off,
            bias_offset: b_off,
            activation,
        });

        w_off = w_off
            .checked_add(weight_len)
            .ok_or(LayerError::InvalidRange)?;
        b_off = b_off
            .checked_add(output_size)
            .ok_or(LayerError::InvalidRange)?;
    }

    if (w_off, b_off) != (weights_len, biases_len) {
        return Err(LayerError::CountMismatch);
    }

    Ok(layer_count)
}