//! native_neural_network 0.1.6
//!
//! `no_std` Rust library for native neural networks (`.rnn`).
//! See the crate documentation for details.
use crate::network::NeuralNetwork;

/// A single neuron projected onto the surface of a 5-D sphere.
///
/// `Default` is derived: it yields the all-zero point (layer 0, neuron 0,
/// zero bias/activation, position at the origin), which is exactly what the
/// previous hand-written `Default` impl produced.
#[derive(Clone, Copy, Debug, Default)]
pub struct NeuronPoint {
    /// Index of the layer this neuron belongs to (0 = input layer).
    pub layer: usize,
    /// Index of the neuron within its layer.
    pub neuron: usize,
    /// Bias value (0.0 for input-layer neurons).
    pub bias: f32,
    /// Last recorded activation value.
    pub activation: f32,
    /// Cartesian coordinates in 5-D space.
    pub position: [f32; 5],
}

/// Errors returned by [`Sphere5D`] construction and fill operations.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SphereError {
    /// Radius was NaN, infinite, zero, or negative.
    InvalidRadius,
    /// The backing slice cannot hold every neuron of the network.
    CapacityTooSmall,
    /// Network has fewer than two layers, or its bias count does not match
    /// the expected count for its layer sizes.
    InvalidLayout,
    /// A bias read ran past the end of the network's bias slice
    /// (defensive; the up-front count check should prevent this).
    BiasOutOfBounds,
}

/// Fixed-capacity collection of neurons placed deterministically on the
/// surface of a 5-D sphere, backed by a caller-provided slice (no heap
/// allocation, suitable for `no_std`).
pub struct Sphere5D<'a> {
    /// Backing storage; only the first `len` entries are initialized.
    points: &'a mut [NeuronPoint],
    /// Number of valid entries in `points` (invariant: `len <= points.len()`).
    len: usize,
    /// Sphere radius; validated finite and strictly positive at construction.
    radius: f32,
}

impl<'a> Sphere5D<'a> {
    /// Creates an empty sphere backed by `points`.
    ///
    /// # Errors
    /// Returns [`SphereError::InvalidRadius`] if `radius` is not finite or
    /// not strictly positive.
    pub fn new(points: &'a mut [NeuronPoint], radius: f32) -> Result<Self, SphereError> {
        if !radius.is_finite() || radius <= 0.0 {
            return Err(SphereError::InvalidRadius);
        }
        Ok(Self {
            points,
            len: 0,
            radius,
        })
    }

    /// Builds a sphere and immediately fills it from `network`.
    ///
    /// # Errors
    /// Propagates errors from [`Self::new`] and [`Self::fill_from_network`].
    pub fn from_network<'n>(
        network: &NeuralNetwork<'n>,
        points: &'a mut [NeuronPoint],
        radius: f32,
    ) -> Result<Self, SphereError> {
        let mut sphere = Self::new(points, radius)?;
        sphere.fill_from_network(network)?;
        Ok(sphere)
    }

    /// Radius used when projecting neuron positions onto the sphere surface.
    pub fn radius(&self) -> f32 {
        self.radius
    }

    /// Number of neurons currently stored.
    pub fn len(&self) -> usize {
        self.len
    }

    /// Total capacity of the backing slice.
    pub fn capacity(&self) -> usize {
        self.points.len()
    }

    /// Returns `true` when no neurons have been added.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Stored neurons as an immutable slice (only the initialized prefix).
    pub fn as_slice(&self) -> &[NeuronPoint] {
        &self.points[..self.len]
    }

    /// Stored neurons as a mutable slice (only the initialized prefix).
    pub fn as_mut_slice(&mut self) -> &mut [NeuronPoint] {
        &mut self.points[..self.len]
    }

    /// Adds one neuron, deriving a deterministic position on the sphere
    /// surface from the `(layer, neuron)` pair.
    ///
    /// Returns the index of the stored point, or `None` when the backing
    /// slice is full.
    pub fn add_neuron(
        &mut self,
        layer: usize,
        neuron: usize,
        bias: f32,
        activation: f32,
    ) -> Option<usize> {
        if self.len >= self.points.len() {
            return None;
        }
        let seed = mix_seed(layer as u64, neuron as u64);
        let position = sphere_pos_from_seed(seed, self.radius);
        let idx = self.len;
        self.points[idx] = NeuronPoint {
            layer,
            neuron,
            bias,
            activation,
            position,
        };
        self.len += 1;
        Some(idx)
    }

    /// Finds the stored point closest to `position`.
    ///
    /// Returns `(index, euclidean_distance)`, or `None` when the sphere is
    /// empty. Ties keep the lowest index.
    pub fn nearest(&self, position: [f32; 5]) -> Option<(usize, f32)> {
        // Iterate over the initialized prefix instead of indexing 0..len:
        // avoids a bounds check per access (clippy::needless_range_loop).
        let (first, rest) = self.as_slice().split_first()?;
        let mut best_idx = 0usize;
        let mut best_d2 = dist2(first.position, position);
        for (offset, point) in rest.iter().enumerate() {
            let d2 = dist2(point.position, position);
            // Strict `<` preserves the first-seen winner on ties.
            if d2 < best_d2 {
                best_d2 = d2;
                best_idx = offset + 1;
            }
        }
        Some((best_idx, crate::math::sqrtf(best_d2)))
    }

    /// Writes the indices of all stored points within `max_distance` of
    /// `position` into `out_indices`, stopping when it is full.
    ///
    /// Returns the number of indices written. A negative (or NaN)
    /// `max_distance` or an empty output buffer yields 0.
    pub fn neighbors_within(
        &self,
        position: [f32; 5],
        max_distance: f32,
        out_indices: &mut [usize],
    ) -> usize {
        if max_distance < 0.0 || out_indices.is_empty() {
            return 0;
        }
        // Compare squared distances to skip the sqrt per point.
        let max_d2 = max_distance * max_distance;
        let mut written = 0usize;
        for (i, point) in self.as_slice().iter().enumerate() {
            if written >= out_indices.len() {
                break;
            }
            if dist2(point.position, position) <= max_d2 {
                out_indices[written] = i;
                written += 1;
            }
        }
        written
    }

    /// Clears the sphere and inserts one point per neuron of `network`, in
    /// layer-major order. Input-layer neurons (layer 0) get a bias of 0.0;
    /// later layers take biases sequentially from `network.biases`.
    /// Activations are initialized to 0.0.
    ///
    /// # Errors
    /// - [`SphereError::InvalidLayout`] when the network has fewer than two
    ///   layers or its bias count is inconsistent with its layer sizes.
    /// - [`SphereError::CapacityTooSmall`] when the backing slice cannot
    ///   hold every neuron.
    /// - [`SphereError::BiasOutOfBounds`] when a bias read runs past the end
    ///   of the bias slice (defensive; the up-front count check should make
    ///   this unreachable).
    ///
    /// On error the sphere may be left partially filled.
    pub fn fill_from_network<'n>(&mut self, network: &NeuralNetwork<'n>) -> Result<(), SphereError> {
        if network.layers.len() < 2 {
            return Err(SphereError::InvalidLayout);
        }
        let expected_biases = NeuralNetwork::expected_biases_count(network.layers).ok_or(SphereError::InvalidLayout)?;
        if expected_biases != network.biases.len() {
            return Err(SphereError::InvalidLayout);
        }

        // saturating_add guards against overflow on pathological layer sizes.
        let total_neurons = network.layers.iter().fold(0usize, |acc, &x| acc.saturating_add(x));
        if total_neurons > self.points.len() {
            return Err(SphereError::CapacityTooSmall);
        }

        self.len = 0;
        let mut bias_cursor = 0usize;
        for (layer_idx, &layer_size) in network.layers.iter().enumerate() {
            for neuron_idx in 0..layer_size {
                let bias = if layer_idx == 0 {
                    // Input layer carries no bias.
                    0.0
                } else {
                    let b = *network.biases.get(bias_cursor).ok_or(SphereError::BiasOutOfBounds)?;
                    bias_cursor += 1;
                    b
                };
                self.add_neuron(layer_idx, neuron_idx, bias, 0.0)
                    .ok_or(SphereError::CapacityTooSmall)?;
            }
        }
        Ok(())
    }
}

/// Squared Euclidean distance between two 5-D points.
fn dist2(a: [f32; 5], b: [f32; 5]) -> f32 {
    a.iter()
        .zip(b.iter())
        .map(|(x, y)| {
            let d = x - y;
            d * d
        })
        .sum()
}
/// Maps `seed` to a deterministic point on the surface of a 5-D sphere of
/// the given `radius`: five pseudo-random coordinates in [-1, 1] are drawn
/// from a splitmix64 stream, then the vector is scaled to length `radius`.
fn sphere_pos_from_seed(seed: u64, radius: f32) -> [f32; 5] {
    let mut coords = [0.0f32; 5];
    let mut state = seed;
    for c in coords.iter_mut() {
        state = splitmix64(state);
        // Map the full u64 range onto [0, 1] via f64, then onto [-1, 1].
        let unit = (state as f64) / (u64::MAX as f64);
        *c = (unit as f32) * 2.0 - 1.0;
    }

    // Squared length of the raw direction vector (sequential fold from 0.0).
    let norm2: f32 = coords.iter().map(|c| c * c).sum();

    // Degenerate (practically unreachable) near-zero direction: fall back
    // to a fixed pole instead of dividing by ~0.
    if norm2 <= 1e-12 {
        return [radius, 0.0, 0.0, 0.0, 0.0];
    }

    // Keep the original multiply order (c * inv_norm * radius) so results
    // stay bit-identical.
    let inv_norm = 1.0 / crate::math::sqrtf(norm2);
    for c in coords.iter_mut() {
        *c = *c * inv_norm * radius;
    }
    coords
}

/// Combines a `(layer, neuron)` index pair into a single well-mixed 64-bit
/// seed.
fn mix_seed(a: u64, b: u64) -> u64 {
    // Golden-ratio increment (the same constant splitmix64 uses) keeps the
    // (0, 0) pair from collapsing to a plain zero seed; the rotation
    // decorrelates `b` from `a`.
    const GOLDEN_GAMMA: u64 = 0x9E3779B97F4A7C15;
    let combined = a ^ b.rotate_left(17) ^ GOLDEN_GAMMA;
    splitmix64(combined)
}

/// One output of the splitmix64 generator for state `x`, in stateless form:
/// the caller threads the returned value through successive calls to walk
/// the stream.
fn splitmix64(mut x: u64) -> u64 {
    // Standard splitmix64 constants (Steele/Lea/Flood finalizer).
    const GAMMA: u64 = 0x9E3779B97F4A7C15;
    const MIX_A: u64 = 0xBF58476D1CE4E5B9;
    const MIX_B: u64 = 0x94D049BB133111EB;

    x = x.wrapping_add(GAMMA);
    let mut z = x ^ (x >> 30);
    z = z.wrapping_mul(MIX_A);
    z ^= z >> 27;
    z = z.wrapping_mul(MIX_B);
    z ^ (z >> 31)
}