native_neural_network 0.3.1

A no_std Rust library for native neural networks (.rnn)
Documentation
/// Errors returned by the gradient-utility functions in this module.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GradientError {
    /// The input slice contained no elements.
    Empty,
    /// The clipping threshold was non-finite (NaN/infinite) or not strictly positive.
    InvalidThreshold,
}

/// Computes the Euclidean (L2) norm of `values`.
///
/// # Errors
///
/// Returns `GradientError::Empty` if `values` is empty.
pub fn l2_norm_f32(values: &[f32]) -> Result<f32, GradientError> {
    if values.is_empty() {
        return Err(GradientError::Empty);
    }
    // Accumulate the sum of squares in the same left-to-right order as a plain loop.
    let sum_of_squares = values.iter().fold(0.0f32, |acc, &v| acc + v * v);
    Ok(crate::math::sqrtf(sum_of_squares))
}

/// Computes the Euclidean (L2) norm of `values`.
///
/// # Errors
///
/// Returns `GradientError::Empty` if `values` is empty.
pub fn l2_norm_f64(values: &[f64]) -> Result<f64, GradientError> {
    if values.is_empty() {
        return Err(GradientError::Empty);
    }
    // Accumulate the sum of squares in the same left-to-right order as a plain loop.
    let sum_of_squares = values.iter().fold(0.0f64, |acc, &v| acc + v * v);
    Ok(crate::math::sqrtd(sum_of_squares))
}

/// Scales `values` in place so that their global L2 norm does not exceed `max_norm`.
///
/// Returns the norm measured *before* any scaling was applied. If the norm is
/// already within the threshold, `values` is left untouched.
///
/// # Errors
///
/// Returns `GradientError::Empty` if `values` is empty, or
/// `GradientError::InvalidThreshold` if `max_norm` is non-finite or not
/// strictly positive.
pub fn clip_by_global_norm_f32(values: &mut [f32], max_norm: f32) -> Result<f32, GradientError> {
    if values.is_empty() {
        return Err(GradientError::Empty);
    }
    // NaN fails `is_finite`, so a NaN threshold is rejected here as well.
    let threshold_is_valid = max_norm.is_finite() && max_norm > 0.0;
    if !threshold_is_valid {
        return Err(GradientError::InvalidThreshold);
    }

    let norm = l2_norm_f32(values)?;
    if norm > max_norm {
        let scale = max_norm / norm;
        values.iter_mut().for_each(|v| *v *= scale);
    }
    Ok(norm)
}

/// Scales `values` in place so that their global L2 norm does not exceed `max_norm`.
///
/// Returns the norm measured *before* any scaling was applied. If the norm is
/// already within the threshold, `values` is left untouched.
///
/// # Errors
///
/// Returns `GradientError::Empty` if `values` is empty, or
/// `GradientError::InvalidThreshold` if `max_norm` is non-finite or not
/// strictly positive.
pub fn clip_by_global_norm_f64(values: &mut [f64], max_norm: f64) -> Result<f64, GradientError> {
    if values.is_empty() {
        return Err(GradientError::Empty);
    }
    // NaN fails `is_finite`, so a NaN threshold is rejected here as well.
    let threshold_is_valid = max_norm.is_finite() && max_norm > 0.0;
    if !threshold_is_valid {
        return Err(GradientError::InvalidThreshold);
    }

    let norm = l2_norm_f64(values)?;
    if norm > max_norm {
        let scale = max_norm / norm;
        values.iter_mut().for_each(|v| *v *= scale);
    }
    Ok(norm)
}

/// Returns `true` if every element of `values` is finite (neither NaN nor infinite).
///
/// An empty slice vacuously returns `true`.
pub fn all_finite_f32(values: &[f32]) -> bool {
    for &x in values {
        if !x.is_finite() {
            return false;
        }
    }
    true
}

/// Returns `true` if every element of `values` is finite (neither NaN nor infinite).
///
/// An empty slice vacuously returns `true`.
pub fn all_finite_f64(values: &[f64]) -> bool {
    for &x in values {
        if !x.is_finite() {
            return false;
        }
    }
    true
}