//! native_neural_network 0.1.6
//!
//! `no_std` Rust library for native neural networks (`.rnn`).
//! See the crate documentation for details.
extern crate std;

use rnn::activations::ActivationKind;
use rnn::layers::{build_dense_specs_from_layers, LayerSpec};
use rnn::model_format::{encode_dense_model_v1, encoded_size_v1};
use std::env;
use std::fs;
use std::process;

/// Builds a tiny sample dense model (topology `[2, 1]`, identity
/// activations) and writes its encoded v1 form to the path given as the
/// first CLI argument, defaulting to `/tmp/sample.rnn`.
///
/// Returns a human-readable error string if spec construction, encoding,
/// or the file write fails.
fn run() -> Result<(), String> {
    // First positional argument is the destination path, if supplied.
    let output_path = match env::args().nth(1) {
        Some(path) => path,
        None => "/tmp/sample.rnn".to_string(),
    };

    // Single dense layer: 2 inputs -> 1 output, with fixed demo parameters.
    let topology = [2usize, 1usize];
    let weights = vec![2.0f32, -1.0f32];
    let biases = vec![0.5f32];

    // Placeholder spec; `build_dense_specs_from_layers` overwrites each slot
    // with the real offsets and sizes derived from `topology`.
    let placeholder = LayerSpec::Dense(rnn::DenseLayerDesc {
        input_size: 1,
        output_size: 1,
        weight_offset: 0,
        bias_offset: 0,
        activation: ActivationKind::Identity,
    });
    let mut specs = vec![placeholder; topology.len() - 1];

    let layer_count = build_dense_specs_from_layers(
        &topology,
        ActivationKind::Identity,
        ActivationKind::Identity,
        weights.len(),
        biases.len(),
        &mut specs,
    )
    .map_err(|e| format!("failed to build dense specs: {e:?}"))?;

    // Size the output buffer exactly, then trim to the bytes actually used.
    let needed = encoded_size_v1(layer_count, weights.len(), biases.len())
        .ok_or_else(|| "encoded size overflow".to_string())?;
    let mut encoded = vec![0u8; needed];

    let used = encode_dense_model_v1(&specs[..layer_count], &weights, &biases, &mut encoded)
        .map_err(|e| format!("failed to encode model: {e:?}"))?;
    encoded.truncate(used);

    fs::write(&output_path, &encoded)
        .map_err(|e| format!("failed to write model file {output_path}: {e}"))?;
    println!("wrote sample model: {output_path}");
    Ok(())
}

/// Entry point: delegate to [`run`], report any error on stderr, and exit
/// with status 1 on failure.
fn main() {
    match run() {
        Ok(()) => {}
        Err(err) => {
            eprintln!("{err}");
            process::exit(1);
        }
    }
}