//! astrai 2.2.0
//!
//! A pretty bad neural network library
//!
//! Documentation
use super::*;

mu!(
    initialization,
    utils,
    connection,
    layer,
    graddec,
    net_tests,
    params
);

/// A feed-forward neural network: layers of neurons joined by weighted
/// connections, with `shape` recording the neuron count of each layer in
/// order.
//
// NOTE(review): `Eq` was removed from the derive list. `Connection` carries
// an `f64` weight and `Layer` an `f64` bias array, and `f64` implements only
// `PartialEq`, so `derive(Eq)` here cannot compile.
#[derive(Clone, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Network {
    // The layers of the network, input layer first.
    pub layers: Vec<Layer>,
    // Every weighted edge between neurons, across all layer pairs.
    pub connections: Vec<Connection>,
    // Neuron count per layer, in layer order.
    pub shape: Vec<usize>,
}

#[cfg(feature = "serde")]
impl Network {
    /// Deserialize a [`Network`] from the JSON file at `path`.
    ///
    /// # Errors
    /// Returns any I/O error from opening the file, or any `serde_json`
    /// error from decoding its contents.
    pub fn from_file(path: &str) -> Result<Network, Box<dyn std::error::Error>> {
        let reader = std::io::BufReader::new(std::fs::File::open(path)?);
        Ok(serde_json::from_reader(reader)?)
    }

    /// Serialize this network as JSON into the file at `path`, creating or
    /// truncating it.
    ///
    /// # Errors
    /// Returns any I/O error from creating the file, or any `serde_json`
    /// error from encoding.
    pub fn to_file(&self, path: &str) -> Result<(), Box<dyn std::error::Error>> {
        let writer = std::io::BufWriter::new(std::fs::File::create(path)?);
        serde_json::to_writer(writer, self)?;
        Ok(())
    }
}

impl std::fmt::Debug for Network {
    /// Human-readable dump: each layer's debug output, then every
    /// connection as `Layer a Neuron i -> Layer b Neuron j (weight)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(f, "Network debug:")?;
        writeln!(f, "Layers:")?;

        // FIX(review): iterate by reference — the original cloned the whole
        // `Vec<Layer>` (a full deep copy) just to print it.
        for layer in &self.layers {
            writeln!(f, "\n\n{:?}", layer)?;
        }

        writeln!(f, "\nConnections:")?;

        for connection in &self.connections {
            writeln!(
                f,
                "Layer {} Neuron {:?} -> Layer {} Neuron {:?} ({})",
                connection.in_neuron_id.0,
                connection.in_neuron_id.1,
                connection.out_neuron_id.0,
                connection.out_neuron_id.1,
                connection.weight
            )?;
        }

        Ok(())
    }
}

#[profiling::all_functions]
impl Network {
    /// Return a clone of this network with one random perturbation applied:
    /// with probability 0.5 a random connection weight is re-sampled,
    /// otherwise a random neuron bias is re-sampled.
    ///
    /// New values are drawn from a He-style normal distribution
    /// (std dev = sqrt(2 / fan)) plus a uniform modifier in [-1, 1).
    ///
    /// # Panics
    /// Panics if the network has no connections or fewer than two layers
    /// (the random index ranges would be empty or the neighbour-layer
    /// lookup would underflow).
    pub fn mutate(&self) -> Network {
        let mut rng = rand::thread_rng();
        let mut net = self.clone();

        // Random layer, used only by the neuron branch below.
        let layer = rng.gen_range(0..net.layers.len());

        // FIX(review): this was `rng.gen_range(-1..1) as f64` — an *integer*
        // range that can only yield -1.0 or 0.0. Sample a continuous
        // modifier in [-1, 1) as clearly intended.
        let modifier: f64 = rng.gen_range(-1.0..1.0);

        // Mutate a connection or a neuron with equal probability.
        if rng.gen_bool(0.5) {
            // Connection: re-sample its weight with a std dev scaled by the
            // combined size of the two layers it joins.
            let connid = rng.gen_range(0..net.connections.len());
            // Borrow instead of copying the Connection out (no `Copy` needed).
            let (in_layer, out_layer) = {
                let conn = &net.connections[connid];
                (conn.in_neuron_id.0, conn.out_neuron_id.0)
            };

            let fan = (self.layers[in_layer].neuron_amt
                + self.layers[out_layer].neuron_amt) as f64;
            let normal = Normal::new(0.0, (2.0 / fan).sqrt()).unwrap();

            net.connections[connid].weight = normal.sample(&mut rng) + modifier;
        } else {
            // Neuron: re-sample its bias. The std dev uses this layer plus a
            // neighbour: the previous layer for the output layer, the next
            // layer otherwise.
            let neuron = rng.gen_range(0..net.layers[layer].neuron_amt);

            let neighbour = if layer == self.layers.len() - 1 {
                layer - 1
            } else {
                layer + 1
            };
            let fan = self.layers[layer].neuron_amt as f64
                + self.layers[neighbour].neuron_amt as f64;
            let normal = Normal::new(0.0, (2.0 / fan).sqrt()).unwrap();

            net.layers[layer].bias[neuron] = normal.sample(&mut rng) + modifier;
        }

        net
    }

    /// Build one dense weight matrix per adjacent layer pair, of shape
    /// `(shape[i], shape[i + 1])`, filled from the enabled connections that
    /// link layer `i` to layer `i + 1`. Disabled or non-adjacent
    /// connections are skipped.
    pub fn weight_matrices(&self) -> Vec<Array2<f64>> {
        let mut matrices = Vec::with_capacity(self.shape.len().saturating_sub(1));

        for shape in self.shape.windows(2) {
            matrices.push(Array2::<f64>::zeros((shape[0], shape[1])))
        }

        for c in &self.connections {
            // FIX(review): the adjacency test was written as
            // `c.in_neuron_id.0 != c.out_neuron_id.0 - 1`, which underflows
            // `usize` (panicking in debug builds) for any connection whose
            // output layer is 0. Testing additively on the `in` side is
            // equivalent for adjacent pairs and cannot underflow.
            if !c.enabled || c.out_neuron_id.0 != c.in_neuron_id.0 + 1 {
                continue;
            }
            matrices[c.in_neuron_id.0][[c.in_neuron_id.1, c.out_neuron_id.1]] = c.weight;
        }

        matrices
    }

    /// Run a forward pass over `sensors` and return the output layer's
    /// activations.
    ///
    /// When `apply_backprop` is true, the network is zeroed first and each
    /// layer records its pre-activation (`z`) and activation arrays for a
    /// later backpropagation step.
    ///
    /// # Panics
    /// Panics if `sensors.len()` differs from the input layer's neuron
    /// count.
    pub fn activate(&mut self, sensors: Vec<f64>, apply_backprop: bool) -> Vec<f64> {
        if sensors.len() != self.layers[0].neuron_amt {
            panic!("Invalid number of inputs!");
        }

        if apply_backprop {
            self.zero_out_network();
        }

        let mut input = Array1::from(sensors);

        let weight_matrices = self.weight_matrices();

        let mut activation_arrays = vec![];
        let mut z_arrays = vec![];

        if apply_backprop {
            // The raw input is the "activation" of layer 0.
            activation_arrays.push(input.clone());
        }

        for (idx, layer) in self.layers.iter_mut().skip(1).enumerate() {
            // `idx` is offset by skip(1): weight_matrices[idx] maps layer
            // idx -> idx + 1, so it pairs with this layer's bias.
            let z = input.dot(&weight_matrices[idx]) + &layer.bias;

            if apply_backprop {
                z_arrays.push(z.clone());
            }

            input = z.mapv(|x| layer.activation_function.call(x));

            if apply_backprop {
                activation_arrays.push(input.clone());
            }
        }

        if apply_backprop {
            // Hand each layer its recorded arrays; layer 0 has no z array.
            for (idx, layer) in self.layers.iter_mut().enumerate() {
                if idx != 0 {
                    layer.apply_z_array(z_arrays[idx - 1].clone());
                }
                layer.apply_activation_array(activation_arrays[idx].clone());
            }
        }

        input.to_vec()
    }
}