neural_lib/
lib.rs

//! # Neural Lib
//! In progress!!! -> Some fixes in load
//! This is a small library to build groups of neurons.
//! Every group is created with its own parameters and the functions required to make predictions and run training.
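//!
//! A minimal end-to-end sketch (layer sizes and hyperparameters are arbitrary):
//!
//! ```
//! use neural_lib::{Activate, Network};
//!
//! // Two hidden layers of three neurons each, plus a single sigmoid output neuron.
//! let mut net = Network::default(2, 3, vec![Activate::Sig, Activate::Rel]);
//! let inputs = vec![vec![0.0, 0.5, 1.0], vec![1.0, 0.5, 0.0]];
//! let targets = vec![0.0, 1.0];
//! net.train(&inputs, &targets, 0.1, 100);
//! net.inputs = vec![0.0, 0.5, 1.0];
//! println!("{:?}", net.prediction());
//! ```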

use std::{fs, io::{Error, Write}};
/// A Neuron is defined by a weight and a bias. Every value passed in is changed by the properties of the neuron.
#[derive(Clone, Copy)]
struct Neuron {
    weight: f64,
    bias: f64,
    output: f64,
}
impl Default for Neuron {
    fn default() -> Self {
        Neuron { weight: 0.5, bias: 0.5, output: 0.0 }
    }
}
impl Neuron {
    fn new(weight: f64, bias: f64) -> Self {
        Neuron { weight, bias, output: 0.0 }
    }
}

/// Main struct for a layer of the neural network.
/// It contains a few Neuron structs and the activation function used by all of them.
pub struct Layer {
    neurons: Vec<Neuron>,
    act: Activate,
}
impl Layer {
    /// Build a layer from a list of neuron parameters (tuples of (weight, bias), both f64) and an activation function.
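    /// A quick sketch (parameter values are arbitrary):
    /// ```
    /// use neural_lib::{Activate, Layer};
    /// let layer = Layer::new(&Activate::Sig, &vec![(0.2, 0.1), (0.4, 0.0)]);
    /// ```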
    pub fn new(fnact: &Activate, param: &Vec<(f64, f64)>) -> Self {
        let mut network = Layer {
            neurons: Vec::with_capacity(param.len()),
            act: fnact.clone(),
        };
        for i in param {
            network.neurons.push(Neuron::new(i.0, i.1));
        }
        network
    }
    /// Build a layer from a number of neurons and an activation function; every neuron starts at weight 0.5 and bias 0.5.
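    /// For example, three sigmoid neurons with the default parameters:
    /// ```
    /// use neural_lib::{Activate, Layer};
    /// let layer = Layer::default(3, &Activate::Sig);
    /// ```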
    pub fn default(x: u32, fnact: &Activate) -> Self {
        let mut layer = Layer {
            neurons: Vec::with_capacity(x as usize),
            act: fnact.clone(),
        };
        for _ in 0..x {
            layer.neurons.push(Neuron::new(0.5, 0.5));
        }
        layer
    }
    /// Calculate the weighted sum of the layer.
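    /// Each neuron reads a single input component: `prop[i] = x[i] * weight[i] + bias[i]`,
    /// so the input vector must supply at least one value per neuron (it panics otherwise).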
    fn sum_pond(&self, x: &Vec<f64>) -> Vec<f64> {
        let mut prop: Vec<f64> = vec![];
        for (id, n) in self.neurons.iter().enumerate() {
            prop.push(x[id] * n.weight + n.bias);
        }
        prop
    }
}

#[derive(Clone)]
/// Enum to choose your activation function; the default is the sigmoid function.
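/// Variants (applied element-wise to the weighted sums):
/// - `Sig`: sigmoid, 1 / (1 + e^(-x))
/// - `Rel`: ReLU, max(0, x)
/// - `Tan`: tanh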
pub enum Activate {
    Sig,
    Rel,
    Tan,
}
impl Default for Activate {
    // Default => sigmoid function
    fn default() -> Self {
        Activate::Sig
    }
}

/// Run the layer's activation function over the weighted sums and store each neuron's output.
fn activate(net: &mut Layer, x: &Vec<f64>) -> Vec<f64> {
    let mut sum = net.sum_pond(x);
    for (id, r) in sum.iter_mut().enumerate() {
        match net.act {
            Activate::Sig => sigmoid(r),
            Activate::Rel => relu(r),
            Activate::Tan => tanh(r),
        }
        net.neurons[id].output = *r;
    }
    sum
}

/// Sigmoid: 1 / (1 + e^(-x)).
fn sigmoid(x: &mut f64) {
    *x = 1.0 / (1.0 + (-*x).exp());
}

/// ReLU: max(0, x).
fn relu(x: &mut f64) {
    if *x < 0.0 {
        *x = 0.0;
    }
}

/// Tanh, computed with the standard library for numerical stability
/// (the closed form (e^(2x) - 1) / (e^(2x) + 1) overflows to NaN for large x).
fn tanh(x: &mut f64) {
    *x = x.tanh();
}

/// Define a neural network: an input buffer, hidden layers, and an output layer.
pub struct Network {
    pub inputs: Vec<f64>,
    layers: Vec<Layer>,
    pub output: Layer,
}
impl Network {
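    /// Build a network from explicit parameters: one Vec of `(weight, bias)` tuples per
    /// hidden layer and one activation function per hidden layer; the output layer is a
    /// single sigmoid neuron. A sketch (values are arbitrary):
    /// ```
    /// use neural_lib::{Activate, Network};
    /// let params = vec![vec![(0.2, 0.1), (0.4, 0.0)]]; // one hidden layer, two neurons
    /// let net = Network::new(1, params, vec![Activate::Sig]);
    /// ```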
    pub fn new(nb_lay: usize, lays_params: Vec<Vec<(f64, f64)>>, lays_fn: Vec<Activate>) -> Network {
        let mut lays: Vec<Layer> = Vec::new();
        for l in 0..nb_lay {
            lays.push(Layer::new(&lays_fn[l], &lays_params[l]));
        }
        let output = vec![(0.5, 0.5)];
        Network { inputs: vec![], layers: lays, output: Layer::new(&Activate::Sig, &output) }
    }
    /// Build a network with `nb_lay` hidden layers of `nb_n` default neurons each,
    /// one activation function per hidden layer, and a single sigmoid output neuron.
    pub fn default(nb_lay: usize, nb_n: u32, lays_fn: Vec<Activate>) -> Network {
        let mut lays: Vec<Layer> = Vec::new();
        for l in 0..nb_lay {
            lays.push(Layer::default(nb_n, &lays_fn[l]));
        }
        Network { inputs: vec![], layers: lays, output: Layer::default(1, &Activate::Sig) }
    }

    /// Launch a prediction on the current values of `self.inputs`.
    /// This function can be called manually or by the training session.
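    /// A short sketch (values are arbitrary):
    /// ```
    /// use neural_lib::{Activate, Network};
    /// let mut net = Network::default(1, 2, vec![Activate::Sig]);
    /// net.inputs = vec![0.3, 0.7]; // one value per neuron of the first layer
    /// let out = net.prediction();
    /// assert_eq!(out.len(), 1); // single output neuron
    /// ```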
    pub fn prediction(&mut self) -> Vec<f64> {
        let mut res = self.inputs.clone();
        // Forward propagation through the hidden layers
        for layer in self.layers.iter_mut() {
            res = activate(layer, &res);
        }
        // Forward propagation through the output layer
        res = activate(&mut self.output, &res);
        res
    }

    /// Initiate a training session.
    /// You pass one input vector per sample, one target per sample, a learning rate,
    /// and the number of repetitions over the whole dataset in `nb`.
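    /// A toy session (arbitrary data and hyperparameters):
    /// ```
    /// use neural_lib::{Activate, Network};
    /// let mut net = Network::default(1, 2, vec![Activate::Sig]);
    /// let inputs = vec![vec![0.0, 1.0], vec![1.0, 0.0]];
    /// let targets = vec![1.0, 0.0];
    /// net.train(&inputs, &targets, 0.1, 500); // 500 passes over both samples
    /// ```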
    pub fn train(&mut self, inputs: &Vec<Vec<f64>>, targets: &Vec<f64>, learning_rate: f64, nb: usize) {
        for _ in 0..nb {
            for (input, target) in inputs.iter().zip(targets.iter()) {
                self.inputs = input.to_owned();
                self.train_single(*target, learning_rate);
            }
        }
    }

    fn train_single(&mut self, target: f64, learning_rate: f64) {
        let output = self.prediction();
        // Error on the single output neuron
        let error = target - output[0];

        // Delta-rule style update. Note: the `out * (1 - out)` term is the sigmoid
        // derivative, so layers using ReLU or tanh are updated with an approximate gradient.
        for neuron in self.output.neurons.iter_mut() {
            let neuron_error = neuron.weight * error * neuron.output * (1.0 - neuron.output);
            neuron.weight += learning_rate * neuron_error * neuron.output;
            neuron.bias += learning_rate * neuron_error;
        }

        // Hidden layers, walked backwards, reuse the same output error term.
        for layer in self.layers.iter_mut().rev() {
            for neuron in layer.neurons.iter_mut() {
                let neuron_error = neuron.weight * error * neuron.output * (1.0 - neuron.output);
                neuron.weight += learning_rate * neuron_error * neuron.output;
                neuron.bias += learning_rate * neuron_error;
            }
        }
    }
    /// Generate a file which contains the parameters of your neurons and layers.
    /// Fails if the file already exists (`create_new`).
    pub fn output(&self, n_file: &str) -> Result<(), Error> {
        let mut f = fs::OpenOptions::new().create_new(true).write(true).open(n_file)?;
        f.write_all(b"Schematic Neural Network\n")?;
        for (id, layer) in self.layers.iter().enumerate() {
            f.write_all(format!("HiddenLayer :{}\n", id).as_bytes())?;
            match layer.act {
                Activate::Sig => f.write_all(b"Sigmoid fn\n")?,
                Activate::Rel => f.write_all(b"ReLU fn\n")?,
                Activate::Tan => f.write_all(b"Tanh fn\n")?,
            };
            for n in layer.neurons.iter() {
                f.write_all(format!("[{}:{}] ", n.weight, n.bias).as_bytes())?;
            }
            f.write_all(b"\n")?;
        }
        f.write_all(b"OutputLayer\n")?;
        for n in self.output.neurons.iter() {
            f.write_all(format!("[{}:{}] ", n.weight, n.bias).as_bytes())?;
        }
        f.write_all(b"\n")?;
        Ok(())
    }
}

/// Classification modes; not wired into the library yet.
#[allow(dead_code)]
enum Class {
    Binary,
    MultiClass,
}