[−][src]Struct cogent::NeuralNetwork
The fundamental neural network struct.
All other types are ancillary to this structure.
Implementations
impl<'a> NeuralNetwork
[src]
pub fn new(inputs: u64, layers: &[Layer]) -> NeuralNetwork
[src]
Constructs network of given layers.
Returns constructed network.
use cogent::{NeuralNetwork,Layer,Activation}; let mut net = NeuralNetwork::new(2,&[ Layer::Dense(3,Activation::Sigmoid), Layer::Dense(2,Activation::Softmax) ]);
pub fn new_constant(inputs: u64, layers: &[Layer], val: f32) -> NeuralNetwork
[src]
Constructs network of given layers with all weights and biases set to given value. IMPORTANT: This function seems to cause issues in training and HAS NOT been properly tested, I DO NOT recommend you use this.
pub fn activation(&mut self, index: usize, activation: Activation)
[src]
Sets activation of layer specified by index (excluding input layer).
use cogent::{NeuralNetwork,Layer,Activation}; // Net (2 -Sigmoid-> 3 -Sigmoid-> 2) let mut net = NeuralNetwork::new(2,&[ Layer::Dense(3,Activation::Sigmoid), Layer::Dense(2,Activation::Sigmoid) ]); net.activation(1,Activation::Softmax); // Changes activation of output layer. // Net will now be (2 -Sigmoid-> 3 -Softmax-> 2)
pub fn run(&mut self, input: &Array2<f32>) -> Vec<usize>
[src]
Runs a batch of examples through the network.
Returns classes.
pub fn inner_run(&mut self, inputs: &Array<f32>) -> Array<f32>
[src]
Runs a batch of examples through the network.
Returns output.
pub fn train(
&'a mut self,
data: &'a mut Array2<f32>,
labels: &'a mut Array2<usize>
) -> Trainer<'a>
[src]
&'a mut self,
data: &'a mut Array2<f32>,
labels: &'a mut Array2<usize>
) -> Trainer<'a>
Begins setting hyperparameters for training.
Returns Trainer
struct used to specify hyperparameters
Training a network to learn an XOR gate:
use cogent::{ NeuralNetwork,Layer, Activation, EvaluationData }; // Sets network let mut neural_network = NeuralNetwork::new(2,&[ Layer::Dense(3,Activation::Sigmoid), Layer::Dense(2,Activation::Softmax) ]); // Sets data // 0=false, 1=true. let mut data:Array2<f32> = array![[0.,0.],[1.,0.],[0.,1.],[1.,1.]]; let mut labels:Array2<usize> = array![[0],[1],[1],[0]]; // Trains network neural_network.train(&mut data.clone(),&mut labels.clone()) // `.clone()` necessary to satisfy borrow checker concerning later immutable borrow as evaluation data. .learning_rate(2f32) .evaluation_data(EvaluationData::Actual(&data,&labels)) // Use testing data as evaluation data. .go();
pub fn check_dataset(&self, data: &Array2<f32>, labels: &Array2<usize>)
[src]
Checks a dataset has an equal number of examples and labels and fits the network.
This is called whenever you give a dataset to the library, you do not need to call this yourself.
For example, this is called when you pass a dataset to .train(..).
pub fn train_details(
&mut self,
training_data: ArrayViewMut2<f32>,
training_labels: ArrayViewMut2<usize>,
evaluation_data: ArrayView2<f32>,
evaluation_labels: ArrayView2<usize>,
cost: &Cost,
halt_condition: Option<HaltCondition>,
log_interval: Option<MeasuredCondition>,
batch_size: usize,
intial_learning_rate: f32,
l2: Option<f32>,
early_stopping_n: MeasuredCondition,
evaluation_min_change: Proportion,
learning_rate_decay: f32,
learning_rate_interval: MeasuredCondition,
checkpoint_interval: Option<MeasuredCondition>,
name: Option<&str>,
tracking: bool
)
[src]
&mut self,
training_data: ArrayViewMut2<f32>,
training_labels: ArrayViewMut2<usize>,
evaluation_data: ArrayView2<f32>,
evaluation_labels: ArrayView2<usize>,
cost: &Cost,
halt_condition: Option<HaltCondition>,
log_interval: Option<MeasuredCondition>,
batch_size: usize,
intial_learning_rate: f32,
l2: Option<f32>,
early_stopping_n: MeasuredCondition,
evaluation_min_change: Proportion,
learning_rate_decay: f32,
learning_rate_interval: MeasuredCondition,
checkpoint_interval: Option<MeasuredCondition>,
name: Option<&str>,
tracking: bool
)
Runs training.
In most cases you shouldn't call this, instead call .train()
then call the functions to set the hyperparameters, then call .go()
(which calls this).
Using this function directly is ugly. Would not recommend.
pub fn evaluate(
&mut self,
data: &Array2<f32>,
labels: &Array2<usize>,
cost: Option<&Cost>
) -> (f32, u32)
[src]
&mut self,
data: &Array2<f32>,
labels: &Array2<usize>,
cost: Option<&Cost>
) -> (f32, u32)
Evaluates dataset using network.
Returns tuple: (Average cost across dataset, Number of examples correctly classified).
let mut data:Array2<f32> = array![[0.,0.],[1.,0.],[0.,1.],[1.,1.]]; let mut labels:Array2<usize> = array![[0],[1],[1],[0]]; // `net` is neural network trained to 100% accuracy to mimic an XOR gate. // Passing `None` for the cost uses the default cost function (crossentropy). let (cost,accuracy) = net.evaluate(&data,&labels,None); assert_eq!(accuracy,4);
pub fn analyze(
&mut self,
data: &Array2<f32>,
labels: &Array2<usize>
) -> (Vec<f32>, Vec<Vec<f32>>)
[src]
&mut self,
data: &Array2<f32>,
labels: &Array2<usize>
) -> (Vec<f32>, Vec<Vec<f32>>)
Not deprecated, just broken until ArrayFire update installer to match git (where issue has been reported and fixed).
Returns tuple of: (Vector of class percentage accuracies, Percentage confusion matrix).
let mut data:Array2<f32> = array![[0.,0.],[1.,0.],[0.,1.],[1.,1.]]; let mut labels:Array2<usize> = array![[0],[1],[1],[0]]; // `net` is neural network trained to 100% accuracy to mimic an XOR gate. let (correct_vector,confusion_matrix) = net.analyze(&data,&labels); assert_eq!(correct_vector,vec![1f32,1f32]); assert_eq!(confusion_matrix,vec![[1f32,0f32],[0f32,1f32]]);
pub fn analyze_string(
&mut self,
data: &Array2<f32>,
labels: &Array2<usize>,
precision: usize,
dict_opt: Option<HashMap<usize, &str>>
) -> (String, String)
[src]
&mut self,
data: &Array2<f32>,
labels: &Array2<usize>,
precision: usize,
dict_opt: Option<HashMap<usize, &str>>
) -> (String, String)
Not deprecated, just broken until ArrayFire update installer to match git (where issue has been reported and fixed).
Returns tuple of pretty strings of: (Vector of class percentage accuracies, Percentage confusion matrix).
Example without dictionary:
let mut data = vec![ (vec![0f32,0f32],0usize), (vec![1f32,0f32],1usize), (vec![0f32,1f32],1usize), (vec![1f32,1f32],0usize) ]; // `net` is neural network trained to 100% accuracy to mimic an XOR gate. let (correct_vector,confusion_matrix) = net.analyze_string(&mut data,2,None); let expected_vector:&str = " 0 1 ┌ ┐ % │ 1.00 1.00 │ └ ┘\n"; assert_eq!(&correct_vector,expected_vector); let expected_matrix:&str = "% 0 1 ┌ ┐ 0 │ 1.00 0.00 │ 1 │ 0.00 1.00 │ └ ┘\n"; assert_eq!(&confusion_matrix,expected_matrix);
Example with dictionary:
let mut data = vec![ (vec![0f32,0f32],0usize), (vec![1f32,0f32],1usize), (vec![0f32,1f32],1usize), (vec![1f32,1f32],0usize) ]; let mut dictionairy:HashMap<usize,&str> = HashMap::new(); dictionairy.insert(0,"False"); dictionairy.insert(1,"True"); // `net` is neural network trained to 100% accuracy to mimic an XOR gate. let (correct_vector,confusion_matrix) = net.analyze_string(&mut data,2,Some(dictionairy)); let expected_vector:&str = " False True ┌ ┐ % │ 1.00 1.00 │ └ ┘\n"; assert_eq!(&correct_vector,expected_vector); let expected_matrix:&str = " % False True ┌ ┐ False │ 1.00 0.00 │ True │ 0.00 1.00 │ └ ┘\n"; assert_eq!(&confusion_matrix,expected_matrix);
pub fn export(&self, path: &str)
[src]
Exports neural network to `path`.json.
use cogent::{Activation,Layer,NeuralNetwork}; let net = NeuralNetwork::new(2,&[ Layer::Dense(3,Activation::Sigmoid), Layer::Dense(2,Activation::Softmax) ]); net.export("my_neural_network");
pub fn import(path: &str) -> NeuralNetwork
[src]
Imports neural network from `path`.json.
use cogent::NeuralNetwork; let net = NeuralNetwork::import("my_neural_network");
Auto Trait Implementations
impl RefUnwindSafe for NeuralNetwork
impl Send for NeuralNetwork
impl Sync for NeuralNetwork
impl Unpin for NeuralNetwork
impl UnwindSafe for NeuralNetwork
Blanket Implementations
impl<T> Any for T where
T: 'static + ?Sized,
[src]
T: 'static + ?Sized,
impl<T> Borrow<T> for T where
T: ?Sized,
[src]
T: ?Sized,
impl<T> BorrowMut<T> for T where
T: ?Sized,
[src]
T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
[src]
impl<T> From<T> for T
[src]
impl<T, U> Into<U> for T where
U: From<T>,
[src]
U: From<T>,
impl<T, U> TryFrom<U> for T where
U: Into<T>,
[src]
U: Into<T>,
type Error = Infallible
The type returned in the event of a conversion error.
fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>
[src]
impl<T, U> TryInto<U> for T where
U: TryFrom<T>,
[src]
U: TryFrom<T>,
type Error = <U as TryFrom<T>>::Error
The type returned in the event of a conversion error.
fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>
[src]
impl<V, T> VZip<V> for T where
V: MultiLane<T>,
V: MultiLane<T>,