1mod activation_functions;
2mod basic_dense_layer;
3mod basic_loss;
4mod dense_layers;
5mod leaky_relu_layer;
6mod loss_functions;
7mod losses;
8mod softmax_layer;
9
10pub use activation_functions::ActivationFn;
11pub use basic_dense_layer::BasicDenseLayer;
12pub use basic_loss::BasicLoss;
13pub use dense_layers::{LinearLayer, SigmoidLayer, SwishLayer, TanhLayer};
14pub use leaky_relu_layer::LeakyReLULayer;
15pub use loss_functions::LossFn;
16pub use losses::{CrossEntropyLoss, MSELoss};
17use rand;
18pub use softmax_layer::SoftMaxLayer;
19
20use crate::la::Vector;
21
22
/// Initialization interface for layers: bulk-assign every stored `f32`
/// (activation buffers, weights, biases) from an RNG, a generator closure,
/// or a constant.
///
/// The `fill_params*` variants target a subset of the layer's storage —
/// presumably only the trainable parameters (weights/biases) rather than
/// all internal buffers. NOTE(review): confirm against the implementors
/// (`BasicDenseLayer` etc.); the distinction is not visible from this file.
pub trait FillLayerWith {
    /// Fill the layer's values by sampling from `rng`.
    ///
    /// Generic over the RNG type so callers can pass any `rand::Rng`
    /// (thread rng, seeded StdRng for reproducible tests, …).
    fn fill_with_rng<Rng>(&mut self, rng: &mut Rng)
    where
        Rng: rand::Rng;

    /// Fill the layer's values by repeatedly calling `f`.
    ///
    /// `FnMut` (not `Fn`) so the closure may carry mutable state,
    /// e.g. a counter or a captured RNG.
    fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> f32;

    /// Set every value in the layer to the constant `value`.
    fn fill(&mut self, value: f32);

    /// Fill only the layer's parameters by repeatedly calling `f`
    /// (see trait-level note on what "params" covers).
    fn fill_params_with<F>(&mut self, f: F)
    where
        F: FnMut() -> f32;

    /// Set every parameter in the layer to the constant `value`.
    fn fill_params(&mut self, value: f32);
}
48
49
/// A layer (or chain of layers) with statically-sized input and output,
/// supporting inference (`forward`) and one step of gradient-descent
/// training (`train`).
///
/// The `INPUTS`/`OUTPUTS` const generics fix the vector dimensions at
/// compile time, so mismatched layer chains fail to type-check.
pub trait RecursiveLayer<const INPUTS: usize, const OUTPUTS: usize> {
    /// Compute the layer's output for `input`.
    ///
    /// Takes `&mut self` — presumably so implementors can cache
    /// intermediate activations for a subsequent `train` call.
    /// NOTE(review): confirm with the layer implementations.
    fn forward(&mut self, input: &Vector<INPUTS>) -> Vector<OUTPUTS>;

    /// Run one training step against `desired` with learning rate `eta`.
    ///
    /// Returns a `(Vector<INPUTS>, f32)` pair — presumably the gradient
    /// of the loss w.r.t. `input` (to backpropagate into the preceding
    /// layer) and the scalar loss value. NOTE(review): the tuple's
    /// semantics are not visible from this file; verify against the
    /// implementors before relying on this description.
    fn train(
        &mut self,
        input: &Vector<INPUTS>,
        desired: &Vector<OUTPUTS>,
        eta: f32,
    ) -> (Vector<INPUTS>, f32);
}