//! concision_neural/layers/traits/layers.rs
use super::{Activator, ActivatorGradient};

use cnc::params::ParamsBase;
use cnc::tensor::NdTensor;
use cnc::{Backward, Forward};
use ndarray::{Data, Dimension, RawData};
12pub trait Layer<S, D>
14where
15 D: Dimension,
16 S: RawData<Elem = Self::Elem>,
17{
18 type Elem;
21 type Rho: Activator<Self::Elem>;
23
24 fn rho(&self) -> &Self::Rho;
25 fn params(&self) -> &ParamsBase<S, D>;
27 fn params_mut(&mut self) -> &mut ParamsBase<S, D>;
29}
30pub trait LayerExt<S, D>: Layer<S, D>
34where
35 D: Dimension,
36 S: RawData<Elem = Self::Elem>,
37{
38 fn set_params(&mut self, params: ParamsBase<S, D>) {
40 *self.params_mut() = params;
41 }
42 fn backward<X, Y, Z, Dt>(&mut self, input: X, error: Y, gamma: Self::Elem) -> cnc::Result<Z>
44 where
45 S: Data,
46 Self: ActivatorGradient<X, Input = Y, Output = Z, Delta = Dt>,
47 Self::Elem: Clone,
48 ParamsBase<S, D>: Backward<X, Dt, Elem = Self::Elem, Output = Z>,
49 {
50 let delta = self.activate_gradient(error);
52 self.params_mut().backward(&input, &delta, gamma)
54 }
55 fn forward<X, Y>(&self, input: &X) -> cnc::Result<Y>
57 where
58 Y: NdTensor<S::Elem, D, Repr = S>,
59 ParamsBase<S, D>: Forward<X, Output = Y>,
60 Self: Activator<Y, Output = Y>,
61 {
62 self.params().forward_then(input, |y| self.activate(y))
63 }
64}