// concision_neural/layers/traits/layers.rs
use super::{Activator, ActivatorGradient};

use cnc::params::ParamsBase;
use cnc::tensor::NdTensor;
use cnc::{Backward, Forward};
use ndarray::{Data, Dimension, RawData};
11pub trait Layer<S, D>
16where
17 D: Dimension,
18 S: RawData<Elem = Self::Scalar>,
19{
20 type Scalar;
21
22 fn params(&self) -> &ParamsBase<S, D>;
24 fn params_mut(&mut self) -> &mut ParamsBase<S, D>;
26 fn set_params(&mut self, params: ParamsBase<S, D>) {
28 *self.params_mut() = params;
29 }
30 fn backward<X, Y, Z, Delta>(
32 &mut self,
33 input: X,
34 error: Y,
35 gamma: Self::Scalar,
36 ) -> cnc::Result<Z>
37 where
38 S: Data,
39 Self: ActivatorGradient<X, Input = Y, Delta = Delta>,
40 Self::Scalar: Clone,
41 ParamsBase<S, D>: Backward<X, Delta, Elem = Self::Scalar, Output = Z>,
42 {
43 let delta = self.activate_gradient(error);
45 self.params_mut().backward(&input, &delta, gamma)
47 }
48 fn forward<X, Y>(&self, input: &X) -> cnc::Result<Y>
50 where
51 Y: NdTensor<S::Elem, D, Repr = S>,
52 ParamsBase<S, D>: Forward<X, Output = Y>,
53 Self: Activator<Y, Output = Y>,
54 {
55 self.params().forward_then(input, |y| self.activate(y))
56 }
57}