// concision_neural/layers/traits/layers.rs

use super::{Activator, ActivatorGradient};

use cnc::params::ParamsBase;
use cnc::{Backward, Forward, Tensor};
use ndarray::{Data, Dimension, RawData};
/// A common interface for neural-network layers that own a set of
/// parameters ([`ParamsBase<S, D>`]) and combine them with an activation
/// function for the forward and backward passes.
///
/// Type parameters:
/// - `S`: the raw storage backing the parameters; its element type is tied
///   to this layer's [`Scalar`](Layer::Scalar) via `RawData<Elem = Self::Scalar>`.
/// - `D`: the dimensionality of the parameter arrays (`ndarray::Dimension`).
pub trait Layer<S, D>
where
    D: Dimension,
    S: RawData<Elem = Self::Scalar>,
{
    /// The scalar (element) type stored by this layer's parameters.
    type Scalar;

    /// Returns an immutable reference to the layer's parameters.
    fn params(&self) -> &ParamsBase<S, D>;
    /// Returns a mutable reference to the layer's parameters.
    fn params_mut(&mut self) -> &mut ParamsBase<S, D>;
    /// Replaces the layer's parameters wholesale, dropping the previous ones.
    fn set_params(&mut self, params: ParamsBase<S, D>) {
        *self.params_mut() = params;
    }
    /// Backward pass with a default implementation: first maps the upstream
    /// `error` through this layer's activation gradient
    /// ([`ActivatorGradient::activate_gradient`]), then delegates to the
    /// parameters' [`Backward`] impl with the original `input`, the computed
    /// delta, and `gamma`.
    ///
    /// NOTE(review): `gamma` is presumably a learning-rate / step-size factor
    /// applied by `ParamsBase::backward` — confirm against `cnc::Backward`.
    fn backward<X, Y, Z, Delta>(
        &mut self,
        input: X,
        error: Y,
        gamma: Self::Scalar,
    ) -> cnc::Result<Z>
    where
        S: Data,
        Self: ActivatorGradient<X, Input = Y, Delta = Delta>,
        Self::Scalar: Clone,
        ParamsBase<S, D>: Backward<X, Delta, Elem = Self::Scalar, Output = Z>,
    {
        // Differentiate the activation w.r.t. the incoming error signal
        // before handing it to the parameter update.
        let delta = self.activate_gradient(error);
        self.params_mut().backward(&input, &delta, gamma)
    }
    /// Forward pass with a default implementation: runs `input` through the
    /// parameters' [`Forward`] impl and applies this layer's activation to
    /// the result via the closure passed to `forward_then`.
    ///
    /// NOTE(review): assumes `forward_then` computes the parameter forward
    /// pass and then applies the given closure to its output — verify
    /// against `cnc::Forward`.
    fn forward<X, Y>(&self, input: &X) -> cnc::Result<Y>
    where
        Y: Tensor<S::Elem, D, Repr = S>,
        ParamsBase<S, D>: Forward<X, Output = Y>,
        Self: Activator<Y, Output = Y>,
    {
        self.params().forward_then(input, |y| self.activate(y))
    }
}