// concision_neural/layers/layer/impl_layer_repr.rs

/*
    appellation: impl_layer_repr <module>
    authors: @FL03
*/
use crate::layers::layer::LayerBase;

use crate::layers::{Linear, ReLU, Sigmoid, Tanh};
use cnc::ParamsBase;
use ndarray::{Dimension, RawData};

11impl<S, D> LayerBase<Linear, S, D>
12where
13    D: Dimension,
14    S: RawData<Elem = f32>,
15{
16    /// initialize a new [`LayerBase`] using a [`Linear`] activation function and the given
17    /// parameters.
18    pub const fn linear(params: ParamsBase<S, D>) -> Self {
19        Self {
20            rho: Linear,
21            params,
22        }
23    }
24}
26impl<S, D> LayerBase<Sigmoid, S, D>
27where
28    D: Dimension,
29    S: RawData<Elem = f32>,
30{
31    /// initialize a new [`LayerBase`] using a [`Sigmoid`] activation function and the given
32    /// parameters.
33    pub const fn sigmoid(params: ParamsBase<S, D>) -> Self {
34        Self {
35            rho: Sigmoid,
36            params,
37        }
38    }
39}
41impl<S, D> LayerBase<Tanh, S, D>
42where
43    D: Dimension,
44    S: RawData<Elem = f32>,
45{
46    /// initialize a new [`LayerBase`] using a [`Tanh`] activation function and the given
47    /// parameters.
48    pub const fn tanh(params: ParamsBase<S, D>) -> Self {
49        Self { rho: Tanh, params }
50    }
51}
53impl<S, D> LayerBase<ReLU, S, D>
54where
55    D: Dimension,
56    S: RawData<Elem = f32>,
57{
58    /// initialize a new [`LayerBase`] using a [`ReLU`] activation function and the given
59    pub const fn relu(params: ParamsBase<S, D>) -> Self {
60        Self { rho: ReLU, params }
61    }
62}