concision_core/nn/
layer.rs

/*
    Appellation: layer <module>
    Created At: 2026.01.12:09:34:59
    Contrib: @FL03
*/
mod impl_layer;
mod impl_layer_ext;
mod impl_layer_repr;

#[doc(inline)]
pub use self::types::*;

/// The [`LayerBase`] struct provides a generic interface for layers within a neural
/// network by associating an activation function `F` with a set of parameters `P`.
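///
/// # Examples
///
/// A minimal sketch, assuming the `new` constructor provided by the sibling
/// `impl_layer` module, with a closure standing in for the activator `F` (as
/// exercised by `test_func_layer` below):
///
/// ```ignore
/// use concision_params::Params;
/// use ndarray::Array1;
///
/// // pair a squaring closure with a 3x2 parameter set
/// let params = Params::<f32>::from_elem((3, 2), 0.5);
/// let layer = LayerBase::new(|x: Array1<f32>| x.pow2(), params);
/// ```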
#[derive(Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct LayerBase<F, P> {
    /// the activation function of the layer
    pub rho: F,
    /// the parameters of the layer; often weights and biases
    pub params: P,
}

mod types {
    use super::LayerBase;
    use crate::activate::{HeavySide, HyperbolicTangent, Linear, ReLU, Sigmoid};
    #[cfg(feature = "alloc")]
    use alloc::boxed::Box;
    use concision_params::{Params, ParamsBase};

    /// A type alias for a layer configured to use the [`ParamsBase`] instance
    pub type LayerParamsBase<F, S, D = ndarray::Ix2, A = f32> = LayerBase<F, ParamsBase<S, D, A>>;
    /// A type alias for an owned [`Layer`] configured to use the standard [`Params`] instance
    pub type LayerParams<F, A = f32, D = ndarray::Ix2> = LayerBase<F, Params<A, D>>;
    /// A type alias for a layer using a linear activation function.
    pub type LinearLayer<T> = LayerBase<Linear, T>;
    /// A type alias for a [`Layer`] using a sigmoid activation function.
    pub type SigmoidLayer<T> = LayerBase<Sigmoid, T>;
    /// An alias for a [`Layer`] that uses the hyperbolic tangent function.
    pub type TanhLayer<T> = LayerBase<HyperbolicTangent, T>;
    /// A [`Layer`] type using the ReLU activation function.
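    ///
    /// A minimal sketch, assuming the `relu` constructor from `impl_layer`
    /// (usage mirrors `test_relu_layer` below):
    ///
    /// ```ignore
    /// let params = concision_params::Params::from_elem((3, 2), 0.5_f32);
    /// // rho is fixed to ReLU, which clamps negative pre-activations to zero
    /// let layer: ReluLayer<_> = LayerBase::relu(params);
    /// ```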
    pub type ReluLayer<T> = LayerBase<ReLU, T>;
    /// A [`Layer`] type using the Heaviside step activation function.
    pub type HeavySideLayer<T> = LayerBase<HeavySide, T>;

    #[cfg(feature = "alloc")]
    /// A dynamic instance of the layer using a boxed activator.
    pub type LayerDyn<'a, T> =
        LayerBase<Box<dyn crate::activate::Activator<T, Output = T> + 'a>, T>;
    #[cfg(feature = "alloc")]
    /// A dynamic, functional alias of the [`Layer`] implementation leveraging boxed closures.
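    ///
    /// A minimal sketch, assuming the `new` constructor from `impl_layer`; note
    /// that in this alias `T` serves as both the activator's domain and the
    /// parameter type:
    ///
    /// ```ignore
    /// // any `Fn(T) -> T` may be boxed into the activator slot
    /// let rho: Box<dyn Fn(f32) -> f32> = Box::new(|x| x * x);
    /// let layer: FnLayer<'_, f32> = LayerBase::new(rho, 0.5_f32);
    /// ```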
    pub type FnLayer<'a, T> = LayerBase<Box<dyn Fn(T) -> T + 'a>, T>;
}

#[cfg(test)]
mod tests {
    use super::*;
    use concision_params::Params;
    use ndarray::Array1;

    #[test]
    fn test_func_layer() {
        let params = Params::<f32>::from_elem((3, 2), 0.5);
        let layer = LayerBase::new(|x: Array1<f32>| x.pow2(), params);
        // initialize some inputs
        let inputs = Array1::<f32>::linspace(1.0, 2.0, 3);
        // verify the shape of the layer's parameters
        assert_eq!(layer.params().shape(), &[3, 2]);
        // the affine pass yields 2.75 per output; the closure then squares it
        assert_eq!(layer.forward(&inputs), Array1::from_elem(2, 2.75_f32).pow2());
    }

    #[test]
    fn test_linear_layer() {
        let params = Params::from_elem((3, 2), 0.5_f32);
        let layer = LayerBase::linear(params);
        // verify the shape of the layer's parameters
        assert_eq!(layer.params().shape(), &[3, 2]);
        // initialize some inputs
        let inputs = Array1::<f32>::linspace(1.0, 2.0, 3);
        // compare the actual output against the expected output
        assert_eq!(layer.forward(&inputs), Array1::from_elem(2, 2.75));
    }

    #[test]
    fn test_relu_layer() {
        let params = Params::from_elem((3, 2), 0.5_f32);
        let layer = LayerBase::relu(params);
        // initialize some inputs
        let inputs = Array1::<f32>::linspace(1.0, 2.0, 3);
        // verify the shape of the layer's parameters
        assert_eq!(layer.params().shape(), &[3, 2]);
        // compare the actual output against the expected output
        assert_eq!(layer.forward(&inputs), Array1::from_elem(2, 2.75));
    }

    #[test]
    fn test_tanh_layer() {
        let params = Params::from_elem((3, 2), 0.5_f32);
        let layer = LayerBase::tanh(params);
        // initialize some inputs
        let inputs = Array1::<f32>::linspace(1.0, 2.0, 3);
        // verify the shape of the layer's parameters
        assert_eq!(layer.params().shape(), &[3, 2]);
        // compare the actual output against the expected output
        let y = layer.forward(&inputs);
        // the affine pass yields 2.75 per output; tanh(2.75) ≈ 0.9918597
        let exp = Array1::from_elem(2, 2.75_f32).tanh();
        assert!((y - exp).abs().iter().all(|&i| i < 1e-4));
    }
}