concision_core/nn/layer/impl_layer.rs
/*
    appellation: impl_layer <module>
    authors: @FL03
*/
use crate::activate::Activator;
use crate::nn::layer::LayerBase;
use concision_params::RawParams;
use concision_traits::Forward;

impl<F, P, A> LayerBase<F, P>
where
    P: RawParams<Elem = A>,
{
    /// create a new [`LayerBase`] from the given activation function and parameters.
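    ///
    /// A minimal usage sketch; `Relu` and `LinearParams` are hypothetical stand-ins for
    /// an [`Activator`] and a [`RawParams`] implementation, so the doctest is ignored.
    ///
    /// ```ignore
    /// let layer = LayerBase::new(Relu, LinearParams::default());
    /// ```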
    pub const fn new(rho: F, params: P) -> Self {
        Self { rho, params }
    }
    /// create a new [`LayerBase`] from the given parameters, using the logical default
    /// for the activation of type `F`.
    pub fn from_params(params: P) -> Self
    where
        F: Default,
    {
        Self::new(<F>::default(), params)
    }
    /// create a new [`LayerBase`] from the given activation function, using the default
    /// value for the parameters of type `P`.
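    ///
    /// A minimal usage sketch with the same hypothetical `Relu` and `LinearParams`
    /// placeholders as above; the parameter type is pinned with an annotation so its
    /// [`Default`] implementation can be resolved.
    ///
    /// ```ignore
    /// let layer: LayerBase<Relu, LinearParams> = LayerBase::from_rho(Relu);
    /// ```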
    pub fn from_rho(rho: F) -> Self
    where
        P: Default,
    {
        Self::new(rho, <P>::default())
    }
    /// returns an immutable reference to the layer's parameters
    pub const fn params(&self) -> &P {
        &self.params
    }
    /// returns a mutable reference to the layer's parameters
    pub const fn params_mut(&mut self) -> &mut P {
        &mut self.params
    }
    /// returns an immutable reference to the activation function of the layer
    pub const fn rho(&self) -> &F {
        &self.rho
    }
    /// returns a mutable reference to the activation function of the layer
    pub const fn rho_mut(&mut self) -> &mut F {
        &mut self.rho
    }
    #[inline]
    /// consumes the current instance and returns another with the given parameters.
    pub fn with_params<Y>(self, params: Y) -> LayerBase<F, Y>
    where
        F: Activator<Y>,
    {
        LayerBase {
            rho: self.rho,
            params,
        }
    }
    #[inline]
    /// consumes the current instance and returns another with the given activation
    /// function; useful while building a model, before the final activation is chosen.
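    ///
    /// A minimal usage sketch swapping a hypothetical `Identity` placeholder for an
    /// equally hypothetical `Sigmoid`; both stand in for [`Activator`] implementations.
    ///
    /// ```ignore
    /// let layer = LayerBase::new(Identity, LinearParams::default());
    /// let layer = layer.with_rho(Sigmoid);
    /// ```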
    pub fn with_rho<G>(self, rho: G) -> LayerBase<G, P>
    where
        G: Activator<P>,
    {
        LayerBase {
            rho,
            params: self.params,
        }
    }
    #[inline]
    /// apply the configured activation function to the given input, producing the
    /// activated output.
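    ///
    /// A minimal usage sketch; the hypothetical `Relu` is assumed to implement
    /// [`Activator`] for `f64`, mapping negative inputs to zero.
    ///
    /// ```ignore
    /// let layer = LayerBase::new(Relu, LinearParams::default());
    /// assert_eq!(layer.activate(-1.0_f64), 0.0);
    /// ```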
    pub fn activate<X, Y>(&self, input: X) -> Y
    where
        F: Activator<X, Output = Y>,
    {
        self.rho().activate(input)
    }
    /// given some input, complete a single forward pass through the layer
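    ///
    /// A minimal usage sketch; it assumes a [`Forward`] implementation exists for the
    /// concrete layer type, provided elsewhere, and reuses the hypothetical placeholders
    /// from the earlier examples.
    ///
    /// ```ignore
    /// let layer = LayerBase::new(Relu, LinearParams::default());
    /// let output = layer.forward(&input);
    /// ```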
    pub fn forward<U, V>(&self, input: &U) -> V
    where
        Self: Forward<U, Output = V>,
    {
        <Self as Forward<U>>::forward(self, input)
    }
}