concision_neural/types/

/*
    Appellation: dropout <module>
    Contrib: FL03 <jo3mccain@icloud.com>
*/
use concision_core::DropOut;

/// The [Dropout] layer randomly zeroes elements of its input with a given
/// probability (`p`). This regularization technique is commonly used to prevent
/// overfitting.
///
/// ### Config
///
/// - `p`: the probability of dropping each element
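///
/// ### Example
///
/// A minimal usage sketch; the import path assumes [Dropout] is re-exported at the
/// crate root, and applying the layer to an `ndarray` additionally requires the
/// `rand` feature.
///
/// ```
/// use concision_neural::Dropout;
///
/// let layer = Dropout::new(0.25);
/// // the inverted-dropout scaling factor is 1 / (1 - p)
/// assert_eq!(layer.scale(), 1.0 / 0.75);
/// ```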
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct Dropout {
    pub(crate) p: f64,
}

/*
 ************* Implementations *************
*/

impl Dropout {
    /// Create a new [Dropout] layer that drops elements with probability `p`.
    pub fn new(p: f64) -> Self {
        Self { p }
    }

    /// The inverted-dropout scaling factor, `1 / (1 - p)`; e.g. `p = 0.5` yields `2.0`.
    pub fn scale(&self) -> f64 {
        (1f64 - self.p).recip()
    }
}

impl Default for Dropout {
    fn default() -> Self {
        Self::new(0.5)
    }
}

#[cfg(feature = "rand")]
mod impls {
    use super::*;

    use concision_core::{Forward, init::InitializeExt};
    use ndarray::{Array, ArrayBase, DataOwned, Dimension, ScalarOperand};
    use num::traits::Num;

    pub(crate) fn _dropout<S, A, D>(array: &ArrayBase<S, D>, p: f64) -> Array<A, D>
    where
        A: Num + ScalarOperand,
        D: Dimension,
        S: DataOwned<Elem = A>,
    {
        // Create a boolean mask with the same shape as the input; each element is
        // `true` with probability `p`
        let mask: ndarray::Array<bool, D> =
            ndarray::Array::bernoulli(array.dim(), p).expect("Failed to create mask");
        // map `true` to zero and `false` to one so that elements are dropped with probability `p`
        let mask = mask.mapv(|x| if x { A::zero() } else { A::one() });

        // Element-wise multiplication applies the dropout mask
        array.to_owned() * mask
    }

    impl Dropout {
        /// Apply dropout to the given input, zeroing each element with probability `p`.
        pub fn apply<A, S, D>(&self, input: &ArrayBase<S, D>) -> Array<A, D>
        where
            A: Num + ScalarOperand,
            D: Dimension,
            S: DataOwned<Elem = A>,
        {
            _dropout(input, self.p)
        }
    }

    impl<A, S, D> Forward<ArrayBase<S, D>> for Dropout
    where
        A: Num + ScalarOperand,
        D: Dimension,
        S: DataOwned<Elem = A>,
    {
        type Output = Array<A, D>;

        fn forward(&self, input: &ArrayBase<S, D>) -> Result<Self::Output, concision_core::Error> {
            // delegate to the `DropOut` trait method implemented for `ArrayBase`
            let res = input.dropout(self.p);
            Ok(res)
        }
    }
}
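
// A minimal sanity-check sketch, assuming the `rand` feature is enabled and that the
// underlying Bernoulli initializer accepts the degenerate probabilities `0.0` and `1.0`:
// with `p = 0.0` nothing is dropped, while with `p = 1.0` every element is zeroed.
#[cfg(all(test, feature = "rand"))]
mod tests {
    use super::*;
    use ndarray::Array2;

    #[test]
    fn dropout_extremes() {
        let input = Array2::<f64>::ones((3, 4));
        // p = 0.0: the mask is all ones, so the input passes through unchanged
        assert_eq!(Dropout::new(0.0).apply(&input), input);
        // p = 1.0: the mask is all zeros, so every element is dropped
        assert!(Dropout::new(1.0).apply(&input).iter().all(|&x| x == 0.0));
    }
}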