concision_neural/types/
dropout.rs

/*
    Appellation: dropout <module>
    Contrib: FL03 <jo3mccain@icloud.com>
*/
5
/// The [`Dropout`] layer randomly zeroizes elements of its input with a given
/// probability (`p`). This regularization technique is often used to prevent
/// overfitting.
///
/// ### Config
///
/// - (`p`) Probability of dropping an element; expected to lie in `[0, 1)`
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct Dropout {
    pub(crate) p: f64,
}

impl Dropout {
    /// Creates a new [`Dropout`] layer with the given drop probability.
    ///
    /// `p` should lie in `[0, 1)`; a value of `1.0` makes
    /// [`scale`](Self::scale) return infinity, since every element would be
    /// dropped.
    pub const fn new(p: f64) -> Self {
        Self { p }
    }

    /// Returns the configured drop probability.
    pub const fn p(&self) -> f64 {
        self.p
    }

    /// Returns the inverted-dropout scaling factor `1 / (1 - p)`, applied to
    /// the surviving elements so their expected magnitude is preserved.
    pub fn scale(&self) -> f64 {
        (1f64 - self.p).recip()
    }
}

impl Default for Dropout {
    /// Defaults to dropping half of the elements (`p = 0.5`).
    fn default() -> Self {
        Self::new(0.5)
    }
}
34
/// With the `rand` feature enabled, [`Dropout`] implements the crate's
/// [`Forward`](cnc::Forward) pass by delegating to the input's
/// [`DropOut`](cnc::DropOut) implementation.
#[cfg(feature = "rand")]
impl<U> cnc::Forward<U> for Dropout
where
    U: cnc::DropOut,
{
    // The output type is whatever the input's `DropOut` impl produces.
    type Output = <U as cnc::DropOut>::Output;

    /// Applies dropout to `input` using this layer's probability `p`.
    ///
    /// NOTE(review): this always returns `Ok` — `U::dropout` is treated as
    /// infallible here; confirm that matches the `cnc::DropOut` contract.
    fn forward(&self, input: &U) -> cnc::Result<Self::Output> {
        Ok(input.dropout(self.p))
    }
}
45}