concision_core/nn/dropout.rs
#![allow(unused_imports)]
use crate::Forward;
use nd::prelude::*;
use nd::{DataOwned, ScalarOperand};
#[cfg(feature = "rand")]
use ndrand::{rand_distr::Bernoulli, RandomExt};
use num::traits::Num;

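/// Randomly zeroes each element of `array` with probability `p`, returning a
/// new owned array. A boolean mask is sampled from a `Bernoulli(p)`
/// distribution, so `p` is the probability that a given element is dropped.
/// This function applies the raw mask only; see [`DropoutLayer::scale`] for
/// the inverted-dropout scale factor.
///
/// # Panics
///
/// Panics if `p` is not in the closed interval `[0, 1]`.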
#[cfg(feature = "rand")]
pub fn dropout<A, S, D>(array: &ArrayBase<S, D>, p: f64) -> Array<A, D>
where
    A: Num + ScalarOperand,
    D: Dimension,
    S: DataOwned<Elem = A>,
{
    let distribution = Bernoulli::new(p).unwrap();

    // Sample a boolean mask over the input's shape; `true` marks a dropped element.
    let mask: Array<bool, D> = Array::random(array.dim(), distribution);
    // Convert the mask to the element type: dropped positions become zero.
    let mask = mask.mapv(|x| if x { A::zero() } else { A::one() });

    // Element-wise multiplication zeroes out the dropped positions.
    array * mask
}

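/// A convenience trait for types that support the [`dropout`] operation.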
pub trait Dropout {
    type Output;

    fn dropout(&self, p: f64) -> Self::Output;
}

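/// A dropout layer that zeroes each input element with probability `p`
/// during the forward pass.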
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct DropoutLayer {
    /// The probability that any given element is dropped.
    pub(crate) p: f64,
}

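/// Blanket implementation: any `ndarray` array with owned storage gains a
/// `.dropout(p)` method.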
#[cfg(feature = "rand")]
impl<A, S, D> Dropout for ArrayBase<S, D>
where
    A: Num + ScalarOperand,
    D: Dimension,
    S: DataOwned<Elem = A>,
{
    type Output = Array<A, D>;

    fn dropout(&self, p: f64) -> Self::Output {
        dropout(self, p)
    }
}

impl DropoutLayer {
    /// Creates a new layer with drop probability `p`.
    pub fn new(p: f64) -> Self {
        Self { p }
    }

    /// The inverted-dropout scale factor, `1 / (1 - p)`. Note that
    /// [`Forward::forward`] does not currently apply this scaling to its output.
    pub fn scale(&self) -> f64 {
        (1f64 - self.p).recip()
    }
}

impl Default for DropoutLayer {
    fn default() -> Self {
        Self::new(0.5)
    }
}

#[cfg(feature = "rand")]
impl<A, S, D> Forward<ArrayBase<S, D>> for DropoutLayer
where
    A: Num + ScalarOperand,
    D: Dimension,
    S: DataOwned<Elem = A>,
{
    type Output = Array<A, D>;

    fn forward(&self, input: &ArrayBase<S, D>) -> Self::Output {
        input.dropout(self.p)
    }
}
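
// A minimal usage sketch (assumes the `rand` feature is enabled; the shape
// and probability below are illustrative, not part of the original API).
#[cfg(all(test, feature = "rand"))]
mod tests {
    use super::*;

    #[test]
    fn dropout_produces_a_binary_kept_or_dropped_output() {
        let layer = DropoutLayer::new(0.5);
        let input = Array::<f64, _>::ones((8, 8));
        let output = layer.forward(&input);
        // Each element of an all-ones input is either kept (1.0) or dropped (0.0).
        assert!(output.iter().all(|&x| x == 0.0 || x == 1.0));
    }
}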