use ndarray::{ArrayD, ArrayViewD};
use serde::{Deserialize, Serialize};

use nodes::Operation;

/// Elementwise activation function: leaky ReLU, sigmoid, or tanh.
#[derive(Debug, Serialize, Deserialize)]
pub enum Activation {
    /// Leaky ReLU; `leak` is the slope applied to non-positive inputs.
    Relu { leak: f32 },
    Sigmoid,
    Tanh,
}

impl Operation for Activation {
    /// Applies the activation elementwise to its single input.
    fn eval(&self, inputs: &[ArrayViewD<f32>]) -> ArrayD<f32> {
        assert_eq!(inputs.len(), 1, "Activation accepts one input");
        match self {
            Activation::Relu { leak } => inputs[0].mapv(|x| if x > 0.0 { x } else { x * leak }),
            Activation::Sigmoid => inputs[0].mapv(sig),
            Activation::Tanh => inputs[0].mapv(f32::tanh),
        }
    }
    /// Backpropagates `loss` through the activation: scales the incoming
    /// loss gradient by the activation's derivative (chain rule) and
    /// returns the gradient with respect to the single input.
    fn grad(&self, inputs: &[ArrayViewD<f32>], loss: ArrayViewD<f32>) -> Vec<ArrayD<f32>> {
        assert_eq!(inputs.len(), 1, "Activation accepts one input");

        let mut res = loss.to_owned();
        match self {
            Activation::Relu { leak } => {
                // d/dx leaky_relu(x) = 1 for x > 0 and `leak` otherwise; `<= 0.0`
                // keeps this consistent with `eval`, which takes the leak branch
                // at exactly zero.
                res.zip_mut_with(&inputs[0], |l, i| {
                    if *i <= 0.0 {
                        *l *= leak
                    }
                });
            }
            Activation::Sigmoid => {
                // d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x))
                res.zip_mut_with(&inputs[0], |l, i| {
                    let s = sig(*i);
                    *l *= s * (1.0 - s);
                });
            }
            Activation::Tanh => {
                // d/dx tanh(x) = 1 - tanh(x)^2
                res.zip_mut_with(&inputs[0], |l, i| {
                    *l *= 1.0 - i.tanh().powi(2);
                });
            }
        }
        vec![res]
    }
}

/// Logistic sigmoid: 1 / (1 + e^(-x)).
fn sig(x: f32) -> f32 {
    1.0 / (1.0 + (-x).exp())
}
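
// A minimal sanity-check sketch for the activations above: it verifies the
// sigmoid gradient against a central finite-difference estimate and checks
// that the leaky-ReLU gradient takes the leak branch at zero, matching
// `eval`. Test names, inputs, and tolerances are illustrative choices, not
// part of the original file.
#[cfg(test)]
mod tests {
    use super::*;
    use ndarray::arr1;

    #[test]
    fn sigmoid_grad_matches_finite_difference() {
        let act = Activation::Sigmoid;
        let x = arr1(&[0.5f32]).into_dyn();
        let ones = arr1(&[1.0f32]).into_dyn();

        // Analytic gradient, seeded with an incoming loss gradient of 1.
        let grad = act.grad(&[x.view()], ones.view());

        // Central finite difference: (f(x + h) - f(x - h)) / (2h).
        let h = 1e-3f32;
        let hi = act.eval(&[(&x + h).view()]);
        let lo = act.eval(&[(&x - h).view()]);
        let fd = (&hi - &lo) / (2.0 * h);

        assert!((grad[0][[0]] - fd[[0]]).abs() < 1e-3);
    }

    #[test]
    fn relu_grad_takes_leak_branch_at_zero() {
        let act = Activation::Relu { leak: 0.1 };
        let x = arr1(&[0.0f32]).into_dyn();
        let ones = arr1(&[1.0f32]).into_dyn();

        // `eval` multiplies by `leak` at exactly zero, so `grad` should too.
        let grad = act.grad(&[x.view()], ones.view());
        assert_eq!(grad[0][[0]], 0.1);
    }
}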