//! newron/layers/sigmoid.rs — element-wise sigmoid activation layer.
use crate::layers::layer::{Layer, LearnableParams};
use crate::tensor::Tensor;
4
5pub struct Sigmoid {
6    input: Tensor
7}
8
9impl Layer for Sigmoid {
10    fn get_info(&self) -> String {
11        format!("Sigmoid Layer")
12    }
13
14    fn forward(&mut self, input: Tensor, _training: bool) -> Tensor {
15        self.input = input;
16        self.input.map(|x| Sigmoid::sigmoid(x))
17    }
18
19    fn backward(&mut self, gradient: &Tensor) -> Tensor {
20        let tanh_grad = self.input.map(|x| Sigmoid::sigmoid_prime(x));
21        gradient.mult_el(&tanh_grad)
22    }
23
24    fn get_params_list(&self) -> Vec<LearnableParams> {
25        vec![]
26    }
27    
28    fn get_grad(&self, _param: &LearnableParams) -> &Tensor {
29        panic!("Layer does not have learnable parameters.")
30    }
31
32    fn get_param(&mut self, _param: &LearnableParams) -> &mut Tensor {
33        panic!("Layer does not have learnable parameters.")
34
35    }
36}
37
38impl Sigmoid {
39    pub fn sigmoid(x: f64) -> f64 {
40        1. / (1. + (-x).exp())
41    }
42
43    pub fn sigmoid_prime(x: f64) -> f64 {
44        Self::sigmoid(x) * (1. - Self::sigmoid(x))
45    }
46
47    pub fn new() -> Sigmoid {
48        Sigmoid {
49            input: Tensor::new(vec![], vec![])
50        }
51    }
52
53}