neural_network_rs/neural_network/activation_function/sigmoid.rs

use super::ActivationFunction;

// The sigmoid activation function: f(x) = 1 / (1 + e^(-x))
pub static SIGMOID: ActivationFunction = ActivationFunction {
    f: |x| {
        // For |x| > 45, the result is numerically indistinguishable from
        // 1.0 or 0.0 in f64, so return the saturated value directly
        // instead of evaluating exp.
        if x > 45.0 {
            1.0
        } else if x < -45.0 {
            0.0
        } else {
            1.0 / (1.0 + (-x).exp())
        }
    },

    // The derivative of the sigmoid: f'(x) = f(x) * (1 - f(x))
    d: |x| {
        let fx = (SIGMOID.f)(x);
        fx * (1.0 - fx)
    },
};
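
For reference, a minimal usage sketch. It assumes that the ActivationFunction struct stores f and d as plain fn(f64) -> f64 pointers and that the static is re-exported under a path like neural_network::activation_function::sigmoid (neither the struct definition nor the crate layout is shown here, so both are assumptions):

// Hypothetical import path, inferred from the file location above.
use neural_network::activation_function::sigmoid::SIGMOID;

fn main() {
    // Evaluate the sigmoid and its derivative at a few sample points.
    for &x in &[-2.0, 0.0, 2.0] {
        let y = (SIGMOID.f)(x);  // forward value in (0, 1)
        let dy = (SIGMOID.d)(x); // slope f(x) * (1 - f(x)), largest at x = 0
        println!("x = {x:5.1}  f(x) = {y:.6}  f'(x) = {dy:.6}");
    }
}

At x = 0 this prints f(x) = 0.5 and f'(x) = 0.25, the maximum of the derivative, which is a quick sanity check that the two function pointers are consistent with each other.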