neural_network_rs/neural_network/activation_function/relu.rs

use super::ActivationFunction;

// The ReLU activation function: f(x) = max(0, x).
// Its derivative is the step function. The derivative is mathematically
// undefined at x = 0; this follows the common convention of using 0 there.
pub static RELU: ActivationFunction = ActivationFunction {
    f: |x: f64| x.max(0.0),
    d: |x: f64| if x > 0.0 { 1.0 } else { 0.0 },
};
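
The `ActivationFunction` struct itself is imported from the parent module and is not shown here. A minimal sketch of what that definition might look like, assuming it holds two plain `fn(f64) -> f64` pointers (the field names `f` and `d` are taken from the initializer above; the repository's real definition may differ), together with a small usage check:

// Hypothetical sketch of activation_function/mod.rs, not the repo's actual code.
pub struct ActivationFunction {
    pub f: fn(f64) -> f64, // the activation itself
    pub d: fn(f64) -> f64, // its derivative, used during backpropagation
}

// Example usage: parentheses around the field access are required
// to call a function-pointer field.
fn main() {
    assert_eq!((RELU.f)(-2.0), 0.0);
    assert_eq!((RELU.f)(3.5), 3.5);
    assert_eq!((RELU.d)(-2.0), 0.0);
    assert_eq!((RELU.d)(3.5), 1.0);
}

Storing the pair as function pointers (rather than, say, a trait object) keeps the struct `'static`-friendly, so activations like `RELU` can be declared as plain `static` items with no allocation.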