opensrdk_kernel_method/neural_network/relu.rs
use super::ActivationFunction;
use std::f64::consts::PI;

#[derive(Clone, Debug)]
pub struct ReLU;

impl ActivationFunction for ReLU {
    /// Propagates the previous layer's kernel values through a ReLU unit
    /// using the closed-form arc-cosine expression.
    ///
    /// `previous_layer_kernel` is `(k(x, x'), k(x, x), k(x', x'))`.
    fn f(&self, previous_layer_kernel: (f64, f64, f64)) -> f64 {
        // Geometric mean of the two diagonal kernel values.
        let sqrt = (previous_layer_kernel.1 * previous_layer_kernel.2).sqrt();
        // Angle between the two inputs in the previous layer's feature space.
        let theta = (previous_layer_kernel.0 / sqrt).acos();

        // Divide the whole expression by 2π; the denominator must be
        // parenthesized, otherwise `/ 2.0 * PI` multiplies by π / 2 instead.
        sqrt * (theta.sin() + (PI - theta) * theta.cos()) / (2.0 * PI)
    }
}
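
// A minimal usage sketch, assuming only the trait and struct defined above:
// for identical inputs the correlation is 1, so theta = 0 and the expression
// reduces to k(x, x) * PI / (2.0 * PI) = k(x, x) / 2.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn identical_inputs_halve_the_previous_kernel() {
        let relu = ReLU;
        // Previous-layer kernel triple with x == x': (2.0, 2.0, 2.0).
        let k = relu.f((2.0, 2.0, 2.0));
        assert!((k - 1.0).abs() < 1e-12);
    }
}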