/// The sigmoid activation function. Scales all values nonlinearly to the range (0, 1).
pubfnsigmoid(n:f32)->f32{1./(1.+std::f32::consts::E.powf(-n))}/// The ReLU activation function. Equal to `n.max(0)`
pubfnrelu(n:f32)->f32{
n.max(0.)}/// Activation function that does nothing.
/// The identity (linear) activation function.
///
/// Returns its input unchanged; useful where an activation function is
/// required by an interface but no nonlinearity is wanted.
pub fn linear_activation(n: f32) -> f32 {
    n
}