//! # activation functions
//!
//! `activation_functions` is a collection of functions that can be used as
//! activation functions for machine learning.
/// Activation functions that take an `f32` parameter and return an `f32`.
pub mod f32 {
    /// Calculate the hyperbolic tangent (`tanh`) of the given `f32` number.
    ///
    /// # Examples
    ///
    /// ```
    /// let x: f32 = 0.5;
    /// let answer: f32 = activation_functions::f32::tanh(x);
    ///
    /// println!("tanh({}) => {}", x, answer);
    /// ```
    pub fn tanh(x: f32) -> f32 {
        // tanh(x) = 2 / (1 + e^(-2x)) - 1
        (2.0 / (1.0 + (-2.0 * x).exp())) - 1.0
    }
    /// Calculate the `rectified linear unit` (ReLU) of the given `f32` number:
    /// `x` for positive inputs, `0` otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// let x: f32 = 0.5;
    /// let answer: f32 = activation_functions::f32::relu(x);
    ///
    /// println!("relu({}) => {}", x, answer);
    /// ```
    pub fn relu(x: f32) -> f32 {
        if x > 0.0 {
            x
        } else {
            0.0
        }
    }
    /// Calculate the `sigmoid linear unit` (SiLU) of the given `f32` number:
    /// `x * sigmoid(x)`.
    ///
    /// # Examples
    ///
    /// ```
    /// let x: f32 = 0.5;
    /// let answer: f32 = activation_functions::f32::silu(x);
    ///
    /// println!("silu({}) => {}", x, answer);
    /// ```
    pub fn silu(x: f32) -> f32 {
        // x * sigmoid(x) == x / (1 + e^(-x))
        x / (1.0 + (-x).exp())
    }
    /// Calculate the `gaussian` of the given `f32` number: `e^(-x^2)`.
    ///
    /// # Examples
    ///
    /// ```
    /// let x: f32 = 0.5;
    /// let answer: f32 = activation_functions::f32::gaussian(x);
    ///
    /// println!("gaussian({}) => {}", x, answer);
    /// ```
    pub fn gaussian(x: f32) -> f32 {
        (-(x * x)).exp()
    }
}
/// containing the activation functions which need an f64 parameter and return a f64
/// calculate the `tanh` to the given f64 number
///
/// # Examples
///
/// ```
/// let x:f64 = 0.5;
/// let answer:f64 = tanh(x);
///
/// println!("tanh({}) => {}",x,answer);
/// ```
/// calculate the `rectified linear unit` to the given f64 number
///
/// # Examples
///
/// ```
/// let x:f64 = 0.5;
/// let answer:f64 = relu(x);
///
/// println!("relu({}) => {}",x,answer);
/// ```
else
}
/// calculate the `sigmoid linear unit` to the given f64 number
///
/// # Examples
///
/// ```
/// let x:f64 = 0.5;
/// let answer:f64 = silu(x);
///
/// println!("silu({}) => {}",x,answer);
/// ```
/// calculate the `gaussian` to the given f64 number
///
/// # Examples
///
/// ```
/// let x:f64 = 0.5;
/// let answer:f64 = gaussian(x);
///
/// println!("gaussian({}) => {}",x,answer);
/// ```
}