#[repr(i32)]
pub enum ActivationType {
kRELU = 0,
kSIGMOID = 1,
kTANH = 2,
kLEAKY_RELU = 3,
kELU = 4,
kSELU = 5,
kSOFTSIGN = 6,
kSOFTPLUS = 7,
kCLIP = 8,
kHARD_SIGMOID = 9,
kSCALED_TANH = 10,
kTHRESHOLDED_RELU = 11,
kGELU_ERF = 12,
kGELU_TANH = 13,
}
Enumerates the types of activation to perform in an activation layer. Each variant documents its formula below; a runnable sketch of those formulas follows the variant list.
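Because the enum is #[repr(i32)] with explicit discriminants, a variant can be cast to its underlying i32, for example when crossing an FFI boundary. A minimal sketch; the surrounding code is illustrative and not part of this crate's API:

fn main() {
    let act = ActivationType::kLEAKY_RELU;
    // Clone is implemented (see Trait Implementations), so the value can be duplicated.
    let copy = act.clone();
    // The #[repr(i32)] discriminant is stable: kLEAKY_RELU is 3.
    assert_eq!(copy as i32, 3);
    assert_eq!(act as i32, 3);
}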
Variants§
kRELU = 0
Rectified linear activation.
kSIGMOID = 1
Sigmoid activation.
kTANH = 2
TanH activation.
kLEAKY_RELU = 3
LeakyRelu activation: x >= 0 ? x : alpha * x.
kELU = 4
Elu activation: x >= 0 ? x : alpha * (exp(x) - 1).
kSELU = 5
Selu activation: x > 0 ? beta * x : beta * (alpha * exp(x) - alpha).
kSOFTSIGN = 6
Softsign activation: x / (1 + |x|).
kSOFTPLUS = 7
Parametric softplus activation: alpha * log(exp(beta * x) + 1).
kCLIP = 8
Clip activation: max(alpha, min(beta, x)).
kHARD_SIGMOID = 9
Hard sigmoid activation: max(0, min(1, alpha * x + beta)).
kSCALED_TANH = 10
Scaled tanh activation: alpha * tanh(beta * x).
kTHRESHOLDED_RELU = 11
Thresholded ReLU activation: x > alpha ? x : 0.
kGELU_ERF = 12
GELU erf activation: 0.5 * x * (1 + erf(sqrt(0.5) * x)).
kGELU_TANH = 13
GELU tanh activation: 0.5 * x * (1 + tanh(sqrt(2/pi) * (0.044715 * pow(x, 3) + x))).
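The formulas above translate directly into Rust. Below is a minimal sketch, not part of this crate's API: apply_activation and the alpha/beta parameters are hypothetical stand-ins for the per-layer activation parameters the docs reference, and since erf is not in Rust's standard library, the sketch assumes the libm crate.

use libm::erff; // assumed dependency: erf is not in std

/// Illustrative helper: applies the formula documented for each variant.
/// `alpha` and `beta` stand in for the layer's activation parameters.
fn apply_activation(t: ActivationType, x: f32, alpha: f32, beta: f32) -> f32 {
    match t {
        ActivationType::kRELU => x.max(0.0),
        ActivationType::kSIGMOID => 1.0 / (1.0 + (-x).exp()),
        ActivationType::kTANH => x.tanh(),
        ActivationType::kLEAKY_RELU => if x >= 0.0 { x } else { alpha * x },
        ActivationType::kELU => if x >= 0.0 { x } else { alpha * (x.exp() - 1.0) },
        ActivationType::kSELU => {
            if x > 0.0 { beta * x } else { beta * (alpha * x.exp() - alpha) }
        }
        ActivationType::kSOFTSIGN => x / (1.0 + x.abs()),
        ActivationType::kSOFTPLUS => alpha * ((beta * x).exp() + 1.0).ln(),
        // Equivalent to max(alpha, min(beta, x)).
        ActivationType::kCLIP => x.min(beta).max(alpha),
        ActivationType::kHARD_SIGMOID => (alpha * x + beta).clamp(0.0, 1.0),
        ActivationType::kSCALED_TANH => alpha * (beta * x).tanh(),
        ActivationType::kTHRESHOLDED_RELU => if x > alpha { x } else { 0.0 },
        ActivationType::kGELU_ERF => {
            // sqrt(0.5) == 1/sqrt(2)
            0.5 * x * (1.0 + erff(std::f32::consts::FRAC_1_SQRT_2 * x))
        }
        ActivationType::kGELU_TANH => {
            let inner = (2.0 / std::f32::consts::PI).sqrt() * (0.044715 * x.powi(3) + x);
            0.5 * x * (1.0 + inner.tanh())
        }
    }
}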
Trait Implementations§
impl Clone for ActivationType
fn clone(&self) -> ActivationType
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.