rust_bert/common/activations.rs

use serde::{Deserialize, Serialize};
use std::f64::consts::PI;
use tch::Tensor;

/// Gaussian Error Linear Unit: 0.5 * x * (1 + erf(x / sqrt(2))).
pub fn _gelu(x: &Tensor) -> Tensor {
    x * 0.5 * (1.0 + (x / ((2.0_f64).sqrt())).erf())
}

/// Rectified Linear Unit: max(0, x).
pub fn _relu(x: &Tensor) -> Tensor {
    x.relu()
}

/// Swish (also known as SiLU): x * sigmoid(x).
pub fn _swish(x: &Tensor) -> Tensor {
    x * x.sigmoid()
}

/// Mish: x * tanh(softplus(x)).
pub fn _mish(x: &Tensor) -> Tensor {
    x * (x.softplus().tanh())
}

/// Tanh approximation of GELU: 0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3))).
pub fn _gelu_new(x: &Tensor) -> Tensor {
    x * 0.5 * (((x.pow_tensor_scalar(3.0f64) * 0.044715 + x) * ((2f64 / PI).sqrt())).tanh() + 1)
}

/// Hyperbolic tangent.
pub fn _tanh(x: &Tensor) -> Tensor {
    x.tanh()
}

/// Identity function, returning a shallow clone of the input tensor.
pub fn _identity(x: &Tensor) -> Tensor {
    x.shallow_clone()
}

/// Wrapper around a pointer to an activation function, allowing activations
/// to be stored in model configurations and shared across layers.
pub struct TensorFunction(Box<fn(&Tensor) -> Tensor>);

impl TensorFunction {
    pub fn new(fun: Box<fn(&Tensor) -> Tensor>) -> Self {
        Self(fun)
    }

    /// Returns a reference to the wrapped function pointer.
    pub fn get_fn(&self) -> &fn(&Tensor) -> Tensor {
        &self.0
    }
}
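
// A minimal construction sketch (an addition for illustration, not part of
// the original file): `TensorFunction` can wrap any `fn(&Tensor) -> Tensor`,
// not only the activations above. `squared_relu` here is a hypothetical
// custom activation.
#[allow(dead_code)]
fn squared_relu_function() -> TensorFunction {
    fn squared_relu(x: &Tensor) -> Tensor {
        // relu(x)^2, computed with the same tch operations used above.
        x.relu().pow_tensor_scalar(2.0f64)
    }
    TensorFunction::new(Box::new(squared_relu))
}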

impl std::fmt::Debug for TensorFunction {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        write!(f, "TensorFunction")
    }
}

#[allow(non_camel_case_types)]
#[derive(Clone, Debug, Serialize, Deserialize, Copy)]
/// # Activation function used in the attention layer and masked language model head
pub enum Activation {
    /// Gaussian Error Linear Unit ([Hendrycks et al., 2016](https://arxiv.org/abs/1606.08415))
    gelu,
    /// Rectified Linear Unit
    relu,
    /// Swish ([Ramachandran et al., 2017](https://arxiv.org/abs/1710.05941))
    swish,
    /// Mish ([Misra, 2019](https://arxiv.org/abs/1908.08681))
    mish,
    /// Gaussian Error Linear Unit (New) ([Hendrycks et al., 2016](https://arxiv.org/abs/1606.08415))
    gelu_new,
    /// Tanh
    tanh,
    /// Identity
    identity,
}

impl Activation {
    /// Returns a [`TensorFunction`] wrapping the implementation of this activation.
    pub fn get_function(&self) -> TensorFunction {
        TensorFunction::new(Box::new(match self {
            Activation::gelu => _gelu,
            Activation::relu => _relu,
            Activation::swish => _swish,
            Activation::gelu_new => _gelu_new,
            Activation::mish => _mish,
            Activation::tanh => _tanh,
            Activation::identity => _identity,
        }))
    }
}
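
// A minimal usage sketch (an addition for illustration, not part of the
// original file): layers typically resolve the activation once from their
// configuration, store the resulting `TensorFunction`, and call it through
// `get_fn` during the forward pass.
#[allow(dead_code)]
fn apply_activation(hidden: &Tensor, activation: Activation) -> Tensor {
    let act = activation.get_function();
    (act.get_fn())(hidden)
}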

#[cfg(test)]
mod test {
    use super::*;
    #[test]
    #[ignore] // compilation alone verifies that `TensorFunction` is `Send`
    fn tensorfunction_send() {
        let _: Box<dyn Send> = Box::new(Activation::gelu.get_function());
    }
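
    // A hedged usage test (an addition, not in the original source): resolves
    // ReLU through the enum and checks that it zeroes out negative inputs.
    // Depending on the tch version in use, `Tensor::of_slice` may instead be
    // named `Tensor::from_slice`.
    #[test]
    fn activation_get_function_applies() {
        let relu = Activation::relu.get_function();
        let input = Tensor::of_slice(&[-1.0f64, 0.0, 2.0]);
        let output = (relu.get_fn())(&input);
        assert_eq!(output.double_value(&[0]), 0.0);
        assert_eq!(output.double_value(&[2]), 2.0);
    }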
}