//! Activation functions.
use candle::{Result, Tensor};

#[derive(Debug, Clone, Copy, PartialEq, serde::Deserialize, serde::Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum Activation {
    #[default]
    #[serde(alias = "gelu")]
    Gelu,
    #[serde(alias = "gelu_new")]
    NewGelu,
    Relu,
    Relu2,
    Relu6,
    Silu,
    Sigmoid,
    HardSigmoid,
    Swiglu,
    Swish,
    HardSwish,
    Elu(f64),
    LeakyRelu(f64),
    #[serde(alias = "gelu_pytorch_tanh")]
    GeluPytorchTanh,
}
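
// Usage sketch: the lowercase serde names (plus the aliases above) let this
// enum be deserialized directly from model config files. Assuming the
// `serde_json` crate is available:
//
//     let act: Activation = serde_json::from_str("\"gelu_new\"").unwrap();
//     assert_eq!(act, Activation::NewGelu);
//     let act: Activation = serde_json::from_str("\"relu6\"").unwrap();
//     assert_eq!(act, Activation::Relu6);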

impl super::Module for Activation {
    fn forward(&self, xs: &Tensor) -> Result<Tensor> {
        match self {
            // Exact GELU, computed via the error function.
            Self::Gelu => xs.gelu_erf(),
            // "New GELU": the tanh-based approximation, as in the Hugging
            // Face transformers `activations.py` reference implementation.
            Self::NewGelu => xs.gelu(),
            Self::Relu => xs.relu(),
            // Squared ReLU: max(0, x)^2.
            Self::Relu2 => xs.relu()?.sqr(),
            Self::Relu6 => xs.clamp(0f32, 6f32),
            Self::Silu => xs.silu(),
            Self::Sigmoid => crate::ops::sigmoid(xs),
            Self::HardSigmoid => crate::ops::hard_sigmoid(xs),
            Self::Swiglu => crate::ops::swiglu(xs),
            // Swish with beta = 1, i.e. x * sigmoid(x) (same as SiLU).
            Self::Swish => xs * crate::ops::sigmoid(xs)?,
            Self::HardSwish => xs * crate::ops::hard_sigmoid(xs)?,
            &Self::Elu(alpha) => xs.elu(alpha),
            &Self::LeakyRelu(negative_slope) => crate::ops::leaky_relu(xs, negative_slope),
            // PyTorch's tanh-approximated GELU matches candle's `gelu`.
            Self::GeluPytorchTanh => xs.gelu(),
        }
    }
}
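
// Usage sketch: any variant can be applied through the `Module` trait.
// Assuming a CPU device for illustration:
//
//     use candle::{Device, Module, Tensor};
//
//     let xs = Tensor::new(&[-1f32, 0., 1.], &Device::Cpu)?;
//     let ys = Activation::Relu.forward(&xs)?; // [0., 0., 1.]
//     let ys = Activation::LeakyRelu(0.1).forward(&xs)?; // [-0.1, 0., 1.]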

/// Parametric ReLU: a leaky ReLU whose negative slope is a learnable
/// parameter, either a single scalar shared across the whole tensor or one
/// value per channel.
#[derive(Clone, Debug)]
pub struct PReLU {
    weight: Tensor,
    is_scalar: bool,
}

impl PReLU {
    pub fn new(weight: Tensor, is_scalar: bool) -> Self {
        Self { weight, is_scalar }
    }

    pub fn weight(&self) -> &Tensor {
        &self.weight
    }

    pub fn is_scalar(&self) -> bool {
        self.is_scalar
    }
}

impl candle::Module for PReLU {
    fn forward(&self, xs: &Tensor) -> Result<Tensor> {
        let weight = if self.is_scalar {
            // A scalar weight is stored as a rank-1 tensor of size 1; reshape
            // it to rank 0 so it broadcasts against any input shape.
            self.weight.reshape(())?
        } else if xs.shape() == self.weight.shape() {
            self.weight.clone()
        } else if xs.rank() >= 2 {
            // Channel-wise weights: reshape to (1, C, 1, ...) so the slopes
            // broadcast along dimension 1.
            let num_channels = xs.dim(1)?;
            let num_weights = self.weight.elem_count();
            if num_weights != num_channels {
                candle::bail!("error in prelu: unexpected number of channels for the input, got {num_channels}, weight dim is {num_weights}")
            }
            let mut s = vec![1; xs.rank()];
            s[1] = num_weights;
            self.weight.reshape(s)?
        } else {
            self.weight.clone()
        };
        // prelu(x) = max(0, x) + weight * min(0, x)
        let zeros = xs.zeros_like()?;
        xs.maximum(&zeros)? + xs.minimum(&zeros)?.broadcast_mul(&weight)?
    }
}
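
// Usage sketch: a channel-wise PReLU on a (batch, channels) input. The
// weight holds one negative slope per channel and is broadcast along
// dimension 1. Assuming a CPU device for illustration:
//
//     use candle::{Device, Module, Tensor};
//
//     let slopes = Tensor::new(&[0.1f32, 0.5], &Device::Cpu)?;
//     let prelu = PReLU::new(slopes, /* is_scalar= */ false);
//     let xs = Tensor::new(&[[-2f32, -2.], [3., 3.]], &Device::Cpu)?;
//     let ys = prelu.forward(&xs)?; // [[-0.2, -1.0], [3.0, 3.0]]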

/// Create or initialize a new PReLU layer.
///
/// The learnable parameter is registered under the name `"weight"` and
/// initialized to 0.25. Pass `num_channels: None` for a single trainable
/// slope shared by every element, or `Some(c)` for one slope per channel; in
/// the channel-wise case the input to `forward` should carry `c` channels in
/// dimension 1 (or be a 1D tensor with `c` elements).
pub fn prelu(num_channels: Option<usize>, vs: crate::VarBuilder) -> Result<PReLU> {
    let init_ws = crate::init::Init::Const(0.25);
    // Even in the scalar case the weight is stored as a rank-1 tensor of
    // size 1, matching the PyTorch encoding.
    let ws = vs.get_with_hints((num_channels.unwrap_or(1),), "weight", init_ws)?;
    Ok(PReLU::new(ws, num_channels.is_none()))
}
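
// Usage sketch for the constructor above, with weights created through a
// `VarMap`-backed `VarBuilder` (names here are illustrative):
//
//     use candle::{DType, Device};
//     use candle_nn::{VarBuilder, VarMap};
//
//     let varmap = VarMap::new();
//     let vb = VarBuilder::from_varmap(&varmap, DType::F32, &Device::Cpu);
//     // 16 per-channel slopes, each initialized to 0.25.
//     let layer = prelu(Some(16), vb)?;
//     // Or a single shared slope instead:
//     // let layer = prelu(None, vb)?;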