1use candle::{Result, Tensor};
4
/// Activation functions selectable from model configuration files.
///
/// Deserializes from lowercase variant names (via `rename_all`); the serde
/// aliases additionally accept the spellings used by common model configs,
/// e.g. `"gelu_new"` and `"gelu_pytorch_tanh"`.
#[derive(Debug, Clone, Copy, PartialEq, serde::Deserialize, serde::Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum Activation {
    /// GELU computed with the error function (see `forward`); the default.
    #[default]
    #[serde(alias = "gelu")]
    Gelu,
    /// "New" GELU; mapped to `Tensor::gelu` in `forward`.
    #[serde(alias = "gelu_new")]
    NewGelu,
    Relu,
    /// Squared ReLU: `relu(x)^2`.
    Relu2,
    /// ReLU clamped to the `[0, 6]` range.
    Relu6,
    Silu,
    Sigmoid,
    HardSigmoid,
    Swiglu,
    Swish,
    Mish,
    HardSwish,
    /// ELU with a configurable `alpha` coefficient.
    Elu(f64),
    /// Leaky ReLU with a configurable negative slope.
    LeakyRelu(f64),
    /// PyTorch's tanh-based GELU; mapped to `Tensor::gelu` in `forward`.
    #[serde(alias = "gelu_pytorch_tanh")]
    GeluPytorchTanh,
}
28
29impl super::Module for Activation {
30 fn forward(&self, xs: &Tensor) -> Result<Tensor> {
31 match self {
32 Self::Gelu => xs.gelu_erf(),
33 Self::NewGelu => xs.gelu(),
35 Self::Relu => xs.relu(),
36 Self::Relu2 => xs.relu()?.sqr(),
37 Self::Relu6 => xs.clamp(0f32, 6f32),
38 Self::Silu => xs.silu(),
39 Self::Sigmoid => crate::ops::sigmoid(xs),
40 Self::HardSigmoid => crate::ops::hard_sigmoid(xs),
41 Self::Swiglu => crate::ops::swiglu(xs),
42 Self::Swish => xs * crate::ops::sigmoid(xs)?,
43 Self::HardSwish => xs * crate::ops::hard_sigmoid(xs)?,
44 Self::Mish => crate::ops::mish(xs),
45 &Self::Elu(alpha) => xs.elu(alpha),
46 &Self::LeakyRelu(negative_slope) => crate::ops::leaky_relu(xs, negative_slope),
47 Self::GeluPytorchTanh => xs.gelu(),
48 }
49 }
50}
51
/// Parametric ReLU: keeps positive inputs unchanged and scales negative
/// inputs by a learnable `weight` (see the `Module` impl).
#[derive(Clone, Debug)]
pub struct PReLU {
    // Learnable slope(s) applied to the negative part of the input.
    weight: Tensor,
    // When true, `weight` is treated as a single value shared by all
    // elements; otherwise it is broadcast along dim 1 (channels).
    is_scalar: bool,
}
57
58impl PReLU {
59 pub fn new(weight: Tensor, is_scalar: bool) -> Self {
60 Self { weight, is_scalar }
61 }
62
63 pub fn weight(&self) -> &Tensor {
64 &self.weight
65 }
66
67 pub fn is_scalar(&self) -> bool {
68 self.is_scalar
69 }
70}
71
impl candle::Module for PReLU {
    /// Computes `max(xs, 0) + weight * min(xs, 0)` element-wise.
    ///
    /// The weight is first reshaped so it broadcasts against `xs`:
    /// - a scalar weight becomes a 0-d tensor;
    /// - a weight whose shape already matches `xs` is used as-is;
    /// - otherwise, for inputs of rank >= 2, the weight is laid out along
    ///   dim 1 (assumed to be the channel dimension) and must have exactly
    ///   as many elements as `xs` has channels, or this errors.
    fn forward(&self, xs: &Tensor) -> Result<Tensor> {
        let weight = if self.is_scalar {
            // A 0-d tensor broadcasts against inputs of any shape.
            self.weight.reshape(())?
        } else if xs.shape() == self.weight.shape() {
            // Shapes already match; no reshaping needed.
            self.weight.clone()
        } else if xs.rank() >= 2 {
            // Reshape the per-channel weight to [1, C, 1, ..., 1] so it
            // broadcasts over every dimension except dim 1.
            let num_channels = xs.dim(1)?;
            let num_weights = self.weight.elem_count();
            if num_weights != num_channels {
                candle::bail!("error in prelu: unexpected number of channels for the input, got {num_channels}, weight dim is {num_weights}")
            }
            let mut s = vec![1; xs.rank()];
            s[1] = num_weights;
            self.weight.reshape(s)?
        } else {
            // Rank-0/1 input with a non-matching weight: fall back to the
            // weight as-is and rely on broadcast_mul below.
            self.weight.clone()
        };
        // Positive part passes through unchanged; only the negative part
        // (min with zero) is scaled by the weight.
        let zeros = xs.zeros_like()?;
        xs.maximum(&zeros)? + xs.minimum(&zeros)?.broadcast_mul(&weight)?
    }
}
94
95pub fn prelu(num_channels: Option<usize>, vs: crate::VarBuilder) -> Result<PReLU> {
105 let init_ws = crate::init::Init::Const(0.25);
106 let ws = vs.get_with_hints((num_channels.unwrap_or(1),), "weight", init_ws)?;
108 Ok(PReLU::new(ws, num_channels.is_none()))
109}