// ghostflow_nn/activation.rs

1//! Activation function modules
2
3use ghostflow_core::Tensor;
4use crate::module::Module;
5
6/// ReLU activation module
7pub struct ReLU;
8
9impl ReLU {
10    pub fn new() -> Self { ReLU }
11}
12
13impl Default for ReLU {
14    fn default() -> Self { Self::new() }
15}
16
17impl Module for ReLU {
18    fn forward(&self, input: &Tensor) -> Tensor {
19        input.relu()
20    }
21    fn parameters(&self) -> Vec<Tensor> { vec![] }
22    fn train(&mut self) {}
23    fn eval(&mut self) {}
24    fn is_training(&self) -> bool { false }
25}
26
27/// Leaky ReLU activation module
28pub struct LeakyReLU {
29    alpha: f32,
30}
31
32impl LeakyReLU {
33    pub fn new(alpha: f32) -> Self {
34        LeakyReLU { alpha }
35    }
36}
37
38impl Default for LeakyReLU {
39    fn default() -> Self { Self::new(0.01) }
40}
41
42impl Module for LeakyReLU {
43    fn forward(&self, input: &Tensor) -> Tensor {
44        input.leaky_relu(self.alpha)
45    }
46    fn parameters(&self) -> Vec<Tensor> { vec![] }
47    fn train(&mut self) {}
48    fn eval(&mut self) {}
49    fn is_training(&self) -> bool { false }
50}
51
52/// GELU activation module
53pub struct GELU;
54
55impl GELU {
56    pub fn new() -> Self { GELU }
57}
58
59impl Default for GELU {
60    fn default() -> Self { Self::new() }
61}
62
63impl Module for GELU {
64    fn forward(&self, input: &Tensor) -> Tensor {
65        input.gelu()
66    }
67    fn parameters(&self) -> Vec<Tensor> { vec![] }
68    fn train(&mut self) {}
69    fn eval(&mut self) {}
70    fn is_training(&self) -> bool { false }
71}
72
73/// Sigmoid activation module
74pub struct Sigmoid;
75
76impl Sigmoid {
77    pub fn new() -> Self { Sigmoid }
78}
79
80impl Default for Sigmoid {
81    fn default() -> Self { Self::new() }
82}
83
84impl Module for Sigmoid {
85    fn forward(&self, input: &Tensor) -> Tensor {
86        input.sigmoid()
87    }
88    fn parameters(&self) -> Vec<Tensor> { vec![] }
89    fn train(&mut self) {}
90    fn eval(&mut self) {}
91    fn is_training(&self) -> bool { false }
92}
93
94/// Tanh activation module
95pub struct Tanh;
96
97impl Tanh {
98    pub fn new() -> Self { Tanh }
99}
100
101impl Default for Tanh {
102    fn default() -> Self { Self::new() }
103}
104
105impl Module for Tanh {
106    fn forward(&self, input: &Tensor) -> Tensor {
107        input.tanh()
108    }
109    fn parameters(&self) -> Vec<Tensor> { vec![] }
110    fn train(&mut self) {}
111    fn eval(&mut self) {}
112    fn is_training(&self) -> bool { false }
113}
114
115/// SiLU/Swish activation module
116pub struct SiLU;
117
118impl SiLU {
119    pub fn new() -> Self { SiLU }
120}
121
122impl Default for SiLU {
123    fn default() -> Self { Self::new() }
124}
125
126impl Module for SiLU {
127    fn forward(&self, input: &Tensor) -> Tensor {
128        input.silu()
129    }
130    fn parameters(&self) -> Vec<Tensor> { vec![] }
131    fn train(&mut self) {}
132    fn eval(&mut self) {}
133    fn is_training(&self) -> bool { false }
134}
135
136/// Softmax activation module
137pub struct Softmax {
138    dim: i32,
139}
140
141impl Softmax {
142    pub fn new(dim: i32) -> Self {
143        Softmax { dim }
144    }
145}
146
147impl Default for Softmax {
148    fn default() -> Self { Self::new(-1) }
149}
150
151impl Module for Softmax {
152    fn forward(&self, input: &Tensor) -> Tensor {
153        input.softmax(self.dim)
154    }
155    fn parameters(&self) -> Vec<Tensor> { vec![] }
156    fn train(&mut self) {}
157    fn eval(&mut self) {}
158    fn is_training(&self) -> bool { false }
159}