burn_nn/activation/thresholded_relu.rs

use burn::config::Config;
use burn::module::Module;
use burn::module::{Content, DisplaySettings, ModuleDisplay};
use burn::tensor::Tensor;
use burn::tensor::backend::Backend;
use burn_core as burn;

use burn::tensor::activation::thresholded_relu;

/// Thresholded ReLU layer.
///
/// Should be created with [ThresholdedReluConfig](ThresholdedReluConfig).
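///
/// Applies the element-wise function `f(x) = x` if `x > alpha`, else `0`
/// (the standard thresholded ReLU definition).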
#[derive(Module, Clone, Debug)]
#[module(custom_display)]
pub struct ThresholdedRelu {
    /// The alpha threshold.
    pub alpha: f64,
}

/// Configuration to create a [ThresholdedRelu](ThresholdedRelu) layer using the [init function](ThresholdedReluConfig::init).
#[derive(Config, Debug)]
pub struct ThresholdedReluConfig {
    /// The alpha threshold. Default is 1.0.
    #[config(default = "1.0")]
    pub alpha: f64,
}

impl ThresholdedReluConfig {
    /// Initialize a new [ThresholdedRelu](ThresholdedRelu) layer.
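    ///
    /// # Example
    ///
    /// A minimal usage sketch; it assumes the `new()` constructor and the
    /// `with_alpha` setter generated by `#[derive(Config)]`:
    ///
    /// ```ignore
    /// // Build a layer with a custom threshold instead of the default 1.0.
    /// let layer = ThresholdedReluConfig::new().with_alpha(0.5).init();
    /// ```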
    pub fn init(&self) -> ThresholdedRelu {
        ThresholdedRelu { alpha: self.alpha }
    }
}

impl ModuleDisplay for ThresholdedRelu {
    fn custom_settings(&self) -> Option<DisplaySettings> {
        DisplaySettings::new()
            .with_new_line_after_attribute(false)
            .optional()
    }

    fn custom_content(&self, content: Content) -> Option<Content> {
        content.add("alpha", &self.alpha).optional()
    }
}

impl ThresholdedRelu {
    /// Forward pass for the Thresholded ReLU layer.
    ///
    /// See [thresholded_relu](burn::tensor::activation::thresholded_relu) for more information.
    ///
    /// # Shapes
    /// - input: `[..., any]`
    /// - output: `[..., any]`
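    ///
    /// # Example
    ///
    /// A minimal sketch of a forward call; `TestBackend` stands in for any
    /// `Backend` implementation and `input` for a tensor already on its device:
    ///
    /// ```ignore
    /// let layer = ThresholdedReluConfig::new().init();
    /// // Elements greater than `alpha` pass through unchanged; all others
    /// // are mapped to zero.
    /// let output = layer.forward(input); // input: Tensor<TestBackend, 2>
    /// ```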
    pub fn forward<B: Backend, const D: usize>(&self, input: Tensor<B, D>) -> Tensor<B, D> {
        thresholded_relu(input, self.alpha)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::TestBackend;
    use burn::tensor::TensorData;

    #[test]
    fn test_thresholded_relu_forward() {
        let device = Default::default();
        let model: ThresholdedRelu = ThresholdedReluConfig::new().init();
        let input =
            Tensor::<TestBackend, 2>::from_data(TensorData::from([[0.5, 1.5, -0.2]]), &device);
        let out = model.forward(input);
        // With the default alpha of 1.0, only 1.5 exceeds the threshold;
        // 0.5 and -0.2 are zeroed out.
        let expected = TensorData::from([[0.0, 1.5, 0.0]]);
        out.to_data().assert_eq(&expected, false);
    }

    #[test]
    fn display() {
        let layer = ThresholdedReluConfig::new().init();
        assert_eq!(alloc::format!("{layer}"), "ThresholdedRelu {alpha: 1}");
    }
}