burn_nn/activation/prelu.rs

use burn::config::Config;
use burn::module::{Content, DisplaySettings, Initializer, Module, ModuleDisplay, Param};
use burn::tensor::Tensor;
use burn::tensor::backend::Backend;
use burn_core as burn;
/// Parametric ReLU layer.
///
/// Should be created using [PReluConfig].
#[derive(Module, Debug)]
#[module(custom_display)]
pub struct PRelu<B: Backend> {
    /// The learned weights for PReLU. Can be of shape \[1\] or \[num_parameters\]; in the latter
    /// case it must match the number of channels in the input tensor.
    pub alpha: Param<Tensor<B, 1>>,

    /// Alpha value for the PRelu layer.
    pub alpha_value: f64,
}

impl<B: Backend> ModuleDisplay for PRelu<B> {
    fn custom_settings(&self) -> Option<DisplaySettings> {
        DisplaySettings::new()
            .with_new_line_after_attribute(false)
            .optional()
    }

    fn custom_content(&self, content: Content) -> Option<Content> {
        let [num_parameters] = self.alpha.shape().dims();

        content
            .add("num_parameters", &num_parameters)
            .add("alpha_value", &self.alpha_value)
            .optional()
    }
}

/// Configuration to create a [Parametric ReLU](PRelu) layer using the [init function](PReluConfig::init).
#[derive(Config, Debug)]
pub struct PReluConfig {
    /// The number of parameters.
    #[config(default = "1")]
    pub num_parameters: usize,
    /// The learnable weight alpha. Default is 0.25.
    #[config(default = "0.25")]
    pub alpha: f64,
}

impl PReluConfig {
    /// Initialize a new [Parametric ReLU](PRelu) layer.
    pub fn init<B: Backend>(&self, device: &B::Device) -> PRelu<B> {
        PRelu {
            // alpha is a tensor of length num_parameters
            alpha: Initializer::Constant { value: self.alpha }.init([self.num_parameters], device),
            alpha_value: self.alpha,
        }
    }
}
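
// Usage sketch (added for illustration, not part of the original source). It assumes
// the `with_*` setters generated by `#[derive(Config)]`, a backend type `MyBackend`
// implementing `Backend`, and that alpha is broadcast over the channel dimension
// (dim 1) of the input when `num_parameters > 1`:
//
// let device = Default::default();
// let layer: PRelu<MyBackend> = PReluConfig::new()
//     .with_num_parameters(4) // one alpha entry per channel (4 channels assumed)
//     .with_alpha(0.1)        // initial value for every alpha entry
//     .init(&device);
// let input = Tensor::<MyBackend, 3>::zeros([2, 4, 8], &device);
// let output = layer.forward(input); // same shape as the input, `[2, 4, 8]`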

impl<B: Backend> PRelu<B> {
    /// Applies the forward pass on the input tensor.
    ///
    /// # Shapes
    ///
    /// - input: `[..., any]`
    /// - output: `[..., any]`
    ///
    /// See also [prelu](burn::tensor::activation::prelu) for more information.
    pub fn forward<const D: usize>(&self, input: Tensor<B, D>) -> Tensor<B, D> {
        burn::tensor::activation::prelu(input, self.alpha.val())
    }
}
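
// For reference (added note, not in the original source): PReLU computes
// `max(0, x) + alpha * min(0, x)`, i.e. the identity for non-negative inputs and a
// line of slope `alpha` for negative inputs. A scalar sketch of the same rule:
//
// fn prelu_scalar(x: f64, alpha: f64) -> f64 {
//     if x >= 0.0 { x } else { alpha * x }
// }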

#[cfg(test)]
mod tests {
    use super::*;
    use crate::TestBackend;

    #[test]
    fn display() {
        let layer = PReluConfig::new().init::<TestBackend>(&Default::default());

        assert_eq!(
            alloc::format!("{layer}"),
            "PRelu {num_parameters: 1, alpha_value: 0.25, params: 1}"
        );
    }
}