burn_nn/activation/prelu.rs

use burn::config::Config;
use burn::module::{Content, DisplaySettings, Initializer, Module, ModuleDisplay, Param};
use burn::tensor::Tensor;
use burn::tensor::backend::Backend;
use burn_core as burn;
/// Parametric ReLU (PReLU) layer.
///
/// Should be created using [PReluConfig].
#[derive(Module, Debug)]
#[module(custom_display)]
pub struct PRelu<B: Backend> {
    /// The learnable weights of the PReLU layer, of shape `[num_parameters]`.
    pub alpha: Param<Tensor<B, 1>>,

    /// The initial value used to populate `alpha`.
    pub alpha_value: f64,
}

impl<B: Backend> ModuleDisplay for PRelu<B> {
    fn custom_settings(&self) -> Option<DisplaySettings> {
        DisplaySettings::new()
            .with_new_line_after_attribute(false)
            .optional()
    }

    fn custom_content(&self, content: Content) -> Option<Content> {
        let [num_parameters] = self.alpha.shape().dims();

        content
            .add("num_parameters", &num_parameters)
            .add("alpha_value", &self.alpha_value)
            .optional()
    }
}

/// Configuration to create a [PRelu] layer using the [init function](PReluConfig::init).
#[derive(Config, Debug)]
pub struct PReluConfig {
    /// The number of learnable alpha parameters.
    #[config(default = "1")]
    pub num_parameters: usize,
    /// The initial value of the learnable alpha parameter(s).
    #[config(default = "0.25")]
    pub alpha: f64,
}

impl PReluConfig {
    /// Initialize a new [PRelu] layer.
    pub fn init<B: Backend>(&self, device: &B::Device) -> PRelu<B> {
        PRelu {
            // alpha is a learnable tensor of length num_parameters,
            // with every element initialized to the configured constant.
            alpha: Initializer::Constant { value: self.alpha }
                .init([self.num_parameters], device),
            alpha_value: self.alpha,
        }
    }
}

impl<B: Backend> PRelu<B> {
    /// Applies the forward pass: `PReLU(x) = max(0, x) + alpha * min(0, x)`.
    ///
    /// # Shapes
    ///
    /// - input: `[..., any]`
    /// - output: `[..., any]`
    pub fn forward<const D: usize>(&self, input: Tensor<B, D>) -> Tensor<B, D> {
        burn::tensor::activation::prelu(input, self.alpha.val())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::TestBackend;

    #[test]
    fn display() {
        let layer = PReluConfig::new().init::<TestBackend>(&Default::default());

        assert_eq!(
            alloc::format!("{layer}"),
            "PRelu {num_parameters: 1, alpha_value: 0.25, params: 1}"
        );
    }
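
    // A configuration sketch, assuming the `with_*` builder methods that
    // burn's `Config` derive generates alongside `new()`: setting
    // `num_parameters` to the input channel count gives one learnable alpha
    // per channel. The channel count of 64 is arbitrary, for illustration only.
    #[test]
    fn config_channel_wise_alpha() {
        let layer = PReluConfig::new()
            .with_num_parameters(64)
            .with_alpha(0.1)
            .init::<TestBackend>(&Default::default());

        // One alpha per channel, each initialized to the configured value.
        let [num_parameters] = layer.alpha.shape().dims();
        assert_eq!(num_parameters, 64);
        assert_eq!(layer.alpha_value, 0.1);
    }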
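
    // A minimal forward-pass sketch. With the default config (a single alpha
    // of 0.25), PReLU passes non-negative values through unchanged and scales
    // negative values by alpha. Assumes `TestBackend` stores floats as `f32`
    // and that `TensorData::to_vec` is available; the chosen values are
    // exactly representable, so an exact comparison is safe here.
    #[test]
    fn forward_scales_negative_values_by_alpha() {
        let device = Default::default();
        let layer = PReluConfig::new().init::<TestBackend>(&device);
        let input = Tensor::<TestBackend, 1>::from_floats([-2.0, 0.0, 2.0], &device);

        let output = layer.forward(input);

        // PReLU(-2) = 0.25 * -2 = -0.5; zero and positive inputs pass through.
        let values = output.into_data().to_vec::<f32>().unwrap();
        assert_eq!(values, vec![-0.5, 0.0, 2.0]);
    }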
}