use crate as burn;
use crate::config::Config;
use crate::module::Param;
use crate::module::{Content, DisplaySettings, Module, ModuleDisplay};
use crate::nn::Initializer;
use crate::tensor::backend::Backend;
use crate::tensor::Tensor;
/// Parametric ReLU layer.
///
/// Holds a learnable `alpha` tensor that is passed to
/// [`crate::tensor::activation::prelu`] in [`PRelu::forward`].
///
/// Should be created with [PReluConfig].
#[derive(Module, Debug)]
#[module(custom_display)]
pub struct PRelu<B: Backend> {
    /// Learnable weight of shape `[num_parameters]` (see [PReluConfig::num_parameters]).
    pub alpha: Param<Tensor<B, 1>>,
    /// The constant used to initialize `alpha`; kept around for display purposes.
    pub alpha_value: f64,
}
impl<B: Backend> ModuleDisplay for PRelu<B> {
    /// Render attributes on a single line rather than one per line.
    fn custom_settings(&self) -> Option<DisplaySettings> {
        let settings = DisplaySettings::new().with_new_line_after_attribute(false);
        settings.optional()
    }

    /// Expose the alpha tensor length and its initial value in the display output.
    fn custom_content(&self, content: Content) -> Option<Content> {
        // Destructure the 1-D shape to obtain the number of alpha parameters.
        let [num_parameters] = self.alpha.shape().dims;
        let content = content.add("num_parameters", &num_parameters);
        let content = content.add("alpha_value", &self.alpha_value);
        content.optional()
    }
}
/// Configuration to create a [Parametric Relu](PRelu) layer using the [init function](PReluConfig::init).
#[derive(Config, Debug)]
pub struct PReluConfig {
    /// The number of learnable alpha parameters. Defaults to 1 (a single shared alpha).
    #[config(default = "1")]
    pub num_parameters: usize,
    /// The constant value used to initialize every alpha parameter. Defaults to 0.25.
    #[config(default = "0.25")]
    pub alpha: f64,
}
impl PReluConfig {
    /// Initialize a new [Parametric Relu](PRelu) layer on the given device.
    ///
    /// The alpha tensor has shape `[num_parameters]` and every element is
    /// set to the configured `alpha` constant.
    pub fn init<B: Backend>(&self, device: &B::Device) -> PRelu<B> {
        let initializer = Initializer::Constant { value: self.alpha };

        PRelu {
            alpha: initializer.init([self.num_parameters], device),
            alpha_value: self.alpha,
        }
    }
}
impl<B: Backend> PRelu<B> {
    /// Apply the parametric ReLU activation to `input`, using this layer's
    /// learnable alpha tensor. Delegates to [`crate::tensor::activation::prelu`].
    pub fn forward<const D: usize>(&self, input: Tensor<B, D>) -> Tensor<B, D> {
        let alpha = self.alpha.val();
        crate::tensor::activation::prelu(input, alpha)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::TestBackend;

    /// The default config renders a single-line summary with one parameter
    /// and the default alpha of 0.25.
    #[test]
    fn display() {
        let prelu = PReluConfig::new().init::<TestBackend>(&Default::default());
        let rendered = alloc::format!("{}", prelu);

        assert_eq!(
            rendered,
            "PRelu {num_parameters: 1, alpha_value: 0.25, params: 1}"
        );
    }
}