use crate as burn;
use crate::module::Module;
use crate::tensor::backend::Backend;
use crate::tensor::Tensor;

/// Applies the rectified linear unit function element-wise:
/// `relu(x) = max(0, x)`.
///
/// See also [relu](burn::tensor::activation::relu).
///
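/// # Example
///
/// A minimal usage sketch (`B` stands in for any [`Backend`], `D` for the
/// tensor rank; `input` is assumed to be an existing tensor):
///
/// ```rust,ignore
/// let layer = Relu::new();
/// // Negative entries are clamped to zero; the shape is unchanged.
/// let output: Tensor<B, D> = layer.forward(input);
/// ```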
#[derive(Module, Clone, Debug, Default)]
pub struct Relu;
impl Relu {
    /// Create the module.
    pub fn new() -> Self {
        Self {}
    }

    /// Applies the forward pass on the input tensor.
    ///
    /// # Shapes
    ///
    /// - input: `[..., any]`
    /// - output: `[..., any]`
    pub fn forward<B: Backend, const D: usize>(&self, input: Tensor<B, D>) -> Tensor<B, D> {
        crate::tensor::activation::relu(input)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn display() {
        let layer = Relu::new();
        assert_eq!(alloc::format!("{}", layer), "Relu");
    }
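
    // A minimal forward-pass check. This is a sketch that assumes a
    // `TestBackend` alias is exposed by this crate under #[cfg(test)]
    // (as other nn test modules in burn do) and that `TensorData::to_vec`
    // is available; swap in whatever test backend the crate provides.
    #[test]
    fn forward_clamps_negatives_to_zero() {
        let device = Default::default();
        let layer = Relu::new();

        let input = Tensor::<crate::TestBackend, 1>::from_floats([-2.0, 0.0, 3.0], &device);
        let output = layer.forward(input);

        // relu(x) = max(0, x): negatives map to 0, non-negatives pass through.
        let values = output.into_data().to_vec::<f32>().unwrap();
        assert_eq!(values, [0.0, 0.0, 3.0]);
    }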
}