use burn_core as burn;
use burn::module::Module;
use burn::tensor::Tensor;
use burn::tensor::backend::Backend;
/// Gated linear unit (GLU) activation layer.
///
/// Stateless apart from its configuration: `forward` delegates to
/// [`burn::tensor::activation::glu`], passing `dim` as the split dimension.
#[derive(Module, Clone, Debug, Default)]
pub struct GLU {
/// Dimension along which the input tensor is split by the `glu` activation.
dim: usize,
}
impl GLU {
pub fn new(dim: usize) -> Self {
Self { dim }
}
pub fn forward<B: Backend, const D: usize>(&self, input: Tensor<B, D>) -> Tensor<B, D> {
burn::tensor::activation::glu(input, self.dim)
}
}
#[cfg(test)]
mod tests {
    use super::*;

    // The derived `Module` implementation renders the layer's configuration
    // when formatted with `Display`; pin that exact output here.
    #[test]
    fn display() {
        let layer = GLU::new(1);
        let rendered = alloc::format!("{}", layer);
        assert_eq!(rendered, "GLU {\n dim: 1\n}");
    }
}