use crate::autograd::{Variable, layer_norm};
use crate::tensor::{Device, DType, Result, Tensor, TensorOptions};
use super::parameter::Parameter;
use super::Module;
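
/// Layer normalization over the last dimension of the input, with a
/// learnable per-element scale (`weight`) and shift (`bias`).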
pub struct LayerNorm {
    pub weight: Parameter,
    pub bias: Parameter,
    size: i64,
    eps: f64,
}

impl LayerNorm {
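    /// Creates a `LayerNorm` over `size` features on the CPU.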
    pub fn new(size: i64) -> Result<Self> {
        Self::on_device(size, Device::CPU)
    }
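
    /// Creates a `LayerNorm` over `size` features on `device`, using the
    /// conventional initialization (weight = 1, bias = 0) and eps = 1e-5.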
    pub fn on_device(size: i64, device: Device) -> Result<Self> {
        let opts = TensorOptions { dtype: DType::Float32, device };
        // Standard initialization: scale starts at one, shift at zero; both
        // are created with `requires_grad = true` so they are trainable.
        let weight = Variable::new(Tensor::ones(&[size], opts)?, true);
        let bias = Variable::new(Tensor::zeros(&[size], opts)?, true);
        Ok(LayerNorm {
            weight: Parameter {
                variable: weight,
                name: "weight".into(),
            },
            bias: Parameter {
                variable: bias,
                name: "bias".into(),
            },
            size,
            eps: 1e-5,
        })
    }
}

impl Module for LayerNorm {
    fn name(&self) -> &str {
        "layernorm"
    }

    fn forward(&self, input: &Variable) -> Result<Variable> {
        // Delegate to the autograd op so gradients flow back to `weight` and `bias`.
        layer_norm(input, &self.weight.variable, &self.bias.variable, self.size, self.eps)
    }

    fn parameters(&self) -> Vec<Parameter> {
        vec![self.weight.clone(), self.bias.clone()]
    }
}
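
// A minimal usage sketch: it relies only on APIs already used above
// (`Tensor::ones`, `Variable::new`, `Module::forward`); the feature count
// and batch shape are illustrative.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn constant_input_normalizes_to_bias() {
        let ln = LayerNorm::new(8).unwrap();
        let opts = TensorOptions { dtype: DType::Float32, device: Device::CPU };
        // Each row of a constant input has zero variance, so layer norm
        // maps it to `bias` (all zeros here), up to the `eps` stabilizer.
        let input = Variable::new(Tensor::ones(&[2, 8], opts).unwrap(), false);
        let output = ln.forward(&input).unwrap();
        let _ = output; // inspect with this crate's tensor accessors as needed
    }
}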