Function autograd::ops::batch_norm

pub fn batch_norm<A, B, C, T>(x: A, scale: B, shift: C) -> Tensor<T> where
    T: Float,
    A: AsRef<Tensor<T>>,
    B: AsRef<Tensor<T>>,
    C: AsRef<Tensor<T>>, 

Applies batch normalization.

scale and shift should be shared variables. Since normalization is performed along the first (batch) axis of x, both of them should have shape (1, x.shape[1]).

extern crate ndarray;
extern crate autograd as ag;

// Input minibatch: 3 samples, 4 features.
let ref x = ag::standard_normal(&[3, 4]);
// Learnable scale (gamma) and shift (beta), shaped (1, 4) to match the feature axis.
let ref scale = ag::variable(ag::ndarray_ext::ones::<f32>(&[1, 4]));
let ref shift = ag::variable(ag::ndarray_ext::zeros::<f32>(&[1, 4]));
let ref norm = ag::batch_norm(x, scale, shift);

// The output has the same shape as the input.
assert_eq!(norm.eval(&[]).unwrap().shape(), &[3, 4]);
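
For intuition, below is a minimal reference sketch of what batch normalization computes for a 2-D input of shape (batch, features). It is not the crate's actual implementation: the function name batch_norm_reference and the eps parameter are illustrative assumptions, and the epsilon used internally by ag::batch_norm may differ.

// Reference sketch only; assumes a plain (batch, features) layout.
fn batch_norm_reference(
    x: &[Vec<f32>],   // shape: (batch, features)
    scale: &[f32],    // shape: (features,), i.e. gamma
    shift: &[f32],    // shape: (features,), i.e. beta
    eps: f32,         // assumed small constant for numerical stability
) -> Vec<Vec<f32>> {
    let batch = x.len() as f32;
    let features = scale.len();
    let mut out = vec![vec![0.0f32; features]; x.len()];
    for j in 0..features {
        // Per-feature mean and variance, computed over the batch axis.
        let mean = x.iter().map(|row| row[j]).sum::<f32>() / batch;
        let var = x.iter().map(|row| (row[j] - mean).powi(2)).sum::<f32>() / batch;
        let denom = (var + eps).sqrt();
        for (i, row) in x.iter().enumerate() {
            // Standardize, then apply the learned scale (gamma) and shift (beta).
            out[i][j] = scale[j] * (row[j] - mean) / denom + shift[j];
        }
    }
    out
}

Each feature column is standardized with statistics taken over the batch axis and then rescaled and re-centered, which is why scale and shift carry one value per feature and broadcast over the batch dimension.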