Function autograd::ops::grad

pub fn grad<T, A, B>(ys: &[A], xs: &[B]) -> Vec<Tensor<T>> where
    T: Float,
    A: AsRef<Tensor<T>>,
    B: AsRef<Tensor<T>>, 

Returns symbolic gradient tensors of ys with respect to the input tensors xs.

Arguments

  • ys - Targets of differentiation; they may have arbitrary shapes.
  • xs - Tensors with respect to which ys are differentiated.

Returns

Symbolic gradient tensors for xs, returned in the same order as xs.
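
For instance, a single call with several xs yields one gradient tensor per input, in matching order. The following is a minimal sketch reusing the placeholder API from the example below:

extern crate autograd as ag;

let ref x = ag::placeholder::<f64>(&[]);
let ref y = ag::placeholder::<f64>(&[]);
let ref z = 2.*x*x + 3.*y + 1.;

// `grads[0]` is dz/dx and `grads[1]` is dz/dy, matching the order of `xs`.
let grads = ag::grad(&[z], &[x, y]);
let ref gx = grads[0];
let ref gy = grads[1];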

Example

Partial derivatives of z = 2x^2 + 3y + 1.
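
Analytically, dz/dx = 4x and dz/dy = 3, so the second derivative d^2z/dx^2 = 4; at x = 2, dz/dx = 8. These are the values asserted below.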

extern crate ndarray;
extern crate autograd as ag;

let ref x = ag::placeholder::<f64>(&[]);
let ref y = ag::placeholder::<f64>(&[]);
let ref z = 2.*x*x + 3.*y + 1.;

// dz/dy
let ref gy = ag::grad(&[z], &[y])[0];
// dz/dx
let ref gx = ag::grad(&[z], &[x])[0];

// d^2z/dx^2 (differentiates `z` again with respect to x)
let ref ggx = ag::grad(&[gx], &[x])[0];

// evaluation of symbolic gradients
assert_eq!(3., gy.eval(&[]).unwrap()[ndarray::IxDyn(&[])]);
assert_eq!(4., ggx.eval(&[]).unwrap()[ndarray::IxDyn(&[])]);

// Evaluating dz/dx requires the placeholder `x` to be filled.
assert_eq!(
    8.,
    gx.eval(&[ag::Feed(x, ndarray::arr0(2.).into_dyn().view())]).unwrap()[ndarray::IxDyn(&[])]
);

See also grad_with_default.