Function autograd::ops::grad
pub fn grad(ys: &[&Tensor], xs: &[&Tensor]) -> Vec<Tensor>
Returns gradient tensors of ys with respect to the input tensors xs.
Arguments
ys - Targets of differentiation.
xs - Tensors with respect to which ys are differentiated; one gradient tensor is returned for each element of xs.
NOTE: Each objective must be a scalar (rank-0 tensor). For multi-dimensional objectives, use grad_with_default (see the sketch after the example below).
Returns
Symbolic gradient tensors corresponding to xs, in the same order as xs.
Example
Partial derivatives of z = 2x^2 + 3y + 1.
extern crate ndarray;
extern crate autograd as ag;

let ref x = ag::placeholder(&[]);
let ref y = ag::placeholder(&[]);
let ref z = 2*x*x + 3*y + 1;

// dz/dy
let ref gy = ag::grad(&[z], &[y])[0];

// dz/dx
let ref gx = ag::grad(&[z], &[x])[0];

// d^2z/dx^2 (differentiates `z` again)
let ref ggx = ag::grad(&[gx], &[x])[0];

// evaluation of symbolic gradients
let mut ctx = ag::Context::new();
assert_eq!(3., gy.eval(&mut ctx)[ndarray::IxDyn(&[])]);
assert_eq!(4., ggx.eval(&mut ctx)[ndarray::IxDyn(&[])]);

// evaluating dz/dx requires feeding a value for the placeholder `x`
ctx.feed_input(x, ndarray::arr0(2.));
assert_eq!(8., gx.eval(&mut ctx)[ndarray::IxDyn(&[])]);
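For a non-scalar objective, the note above points to grad_with_default. The following is a minimal sketch of that pattern, not a verbatim excerpt from the crate docs: it assumes grad_with_default takes the already-known (seed) gradients of ys as its third argument and that ag::ones builds a tensor of ones with the given shape; check the grad_with_default documentation for the exact signature.

extern crate ndarray;
extern crate autograd as ag;

// `z` has shape [3], so it is not a valid objective for plain `grad`.
let ref x = ag::placeholder(&[3]);
let ref z = 2 * x;

// Seed gradient dL/dz with the same shape as `z` (assumed: `ag::ones` builds a ones tensor).
let ref gz = ag::ones(&[3]);

// dz/dx propagated from the seed gradient (assumed argument order: ys, xs, output_grads).
let ref gx = ag::grad_with_default(&[z], &[x], &[gz])[0];

let mut ctx = ag::Context::new();
ctx.feed_input(x, ndarray::arr1(&[1., 2., 3.]));
// Every element of dz/dx evaluates to 2.
println!("{:?}", gx.eval(&mut ctx));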