use super::{
    assert_almost_equals, new_backward_input, new_input, new_tensor, Backward, Data, Forward,
    Gradient, Overwrite, Rc, TanH, TanHBackward, Tensor,
};
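
// These tests cover the forward and backward passes of the `TanH` node.
// As a reminder of the math the expected values rely on:
//     tanh(x) = (e^x - e^{-x}) / (e^x + e^{-x})
//     d/dx tanh(x) = 1 - tanh(x)^2
// Every hard-coded tensor below follows from these two identities.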
mod forward {
    use super::{assert_almost_equals, new_input, new_tensor, Data, Forward, TanH, Tensor};

    #[test]
    fn creation() {
        let input = new_input((3, 3), vec![-4., -3., -2., -1., 0., 1., 2., 3., 4.]);
        let node = TanH::new(input);

        // A freshly created node exposes a zeroed buffer and is not yet computed.
        assert_eq!(*node.data(), Tensor::from_elem((3, 3), 0.));
        assert_eq!(*node.data_mut(), Tensor::from_elem((3, 3), 0.));
        assert!(!node.was_computed());
    }
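
    // `forward` sets the computed flag and `reset_computation` clears it;
    // repeated calls in either state are idempotent, which is what this walk
    // through the state transitions checks.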
    #[test]
    fn computation_was_computed_transition() {
        let input = new_input((3, 3), vec![-4., -3., -2., -1., 0., 1., 2., 3., 4.]);
        let node = TanH::new(input);

        node.forward();
        assert!(node.was_computed());

        node.forward();
        assert!(node.was_computed());

        node.reset_computation();
        assert!(!node.was_computed());

        node.reset_computation();
        assert!(!node.was_computed());
    }
    #[test]
    fn forward() {
        let input = new_input((3, 3), vec![-4., -3., -2., -1., 0., 1., 2., 3., 4.]);
        let node = TanH::new(input.clone());

        node.forward();
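        // Element-wise tanh of [-4, -3, ..., 4]; e.g. tanh(4) ≈ 0.99933 and
        // tanh(1) ≈ 0.76159, and since tanh is odd the negative inputs mirror
        // the positive ones.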
        assert_almost_equals(
            &*node.data(),
            &new_tensor(
                (3, 3),
                vec![
                    -0.99933, -0.99505, -0.96403, -0.76159, 0., 0.76159, 0.96403, 0.99505, 0.99933,
                ],
            ),
        );

        // Shift every input element by one (broadcast of a single-element tensor).
        {
            let mut data = input.data_mut();
            *data = &*data + &Tensor::from_elem(1, 1.);
        }
        assert_almost_equals(
            &*input.data(),
            &new_tensor((3, 3), vec![-3., -2., -1., 0., 1., 2., 3., 4., 5.]),
        );
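
        // Without a reset the node is still flagged as computed, so this
        // `forward` is a no-op and the cached values for the old input remain.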
        node.forward();
        assert_almost_equals(
            &*node.data(),
            &new_tensor(
                (3, 3),
                vec![
                    -0.99933, -0.99505, -0.96403, -0.76159, 0., 0.76159, 0.96403, 0.99505, 0.99933,
                ],
            ),
        );
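
        // After a reset, `forward` recomputes on the shifted input [-3, ..., 5];
        // the new last element is tanh(5) ≈ 0.999909.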
        node.reset_computation();
        node.forward();
        assert_almost_equals(
            &*node.data(),
            &new_tensor(
                (3, 3),
                vec![
                    -0.99505, -0.96403, -0.76159, 0., 0.76159, 0.96403, 0.99505, 0.99933, 0.999909,
                ],
            ),
        );
    }
    #[test]
    fn debug() {
        let input = new_input((3, 3), vec![1., 2., 3., 4., 5., 6., 7., 8., 9.]);
        let node = TanH::new(input);

        let output = "TanH { data: [[0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0]], shape=[3, 3], strides=[3, 1], layout=Cc (0x5), const ndim=2, computed: false }";
        assert_eq!(output, format!("{:?}", node));
    }
    #[test]
    fn display() {
        let input = new_input((3, 3), vec![1., 2., 3., 4., 5., 6., 7., 8., 9.]);
        let node = TanH::new(input);

        assert_eq!(format!("{}", node.data()), format!("{}", node));
    }
}

mod backward {
    use super::{
        assert_almost_equals, new_backward_input, new_input, new_tensor, Backward, Forward,
        Gradient, Overwrite, Rc, TanH, TanHBackward, Tensor,
    };

    #[test]
    fn creation() {
        let node = TanHBackward::new(
            new_backward_input(3, vec![0.; 3]),
            Rc::new(TanH::new(new_input(3, vec![1., 2., 3.]))),
        );

        assert_eq!(*node.gradient(), Tensor::from_elem(3, 0.));
        assert_eq!(*node.gradient_mut(), Tensor::from_elem(3, 0.));
        assert!(node.can_overwrite());
    }
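
    // `backward` writes into the operand's gradient and clears the operand's
    // overwrite flag, while the node's own flag is only ever changed through
    // `set_overwrite`. The walk below checks each transition, including that
    // repeated calls leave both flags unchanged.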
    #[test]
    fn computation_state_transition() {
        let diff = new_backward_input(3, vec![0.; 3]);
        let node = TanHBackward::new(
            diff.clone(),
            Rc::new(TanH::new(new_input(3, vec![1., 2., 3.]))),
        );

        node.backward();
        assert!(node.can_overwrite());
        assert!(!diff.can_overwrite());

        node.backward();
        assert!(node.can_overwrite());
        assert!(!diff.can_overwrite());

        diff.set_overwrite(true);
        assert!(node.can_overwrite());
        assert!(diff.can_overwrite());

        diff.set_overwrite(true);
        assert!(node.can_overwrite());
        assert!(diff.can_overwrite());

        node.set_overwrite(false);
        assert!(!node.can_overwrite());
        assert!(diff.can_overwrite());

        node.set_overwrite(false);
        assert!(!node.can_overwrite());
        assert!(diff.can_overwrite());

        node.backward();
        assert!(!node.can_overwrite());
        assert!(!diff.can_overwrite());

        node.backward();
        assert!(!node.can_overwrite());
        assert!(!diff.can_overwrite());
    }
    #[test]
    fn backward() {
        let diff = new_backward_input(3, vec![0.; 3]);
        let not_diff = Rc::new(TanH::new(new_input(3, vec![1., 2., 3.])));
        not_diff.forward();
        let node = TanHBackward::new(diff.clone(), not_diff);

        // Seed the node's gradient with ones.
        *node.gradient_mut() = new_tensor(3, vec![1.; 3]);
        assert_almost_equals(&*node.gradient(), &new_tensor(3, vec![1.; 3]));
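
        // With an upstream gradient of ones, the operand's gradient is the
        // local derivative 1 - tanh(x)^2 evaluated at x = [1, 2, 3]:
        //     1 - tanh(1)^2 ≈ 0.4199
        //     1 - tanh(2)^2 ≈ 0.07065
        //     1 - tanh(3)^2 ≈ 0.009865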
        node.backward();
        assert_almost_equals(
            &*diff.gradient(),
            &new_tensor(3, vec![0.4199, 0.07065, 0.009865]),
        );
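
        // A second backward pass accumulates, doubling every component.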
        node.backward();
        assert_almost_equals(
            &*diff.gradient(),
            &new_tensor(3, vec![0.8398, 0.1413, 0.01973]),
        );
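
        // Re-enabling overwrite makes the next pass replace the accumulated
        // gradient instead of adding to it.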
        diff.set_overwrite(true);
        node.backward();
        assert_almost_equals(
            &*diff.gradient(),
            &new_tensor(3, vec![0.4199, 0.07065, 0.009865]),
        );
    }
    #[test]
    fn debug() {
        let diff = new_backward_input(3, vec![0.; 3]);
        let not_diff = Rc::new(TanH::new(new_input(3, vec![1., 2., 3.])));
        let node = TanHBackward::new(diff, not_diff);

        let output = "TanHBackward { gradient: Some([0.0, 0.0, 0.0], shape=[3], strides=[1], layout=CFcf (0xf), const ndim=1), overwrite: true }";
        assert_eq!(output, format!("{:?}", node));
    }

    #[test]
    fn display() {
        let diff = new_backward_input(3, vec![0.; 3]);
        let not_diff = Rc::new(TanH::new(new_input(3, vec![1., 2., 3.])));
        let node = TanHBackward::new(diff, not_diff);

        assert_eq!(format!("{}", node.gradient()), format!("{}", node));
    }
    #[test]
    fn no_grad() {
        let node = TanHBackward::new(
            new_backward_input((3, 3), vec![0.; 9]),
            new_input((3, 3), vec![0.; 9]),
        );

        node.no_grad();
        assert!(node.gradient.borrow().is_none());

        node.with_grad();
        assert_eq!(&*node.gradient(), Tensor::zeros(node.shape));
    }
}