#![warn(missing_docs)]

//! A machine learning library with a dynamic automatic differentiation implementation.
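//!
//! A minimal sketch of the dynamic autodiff workflow, mirroring the `test_op` test at the
//! bottom of this file. It relies only on items that test already uses (`arr!`, `Arrays::new`,
//! `Array::op`, `backward`, `gradient`); the crate name in the `use` path is a placeholder and
//! the block is marked `ignore` rather than run as a doctest.
//!
//! ```ignore
//! use std::sync::Arc;
//!
//! // `crate_name` is a placeholder for this crate's package name; `arr!` is assumed to be
//! // `#[macro_export]`ed at the crate root.
//! use crate_name::{arr, array::*, numbers::*};
//!
//! // Forward pass: element-wise product of the two input arrays.
//! let mul: ForwardOp = Arc::new(|x: &[&Array]| {
//!     Arrays::new((
//!         x[0].dimensions(),
//!         x[0].values()
//!             .iter()
//!             .zip(x[1].values())
//!             .map(|(x, y)| x * y)
//!             .collect::<Vec<Float>>(),
//!     ))
//! });
//!
//! // Backward pass: the gradient with respect to each input is the other input
//! // multiplied by the incoming gradient.
//! let mul_clone = Arc::clone(&mul);
//! let mul_backward: BackwardOp = Arc::new(move |c: &mut Vec<Array>, x: &Array| {
//!     vec![
//!         Some(Array::op(&vec![&c[1], x], Arc::clone(&mul_clone), None)),
//!         Some(Array::op(&vec![&c[0], x], Arc::clone(&mul_clone), None)),
//!     ]
//! });
//!
//! let a = arr![1.0, 2.0, 3.0];
//! let b = arr![3.0, 2.0, 1.0];
//!
//! let mut product = Array::op(&vec![&a, &b], mul, Some(mul_backward));
//! product.backward(None);
//!
//! assert_eq!(a.gradient().unwrap(), arr![3.0, 2.0, 1.0]);
//! assert_eq!(b.gradient().unwrap(), arr![1.0, 2.0, 3.0]);
//! ```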

#[cfg(feature = "blas")]
extern crate libc;

pub mod numbers;
#[macro_use]
pub mod array;
pub mod activation;
#[cfg(feature = "blas")]
pub mod blas;
pub mod initializer;
pub mod layer;
pub mod layers;
pub mod model;
pub mod optimizer;
pub mod optimizers;

#[cfg(test)]
mod tests {
    use super::*;

    use std::sync::Arc;

    use array::*;
    use numbers::*;

    #[test]
    fn test_op() {
        // Forward pass: element-wise product of the two input arrays.
        let op: array::ForwardOp = Arc::new(|x: &[&Array]| {
            Arrays::new((
                x[0].dimensions(),
                x[0].values()
                    .iter()
                    .zip(x[1].values())
                    .map(|(x, y)| x * y)
                    .collect::<Vec<Float>>(),
            ))
        });

        // Clone the forward op so the move closure below can reuse it.
        let op_clone = Arc::clone(&op);
        // Backward pass: d(x * y)/dx = y and d(x * y)/dy = x, so each input's gradient
        // is the other input multiplied by the incoming gradient.
        let backward_op: array::BackwardOp = Arc::new(move |c: &mut Vec<Array>, x: &Array| {
            vec![
                Some(Array::op(&vec![&c[1], x], Arc::clone(&op_clone), None)),
                Some(Array::op(&vec![&c[0], x], Arc::clone(&op_clone), None)),
            ]
        });

        let a = arr![1.0, 2.0, 3.0];
        let b = arr![3.0, 2.0, 1.0];
        // Build the product through the custom op so it is tracked in the computation graph.
        let mut product = Array::op(&vec![&a, &b], op, Some(backward_op));
        assert_eq!(product, arr![3.0, 4.0, 3.0]);
        // Propagate gradients back through the custom op to both inputs.
        product.backward(None);
        assert_eq!(product.gradient().unwrap(), arr![1.0, 1.0, 1.0]);
        assert_eq!(b.gradient().unwrap(), arr![1.0, 2.0, 3.0]);
        assert_eq!(a.gradient().unwrap(), arr![3.0, 2.0, 1.0]);
    }
}