use af;
use af::{Array, MatProp};
use std::sync::{Arc, Mutex};
use activations;
use params::Params;
use layer::Layer;
/// A fully-connected (dense) layer: `a = activation(x · W + b)`.
///
/// The layer itself only stores its dimensions; weights, biases and the
/// activation name live in the shared [`Params`] passed to each call.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Dense {
    /// Number of input features (columns of the incoming batch).
    pub input_size: usize,
    /// Number of output units (columns of the produced activation).
    pub output_size: usize,
}
impl Layer for Dense
{
fn forward(&self, params: Arc<Mutex<Params>>, inputs: &Array)-> Array
{
let mut ltex = params.lock().unwrap();
let wx = af::matmul(&inputs , <ex.weights[0] , MatProp::NONE
, MatProp::NONE);
let z_t = af::transpose(&af::add(&af::transpose(&wx, false)
, <ex.biases[0], true), false);
let a_t = activations::get_activation(<ex.activations[0], &z_t).unwrap();
let current_unroll = ltex.current_unroll;
if ltex.inputs.len() > current_unroll { ltex.inputs[current_unroll] = inputs.clone();
ltex.outputs[current_unroll] = a_t.clone();
}else{ ltex.inputs.push(inputs.clone());
ltex.outputs.push(a_t.clone());
}
ltex.current_unroll += 1;
a_t.clone() }
fn backward(&self, params: Arc<Mutex<Params>>, delta: &Array) -> Array {
let mut ltex = params.lock().unwrap();
let current_unroll = ltex.current_unroll;
assert!(current_unroll > 0
, "Cannot call backward pass without at least 1 forward pass");
let dz = activations::get_derivative(<ex.activations[0]
, <ex.outputs[current_unroll - 1]).unwrap();
let delta_t = af::mul(delta, &dz, false);
let dw = af::matmul(<ex.inputs[current_unroll - 1]
, &delta_t , af::MatProp::TRANS
, af::MatProp::NONE);
let db = af::transpose(&af::sum(&delta_t, 0), false);
ltex.deltas[0] = af::add(<ex.deltas[0], &dw, false);
ltex.deltas[1] = af::add(<ex.deltas[1], &db, false);
ltex.current_unroll -= 1;
af::matmul(&delta_t, <ex.weights[0], af::MatProp::NONE, af::MatProp::TRANS)
}
}