hal-ml 0.2.0

HAL: a machine learning library that is able to run on NVIDIA, OpenCL, or CPU BLAS based compute backends. It currently provides stackable classical neural networks and RNNs, with LSTMs coming soon. A differentiating feature of this package is that we are looking to implement RTRL (instead of just BPTT) for the recurrent layers, in order to provide a solid framework for online learning. We will also (in the future) implement various layers such as unitary RNNs, NTMs, and Adaptive-Computation-Time-based LSTMs. HAL also comes with the ability to plot arrays and to perform many basic math operations on them.
pub use self::sgd::SGD;
mod sgd;

pub use self::adam::Adam;
mod adam;

use af;
use af::{Array, Dim4, NormType};
use std::collections::HashMap;

use utils;
use error::HALError;
use params::ParamManager;

/// Common interface implemented by all gradient-based optimizers
/// in this module (currently `SGD` and `Adam`).
pub trait Optimizer {
  /// Constructs an optimizer from a string-keyed map of hyperparameters.
  /// The set of recognized keys is defined by each implementation.
  fn new(params: &HashMap<&str, &str>) -> Self where Self: Sized;
  /// Prepares the optimizer for parameters with the given dimensions
  /// (e.g. allocating any per-parameter state the implementation keeps).
  /// Expected to be called once before `update`.
  fn setup(&mut self, dims: Vec<Dim4>);
  /// Performs one optimization step over the parameters held by
  /// `parameter_manager`, using `batch_size` to scale the step
  /// (exact scaling is implementation-defined).
  fn update(&mut self, parameter_manager: &mut ParamManager, batch_size: u64);
  /// Reports the optimizer's configuration (implementation-defined output).
  fn info(&self);
}

/// Looks up an optimizer by (case-insensitive) name and builds it from
/// the supplied hyperparameter map.
///
/// Returns `HALError::UNKNOWN` when `name` does not match any known
/// optimizer ("sgd" or "adam").
pub fn get_optimizer(name: &str, params: &HashMap<&str, &str>) -> Result<Box<Optimizer>, HALError>{
  let lowered = name.to_lowercase();
  if lowered == "sgd" {
    Ok(Box::new(SGD::new(params)))
  } else if lowered == "adam" {
    Ok(Box::new(Adam::new(params)))
  } else {
    Err(HALError::UNKNOWN)
  }
}

/// Looks up an optimizer by (case-insensitive) name and builds it with
/// its default hyperparameters.
///
/// Returns `HALError::UNKNOWN` when `name` does not match any known
/// optimizer ("sgd" or "adam").
pub fn get_optimizer_with_defaults(name: &str) -> Result<Box<Optimizer>, HALError>{
  let lowered = name.to_lowercase();
  match lowered.as_str() {
    "sgd"  => Ok(Box::new(SGD::default())),
    "adam" => Ok(Box::new(Adam::default())),
    other  => {
      // Anything else is an unsupported optimizer name.
      let _ = other;
      Err(HALError::UNKNOWN)
    },
  }
}

/// Rescales `input` so that its L2 (vector) norm does not exceed `rescale`,
/// returning the result cast back to the input's original element type.
///
/// When the norm is already at or below `rescale` the scaling factor is 1,
/// so the array passes through unchanged (up to the multiply/cast round-trip).
pub fn clip_grads(input: &Array, rescale: f32) -> Array {
  // L2 norm of the flattened gradient array.
  let grad_norm = af::norm(input, NormType::VECTOR_2, 0f64, 0f64) as f32;
  // factor = rescale / max(norm, rescale): <= 1, and exactly 1 when no
  // clipping is needed (also avoids dividing by a zero norm).
  let factor = rescale / grad_norm.max(rescale);
  let scaled = af::mul(input, &factor, false);
  utils::cast(&scaled, input.get_type())
}