#[cfg_attr(feature = "serde", macro_use)]
#[cfg(feature = "serde")]
extern crate serde_crate as serde;
#[cfg(test)]
#[macro_use(quickcheck)]
extern crate quickcheck;
mod macros;
mod utils;
#[macro_use]
mod error;
mod features;
pub use self::features::*;
#[macro_use]
pub mod basis;
pub mod optim;
pub use self::error::*;
/// An [`LFA`] backed by a single weight vector, producing one scalar output.
pub type ScalarLFA<B, O> = LFA<B, ndarray::Array1<f64>, O>;
/// An [`LFA`] backed by a weight matrix, producing one output per column.
pub type VectorLFA<B, O> = LFA<B, ndarray::Array2<f64>, O>;
/// Linear function approximator: inputs are projected through `basis` into
/// features, outputs are dot products of those features with `weights`, and
/// `optimiser` applies the weight updates.
#[derive(Clone, Debug)]
#[cfg_attr(
    feature = "serde",
    derive(Serialize, Deserialize),
    serde(crate = "serde_crate")
)]
pub struct LFA<B, W, O = optim::SGD> {
    /// Basis used to project raw inputs into features.
    pub basis: B,
    /// Weight storage (`Array1` for scalar outputs, `Array2` for vector outputs).
    pub weights: W,
    /// Optimiser stepped on every update call.
    pub optimiser: O,
}
impl<B, W, O> LFA<B, W, O> {
pub fn new(basis: B, weights: W, optimiser: O) -> Self {
LFA {
basis,
weights,
optimiser,
}
}
}
impl<B, O> ScalarLFA<B, O> {
    /// Build a scalar-output LFA whose weight vector is zero-initialised with
    /// one entry per basis dimension.
    pub fn scalar(basis: B, optimiser: O) -> Self
    where B: spaces::Space {
        let dim: usize = basis.dim().into();

        LFA {
            weights: ndarray::Array1::zeros(dim),
            basis,
            optimiser,
        }
    }

    /// Evaluate the function at `input`.
    ///
    /// Returns an error if the basis fails to project the input.
    pub fn evaluate<I>(&self, input: I) -> Result<f64>
    where B: basis::Basis<I, Value = Features> {
        let features = self.basis.project(input)?;

        Ok(features.dot(&self.weights.view()))
    }

    /// Apply a single optimiser step, scaling the projected features by `error`.
    pub fn update<I>(&mut self, input: I, error: f64) -> Result<()>
    where
        B: basis::Basis<I, Value = Features>,
        O: optim::Optimiser,
    {
        let features = self.basis.project(input)?;

        self.optimiser
            .step_scaled(&mut self.weights.view_mut(), &features, error)
    }

    /// Like [`ScalarLFA::update`], but the error is computed by `f` from the
    /// projected features and the current predicted value.
    pub fn update_with<I>(&mut self, input: I, f: impl Fn(&Features, f64) -> f64) -> Result<()>
    where
        B: basis::Basis<I, Value = Features>,
        O: optim::Optimiser,
    {
        let features = self.basis.project(input)?;
        let value = features.dot(&self.weights.view());
        let error = f(&features, value);

        self.optimiser
            .step_scaled(&mut self.weights.view_mut(), &features, error)
    }
}
/// Iterator over the outputs of a [`VectorLFA`] for one fixed input projection.
pub struct OutputIter<'a> {
    // Projected features of the input being evaluated.
    basis: Features,
    // Column views of the weight matrix, one per output.
    lanes: ndarray::iter::LanesIter<'a, f64, ndarray::Ix1>,
}
impl<'a> Iterator for OutputIter<'a> {
    type Item = f64;

    /// Next output: dot product of the features with the next weight column.
    fn next(&mut self) -> Option<Self::Item> {
        match self.lanes.next() {
            Some(column) => Some(self.basis.dot(&column)),
            None => None,
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) { self.lanes.size_hint() }
}
impl<'a> ExactSizeIterator for OutputIter<'a> {
    // Exact number of outputs remaining (one per weight column).
    fn len(&self) -> usize { self.lanes.len() }
}
impl<B, O> VectorLFA<B, O> {
    /// Build a vector-output LFA with a zero-initialised weight matrix of
    /// shape `(basis dimension, n_outputs)`.
    pub fn vector(basis: B, optimiser: O, n_outputs: usize) -> Self
    where B: spaces::Space {
        let dim: usize = basis.dim().into();

        LFA {
            weights: ndarray::Array2::zeros((dim, n_outputs)),
            basis,
            optimiser,
        }
    }

    /// Number of outputs (columns of the weight matrix).
    pub fn n_outputs(&self) -> usize { self.weights.ncols() }

    /// Evaluate all outputs at `input`, collected into an array.
    pub fn evaluate<I>(&self, input: I) -> Result<ndarray::Array1<f64>>
    where B: basis::Basis<I, Value = Features> {
        let outputs = self.try_iter(input)?;

        Ok(outputs.collect())
    }

    /// Evaluate a single output at `input`.
    pub fn evaluate_index<I>(&self, input: I, index: usize) -> Result<f64>
    where B: basis::Basis<I, Value = Features> {
        let features = self.basis.project(input)?;

        Ok(features.dot(&self.weights.column(index)))
    }

    /// Iterate over the outputs at `input`.
    ///
    /// Panics if the basis fails to project the input; see
    /// [`VectorLFA::try_iter`] for the fallible variant.
    pub fn iter<'a, I>(&'a self, input: I) -> OutputIter<'a>
    where B: basis::Basis<I, Value = Features> {
        self.try_iter(input).unwrap()
    }

    /// Fallibly iterate over the outputs at `input`.
    pub fn try_iter<'a, I>(&'a self, input: I) -> Result<OutputIter<'a>>
    where B: basis::Basis<I, Value = Features> {
        let basis = self.basis.project(input)?;

        Ok(OutputIter {
            basis,
            lanes: self.weights.gencolumns().into_iter(),
        })
    }

    /// Step each weight column with its corresponding entry of `errors`.
    ///
    /// Every column is stepped even after a failure so the columns stay in
    /// lock-step; the first error encountered is the one returned.
    pub fn update<I, E>(&mut self, input: I, errors: E) -> Result<()>
    where
        B: basis::Basis<I, Value = Features>,
        O: optim::Optimiser,
        E: IntoIterator<Item = f64>,
    {
        let features = self.basis.project(input)?;
        let mut outcome = Ok(());

        for (error, mut column) in errors
            .into_iter()
            .zip(self.weights.gencolumns_mut().into_iter())
        {
            let step = self.optimiser.step_scaled(&mut column, &features, error);

            outcome = outcome.and(step);
        }

        outcome
    }

    /// Step a single weight column, scaling the projected features by `error`.
    pub fn update_index<I>(&mut self, input: I, index: usize, error: f64) -> Result<()>
    where
        B: basis::Basis<I, Value = Features>,
        O: optim::Optimiser,
    {
        let features = self.basis.project(input)?;

        self.optimiser
            .step_scaled(&mut self.weights.column_mut(index), &features, error)
    }

    /// Like [`VectorLFA::update`], but the error vector is computed by `f`
    /// from the projected features and the current predicted values.
    pub fn update_with<I>(
        &mut self,
        input: I,
        f: impl Fn(&Features, ndarray::Array1<f64>) -> ndarray::Array1<f64>,
    ) -> Result<()>
    where
        B: basis::Basis<I, Value = Features>,
        O: optim::Optimiser,
    {
        let features = self.basis.project(input)?;
        let values = features.matmul(&self.weights);
        let errors = f(&features, values).into_raw_vec();
        let mut outcome = Ok(());

        // Same lock-step semantics as `update`: keep stepping after a
        // failure, report the first error.
        for (error, mut column) in errors
            .into_iter()
            .zip(self.weights.gencolumns_mut().into_iter())
        {
            let step = self.optimiser.step_scaled(&mut column, &features, error);

            outcome = outcome.and(step);
        }

        outcome
    }
}