use std::collections::HashMap;
use nalgebra::{Dynamic, RealField, U1, VectorN, MatrixMN, MatrixN, Dim, DefaultAllocator, DimName};
use nalgebra::storage::Storage;
use bayes_estimate::estimators::sir;
use bayes_estimate::estimators::sir::SampleState;
use bayes_estimate::models::{KalmanEstimator, KalmanState};
use bayes_estimate::noise::CorrelatedNoise;
use num_traits::ToPrimitive;
use nalgebra::allocator::Allocator;
/// FastSLAM estimator state: a sampled (particle) location estimate together
/// with a map of features, each feature estimated once per location sample.
pub struct FastSLAM<N: RealField, DL: Dim, DF: Dim>
where DefaultAllocator: Allocator<N, DL, DL> + Allocator<N, DL> + Allocator<N, DF, DF> + Allocator<N, DF>
{
/// Location estimate as weighted samples (particles).
pub loc: SampleState<N, DL>,
/// Map features keyed by id; each entry holds one estimate per location sample.
pub map: FeatureMap<N, DF>,
}
/// A single map feature: a Kalman (mean `x`, covariance `X`) state of dimension DF.
pub type Feature<N, DF> = KalmanState<N, DF>;
/// One feature estimate per location sample (the feature conditioned on each particle).
pub type FeatureCondMap<N, DF> = Vec<Feature<N, DF>>;
// All features of the map, keyed by feature identifier.
type FeatureMap<N, DF> = HashMap<u32, FeatureCondMap<N, DF>>;
impl<N: RealField + ToPrimitive, DL: Dim, DF: DimName> FastSLAM<N, DL, DF>
where DefaultAllocator: Allocator<N, DL, DL> + Allocator<N, DL> + Allocator<N, DF, DF> + Allocator<N, U1, DF> + Allocator<N, DF>
{
pub fn new_equal_likelihood(loc: SampleState<N, DL>) -> Self {
FastSLAM {
loc,
map: FeatureMap::new(),
}
}
pub fn observe_new(&mut self, feature: u32, obs_model: impl Fn(&VectorN<N, DL>) -> Feature<N,DF>) {
let fmap: FeatureCondMap<N,DF> = self.loc.s.iter().map(|s| obs_model(s)).collect();
self.map.insert(feature, fmap);
}
/// SLAM observation of an existing feature.
///
/// For every location sample, performs a Kalman update of that sample's
/// feature estimate and scales the sample's weight by the observation
/// likelihood. `innovation_model` returns the innovation and its additive
/// noise for a (location sample, feature mean) pair; `hx` is the linearised
/// observation matrix with respect to the feature state.
///
/// Panics if `feature` was not previously added (e.g. via `observe_new`),
/// or if the innovation covariance is not positive definite.
pub fn observe<DZ: Dim>(&mut self, feature: u32,
innovation_model: impl Fn(&VectorN<N, DL>, &VectorN<N, DF>) -> (VectorN<N, DZ>, CorrelatedNoise<N, DZ>),
hx: &MatrixMN<N, DZ, DF>)
where DefaultAllocator: Allocator<N, U1, DF> +
Allocator<N, DZ, DZ> + Allocator<N, DZ, DF> + Allocator<N, DF, DZ> + Allocator<N, DZ>
{
let afm = self.map.get_mut(&feature).unwrap();
let nsamples = self.loc.s.len();
for si in 0..nsamples {
// This sample's feature estimate and location particle.
let m1 = &mut afm[si];
let loc = &self.loc.s[si];
let (innov, noise) = innovation_model(loc, &m1.x);
// Innovation covariance S = Q + H X H'.
let mut cov = noise.Q.clone();
cov.quadform_tr(N::one(), hx, &m1.X, N::one());
// Cholesky of S, used for both the inverse and the determinant term.
let s_cholesky = cov.clone().cholesky().unwrap();
let sinv = s_cholesky.inverse();
// Product of the Cholesky factor's diagonal entries, i.e. sqrt(det(S)).
let sinv_diagonal = s_cholesky.l_dirty().iter().step_by(innov.nrows()+1);
let determinate_sinv = sinv_diagonal.fold(N::one(), |prod: N, n: &N| prod * *n).to_f32().unwrap();
// Mahalanobis distance innov' S^-1 innov.
let logl = innov.dot(&(&sinv * &innov)).to_f32().unwrap();
// Gaussian likelihood weight exp(-0.5*logl) * determinate_sinv^-0.5.
// NOTE(review): since the diagonal product equals sqrt(det(S)), this
// applies det(S)^-0.25 rather than the usual det(S)^-0.5 normalisation
// of a Gaussian likelihood — confirm this exponent is intended.
self.loc.w[si] *= (logl.exp() * determinate_sinv).powf(-0.5);
// Kalman update of the feature conditional on this location sample:
// gain W = X H' S^-1; x += W*innov; X -= W S W'.
let w = &m1.X * hx.transpose() * &sinv;
m1.x += &w * &innov;
m1.X.quadform_tr(-N::one(), &w, &cov, N::one());
}
}
/// Remove a feature from the map, returning its conditional map if it was
/// present.
pub fn forget(&mut self, feature: u32) -> Option<FeatureCondMap<N, DF>> {
    self.map.remove(&feature)
}
/// Resample the location particles and, in lock-step, every feature's
/// conditional map, then apply the roughener to the location samples.
///
/// Returns the resampler's (unique sample count, likelihood conditioning)
/// pair, or its error.
pub fn update_resample(&mut self, resampler: &mut bayes_estimate::estimators::sir::Resampler,
                       roughener: &mut sir::Roughener<N, DL>) -> Result<(u32, f32), &'static str>
{
    let nsamples = self.loc.s.len();
    // Resample the location particles; weights are then reset to uniform.
    let (resamples, unique, lcond) = resampler(&mut self.loc.w, &mut self.loc.rng)?;
    sir::live_samples(&mut self.loc.s, &resamples);
    self.loc.w.fill(1.);
    // Duplicate/drop each feature's per-sample estimates to mirror the
    // location resampling (same `resamples` counts, same sample order).
    for fm in self.map.values_mut() {
        let mut resampled = FeatureCondMap::with_capacity(nsamples);
        for (i, f) in fm.iter().enumerate() {
            for _ in 0..resamples[i] {
                resampled.push(f.clone());
            }
        }
        *fm = resampled;
    }
    roughener(&mut self.loc.s, &mut self.loc.rng);
    Ok((unique, lcond))
}
/// Accumulate the statistics of one feature's conditional map `fm` into
/// `kstat`, whose block for this feature starts at row/column `fs`.
///
/// Writes the feature mean, the feature covariance, the cross-covariance
/// with each location state element, and the cross-covariance with every
/// previously placed feature yielded by `fi` (upper triangle only; the
/// caller mirrors the triangle afterwards).
fn feature_statistics(&self, kstat: &mut KalmanState<N, Dynamic>,
                      fs: usize, fm: &FeatureCondMap<N, DF>,
                      fi: &mut dyn Iterator<Item = &FeatureCondMap<N, DF>>)
{
    let nl = self.loc.s.first().unwrap().nrows();
    let nsamples = N::from_usize(self.loc.s.len()).unwrap();
    // Sample mean of the feature state over all particles.
    let mean_f = fm.iter().map(|f| &f.x).sum::<VectorN<N, DF>>() / nsamples;
    kstat.x.fixed_rows_mut::<DF>(fs).copy_from(&mean_f);
    // Feature covariance: mean of (per-sample covariance + spread of means).
    let nf = mean_f.data.shape().0;
    let mut var_f = MatrixMN::zeros_generic(nf, nf);
    for fp in fm.iter() {
        let xx = &fp.x - &mean_f;
        var_f += &fp.X + &xx * xx.transpose();
    }
    var_f /= nsamples;
    kstat.X.fixed_slice_mut::<DF, DF>(fs, fs).copy_from(&var_f);
    // Cross-covariance of the feature with each location state element.
    for ls in 0..nl {
        let mut cov = VectorN::zeros_generic(nf, U1);
        let mean_si = kstat.x[ls];
        for fp in fm.iter().enumerate() {
            cov += (&fp.1.x - &mean_f) * (self.loc.s[fp.0][ls] - mean_si);
        }
        cov /= nsamples;
        // NOTE(review): this writes a DF x 1 slice at (ls, fs), i.e. rows
        // ls..ls+DF; for DF > 1 those rows spill past location element `ls`.
        // Confirm a 1 x DF slice at (ls, fs) (or DF x 1 at (fs, ls)) was not
        // intended.
        kstat.X.fixed_slice_mut::<DF, U1>(ls, fs).copy_from(&cov);
    }
    // Cross-covariance of this feature with each earlier feature; `fjs`
    // assumes earlier features are packed contiguously from row `nl`.
    let mut fjs = nl;
    for fj in fi {
        let mut cov = MatrixN::zeros_generic(nf, nf);
        let mut fpj = fj.iter();
        let mean_fi = kstat.x.fixed_rows::<DF>(fjs);
        for fp in fm.iter() {
            cov += (&fp.x - &mean_f) * (&fpj.next().unwrap().x - &mean_fi).transpose();
        }
        // Fix: `nsamples` is already of type `N`; the original `nsamples as N`
        // cast to a generic type parameter is not valid Rust and did not
        // compile.
        cov /= nsamples;
        kstat.X.fixed_slice_mut::<DF, DF>(fjs, fs).copy_from(&cov);
        fjs += DF::dim();
    }
}
/// Compute compressed statistics into `kstat`: the location state occupies
/// the leading block, followed by every feature's statistics packed
/// contiguously in map iteration order.
///
/// Panics if `kstat` is too small or the sample set is empty.
pub fn statistics_compressed(&self, kstat: &mut KalmanState<N, Dynamic>)
where DefaultAllocator: Allocator<N, U1, DL>
{
    let nl = self.loc.s.first().unwrap().data.shape().0;
    // Fix: each feature occupies DF::dim() rows (fs advances by DF::dim()
    // per feature below), so the original `+ self.map.len()` undercounted
    // the required size whenever DF > 1. Also fixes the message typo.
    assert!(nl.value() + self.map.len() * DF::dim() <= kstat.x.nrows(), "kstat too small to hold location and features");
    kstat.x.fill(N::zero());
    kstat.X.fill(N::zero());
    // Location mean and covariance fill the leading block.
    let loc_state = KalmanEstimator::<N, DL>::kalman_state(&self.loc).unwrap();
    kstat.x.rows_generic_mut(0, nl).copy_from(&loc_state.x);
    kstat.X.generic_slice_mut((0, 0), (nl, nl)).copy_from(&loc_state.X);
    // Place features one after another; each is correlated with all the
    // features placed before it.
    let mut fs = nl.value();
    for f in self.map.iter() {
        let mut until_fi = self.map.iter().take_while(|it| it.0 != f.0).map(|it| it.1);
        self.feature_statistics(kstat, fs, f.1, &mut until_fi);
        fs += DF::dim();
    }
    // Statistics were written to the upper triangle only; mirror it.
    kstat.X.fill_lower_triangle_with_upper_triangle();
}
/// Compute sparse statistics into `kstat`: each feature's block is placed
/// at the offset determined by its numeric id (`nl + DF * id`), so feature
/// ids index directly into `kstat` regardless of which features exist.
///
/// Panics if `kstat` is too small or the sample set is empty.
pub fn statistics_sparse(&self, kstat: &mut KalmanState<N, Dynamic>)
where DefaultAllocator: Allocator<N, U1, DL>
{
    let nl = self.loc.s.first().unwrap().data.shape().0;
    // Fix: blocks are placed by feature id (fs = nl + DF*id below), so the
    // required size depends on the largest id and DF::dim(), not on
    // map.len(). Also fixes the message typo.
    if let Some(max_id) = self.map.keys().max() {
        assert!(nl.value() + DF::dim() * (*max_id as usize + 1) <= kstat.x.nrows(), "kstat too small to hold location and features");
    }
    kstat.x.fill(N::zero());
    kstat.X.fill(N::zero());
    // Location mean and covariance fill the leading block.
    let loc_state = KalmanEstimator::<N, DL>::kalman_state(&self.loc).unwrap();
    kstat.x.rows_generic_mut(0, nl).copy_from(&loc_state.x);
    kstat.X.generic_slice_mut((0, 0), (nl, nl)).copy_from(&loc_state.X);
    // Process features in ascending id order so cross-covariances are formed
    // against already-placed features.
    let mut ordered_features: Vec<u32> = self.map.keys().copied().collect();
    ordered_features.sort_unstable();
    for fi in ordered_features.iter().copied() {
        let fs = nl.value() + DF::dim() * fi as usize;
        let mut until_fi = ordered_features.iter().take_while(|&&it| it != fi).map(|it| self.map.get(it).unwrap());
        self.feature_statistics(kstat, fs, self.map.get(&fi).unwrap(), &mut until_fi);
    }
    // Statistics were written to the upper triangle only; mirror it.
    kstat.X.fill_lower_triangle_with_upper_triangle();
}
}