fast-slam 0.3.1

SLAM: Simultaneous Localization and Mapping. A 'FastSLAM' implementation that is numerically and dimensionally generic, built on nalgebra.
Documentation
//! SLAM: Simultaneous Localization and Mapping using the FastSLAM algorithm.
//!
//! See Ref[1].
//!
//! [1] "FastSLAM: A Factored Solution to the Simultaneous Localization and Mapping Problem",
//! Montemerlo, M., Thrun, S., Koller, D. and Wegbreit, B., Proceedings of the AAAI National Conference on Artificial Intelligence, 2002.

use std::collections::HashMap;

use bayes_estimate::estimators::sir;
use bayes_estimate::estimators::sir::SampleState;
use bayes_estimate::models::{KalmanEstimator, KalmanState};
use bayes_estimate::noise::CorrelatedNoise;
use nalgebra::{allocator::Allocator, Const, DefaultAllocator, Dim, Dynamic, OMatrix, OVector, RealField, U1};
use num_traits::{ToPrimitive, FromPrimitive};

/// Fast SLAM state.
///
/// A sample state of the location with augmented Kalman states of map features.
pub struct FastSLAM<N: RealField, DL: Dim, const DF: usize>
    where DefaultAllocator: Allocator<N, DL, DL> + Allocator<N, DL> + Allocator<N, Const<DF>, Const<DF>> + Allocator<N, Const<DF>>
{
    /// Location as a sample state.
    pub loc: SampleState<N, DL>,
    /// Hashmap of augmented map feature states.
    pub map: FeatureMap<N, Const<DF>>,
}

/// A map feature: a Kalman estimate (mean and covariance) of the feature state.
pub type Feature<N, DF> = KalmanState<N, DF>;
/// Feature estimates conditioned on each location sample.
pub type FeatureCondMap<N, DF> = Vec<Feature<N, DF>>;
/// Map from feature number to its sample-conditioned estimates.
type FeatureMap<N, DF> = HashMap<u32, FeatureCondMap<N, DF>>;

impl<N: RealField + Copy + FromPrimitive + ToPrimitive, DL: Dim, const DF: usize> FastSLAM<N, DL, DF>
    where DefaultAllocator: Allocator<N, DL, DL> + Allocator<N, DL> + Allocator<N, Const<DF>, Const<DF>> + Allocator<N, U1, Const<DF>> + Allocator<N, Const<DF>>
{
    /// Creates a [FastSLAM] with an empty feature map.
    pub fn new_equal_likelihood(loc: SampleState<N, DL>) -> Self {
        FastSLAM {
            loc,
            map: FeatureMap::new(),
        }
    }

    /// SLAM new feature observation (overwrite).
    ///
    /// Assumes there is no prior information about the feature (strictly a uniform uninformative prior).
    /// This implies:
    /// a) the observation carries no information about the location, so no resampling is required;
    /// b) the feature state estimates come directly from the location and observation.
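    ///
    /// # Example
    ///
    /// A minimal sketch for a hypothetical 1D feature observed relative to the first
    /// location state; `z`, `r` and the model itself are illustrative, not part of the crate:
    /// ```ignore
    /// use nalgebra::{Matrix1, Vector1, Vector2};
    /// use bayes_estimate::models::KalmanState;
    ///
    /// let (z, r) = (5.0, 0.01);    // observed relative position and its variance (assumed)
    /// slam.observe_new(0, |loc: &Vector2<f64>| KalmanState {
    ///     x: Vector1::new(loc[0] + z),    // feature mean directly from location and observation
    ///     X: Matrix1::new(r),             // feature covariance is the observation noise
    /// });
    /// ```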
    pub fn observe_new(&mut self, feature: u32, obs_model: impl Fn(&OVector<N, DL>) -> Feature<N, Const<DF>>) {
        let fmap: FeatureCondMap<N, Const<DF>> = self.loc.s.iter().map(|s| obs_model(s)).collect();
        self.map.insert(feature, fmap);
    }

    /// SLAM feature observation.
    ///
    /// Uses the extended (EKF) FastSLAM observation equations.
    /// Note: mathematically only the weight ratios are important; numerically, however, the range should be restricted.
    /// The weights are computed here using the simplest form, with the common factor Ht removed.
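    ///
    /// # Example
    ///
    /// A sketch of an innovation model matching the relative observation above. This is
    /// illustrative only, and assumes `CorrelatedNoise` is constructed from its covariance `Q`:
    /// ```ignore
    /// use nalgebra::{Matrix1, Vector1, Vector2};
    /// use bayes_estimate::noise::CorrelatedNoise;
    ///
    /// let (z, r) = (5.0, 0.01);      // observation and its noise variance (assumed)
    /// let hx = Matrix1::new(1.0);    // Jacobian of the observation w.r.t. the feature
    /// slam.observe(0, |loc: &Vector2<f64>, m: &Vector1<f64>| {
    ///     let innov = Vector1::new(z - (m[0] - loc[0]));    // z - h(loc, m)
    ///     (innov, CorrelatedNoise { Q: Matrix1::new(r) })
    /// }, &hx);
    /// ```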
    pub fn observe<DZ: Dim>(&mut self, feature: u32,
                   innovation_model: impl Fn(&OVector<N, DL>, &OVector<N, Const<DF>>) -> (OVector<N, DZ>, CorrelatedNoise<N, DZ>),
                   hx: &OMatrix<N, DZ, Const<DF>>)
    where DefaultAllocator: Allocator<N, U1, Const<DF>> +
        Allocator<N, DZ, DZ> + Allocator<N, DZ, Const<DF>> + Allocator<N, Const<DF>, DZ> + Allocator<N, DZ>
    {
        // Existing feature - reference the associated feature map (panics if the feature is unknown)
        let afm = self.map.get_mut(&feature).unwrap();

        let nsamples = self.loc.s.len();

        // Iterate over samples
        for si in 0..nsamples {
            let m1 = &mut afm[si];        // Feature map state

            let loc = &self.loc.s[si];
            // Observation innovation and noise
            let (innov, noise) = innovation_model(loc, &m1.x);

            // Innovation variance hx.X.hx' + Q
            let mut cov = noise.Q.clone();
            cov.quadform_tr(N::one(), hx, &m1.X, N::one());

            // Inverse innovation covariance (via Cholesky; panics if the covariance is not positive definite)
            let s_cholesky = cov.clone().cholesky().unwrap();
            let sinv = s_cholesky.inverse();

            // Multiplicative fusion of observation weights: the Gaussian likelihood of the innovation,
            // w *= exp(-0.5 * innov' * S^-1 * innov) / sqrt(det S),
            // dropping the (2*pi)^(-nz/2) factor which is common to all weights
            let l_diagonal = s_cholesky.l_dirty().iter().step_by(innov.nrows() + 1);
            let det_s_sqrt = l_diagonal.fold(N::one(), |prod: N, n: &N| prod * *n).to_f32().unwrap();    // prod of diag(L) = sqrt(det S)
            let logl = innov.dot(&(&sinv * &innov)).to_f32().unwrap();
            self.loc.w[si] *= (-0.5 * logl).exp() / det_s_sqrt;

            // Estimate conditional state of feature
            let w = &m1.X * hx.transpose() * &sinv;    // EKF update for the conditional feature observation: Kalman gain W = X.hx'.S^-1
            m1.x += &w * &innov;
            m1.X.quadform_tr(-N::one(), &w, &cov, N::one());
        }
    }

    /// Forget all feature information; the feature number can then be reused for a new feature.
    pub fn forget(&mut self, feature: u32) -> Option<FeatureCondMap<N, Const<DF>>>
    {
        self.map.remove(&feature)
    }

    /// Resampling update.
    ///
    /// Resample the location using the weights, and propagate the resampling to all features.
    /// Only resamples if the weights have been updated.
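    ///
    /// # Example
    ///
    /// A sketch using a resampler from `bayes_estimate` (assuming `sir::standard_resampler` is
    /// available there); the no-op roughener is illustrative only, a real filter should roughen:
    /// ```ignore
    /// use bayes_estimate::estimators::sir;
    ///
    /// let (unique, lcond) = slam.update_resample(
    ///     &mut sir::standard_resampler,
    ///     &mut |_samples, _rng| {},    // no roughening
    /// )?;
    /// ```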
    pub fn update_resample(&mut self, resampler: &mut sir::Resampler,
                           roughener: &mut sir::Roughener<N, DL>) -> Result<(u32, f32), &'static str>
    {
        // Determine the resampling
        let (resamples, unique, lcond) = resampler(&mut self.loc.w, &mut self.loc.rng)?;
        // Keep the live samples
        sir::live_samples(&mut self.loc.s, &resamples);
        // Resampling results in uniform likelihoods
        self.loc.w.fill(1.);

        // Propagate resampling to the features
        let nsamples = self.loc.s.len();
        for (_feature, fm) in self.map.iter_mut() { // All Features
            let mut fmr = FeatureCondMap::with_capacity(nsamples);    // Resampled feature map
            for (i, f) in fm.iter().enumerate() {
                for _ in 0..resamples[i] {    // Multiple copies of this resampled feature
                    fmr.push(f.clone());
                }
            }
            *fm = fmr;    // Move the resamples into the feature map
        }

        roughener(&mut self.loc.s, &mut self.loc.rng);    // Roughen the location samples
        Ok((unique, lcond))
    }

    /// Compute sample mean and covariance statistics of a feature.
    ///
    /// Uses the Maximum Likelihood (biased) estimate of the covariance (normalised by 1/n).
    ///
    /// fs: subscript in kstat at which to return the statistics
    /// fm: feature map of interest
    /// fi: iterator over the preceding features (cross covariances are computed for this map subset)
    ///
    /// Numerics:
    /// No check is made for the conditioning of the samples with regard to mean and covariance.
    /// Extreme ranges or very large sample sizes will result in inaccuracy.
    /// The covariance should, however, always remain PSD.
    ///
    /// Precond: kstat contains the location mean and the previous (smaller fs) feature statistics.
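    ///
    /// For n samples with conditional feature means x_i and covariances X_i this computes
    /// (restating the code below, for reference):
    ///   mean = (1/n) * sum_i x_i
    ///   cov  = (1/n) * sum_i (X_i + (x_i - mean)(x_i - mean)')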
    fn feature_statistics(&self, kstat: &mut KalmanState<N, Dynamic>,
                          fs: usize, fm: &FeatureCondMap<N, Const<DF>>,
                          fi: &mut dyn Iterator<Item = &FeatureCondMap<N, Const<DF>>>)
    {
        let nl = self.loc.s.first().unwrap().nrows();    // No of location states
        let nsamples = N::from_usize(self.loc.s.len()).unwrap();

        // Feature mean
        let mean_f = fm.iter().map(|f| &f.x).sum::<OVector<N, Const<DF>>>() / nsamples;
        kstat.x.fixed_rows_mut::<DF>(fs).copy_from(&mean_f);

        // Feature variance: the ML estimate given the estimated mean
        let nf = mean_f.shape_generic().0;
        let mut var_f = OMatrix::zeros_generic(nf, nf);
        for fp in fm.iter() {
            let xx = &fp.x - &mean_f;
            var_f += &fp.X + &xx * xx.transpose();
        }
        var_f /= nsamples;
        kstat.X.fixed_slice_mut::<DF,DF>(fs,fs).copy_from(&var_f);

        // Location,feature covariance
        for ls in 0..nl {
            let mut cov = OVector::zeros_generic(nf, Const::<1>);
            let mean_si = kstat.x[ls];
            for (i, fp) in fm.iter().enumerate() {
                cov += (&fp.x - &mean_f) * (self.loc.s[i][ls] - mean_si);
            }
            cov /= nsamples;
            // Upper triangle block: the row of location state ls, the columns of the feature
            kstat.X.fixed_slice_mut::<1, DF>(ls, fs).copy_from(&cov.transpose());
        }

        // Feature,feature covariance. Iterate over the previous features, whose means are already in kstat
        let mut fjs = nl;                // Subscript of feature j in kstat
        for fj in fi {
            let mut cov = OMatrix::zeros_generic(nf, nf);
            let mut fpj = fj.iter();
            let mean_fj = kstat.x.fixed_rows::<DF>(fjs);
            for fp in fm.iter() {
                cov += (&fp.x - &mean_f) * (&fpj.next().unwrap().x - &mean_fj).transpose();
            }
            cov /= nsamples;
            kstat.X.fixed_slice_mut::<DF, DF>(fjs, fs).copy_from(&cov);
            fjs += DF;
        }
    }

    /// Compute sample mean and covariance statistics of the filter.
    ///
    /// kstat elements are filled first with the location statistics and then with the map feature statistics.
    /// Feature statistics are packed contiguously after the location, in the feature map's iteration order.
    ///
    /// Precond:
    ///  kstat must have space for the location and all feature statistics
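    ///
    /// # Example
    ///
    /// A sketch of sizing kstat for the compressed layout, constructed directly from its
    /// public fields (the dimensions, rather than the construction style, are the point):
    /// ```ignore
    /// use bayes_estimate::models::KalmanState;
    /// use nalgebra::{Const, Dynamic, OMatrix, OVector};
    ///
    /// let n = Dynamic::new(nl + DF * slam.map.len());    // location rows plus DF rows per feature
    /// let mut kstat = KalmanState {
    ///     x: OVector::<f64, Dynamic>::zeros_generic(n, Const::<1>),
    ///     X: OMatrix::<f64, Dynamic, Dynamic>::zeros_generic(n, n),
    /// };
    /// slam.statistics_compressed(&mut kstat);
    /// ```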
    pub fn statistics_compressed(&self, kstat: &mut KalmanState<N, Dynamic>)
      where DefaultAllocator: Allocator<N, U1, DL>
    {
        let nl = self.loc.s.first().unwrap().shape_generic().0;    // Number of location states
        assert!(nl.value() + DF * self.map.len() <= kstat.x.nrows(), "kstat too small to hold the location and features");

        kstat.x.fill(N::zero());    // Zero everything (required only for states that are never filled)
        kstat.X.fill(N::zero());

        // Get Location statistics
        let loc_state = KalmanEstimator::<N, DL>::kalman_state(&self.loc).unwrap();
        kstat.x.rows_generic_mut(0,nl).copy_from(&loc_state.x);
        kstat.X.generic_slice_mut((0, 0), (nl,nl)).copy_from(&loc_state.X);

        // Iterate over the feature statistics
        let mut fs = nl.value();    // Feature subscript in kstat
        for f in self.map.iter() {
            // Features placed before f (HashMap iteration order is arbitrary but consistent while the map is unmodified)
            let mut until_fi = self.map.iter().take_while(|it| it.0 != f.0).map(|it| it.1);
            self.feature_statistics(kstat, fs, f.1, &mut until_fi);    // Build statistics of f with the features before it
            fs += DF;
        }
        kstat.X.fill_lower_triangle_with_upper_triangle();
    }

    /// Compute sample mean and covariance statistics of the filter.
    ///
    /// kstat elements are filled first with the location statistics and then with the map feature statistics.
    /// Each feature is placed using its feature number as an index (after the location), so kstat
    /// must have space up to the highest feature number.
    ///
    /// Precond:
    ///  kstat must have space for the location and all feature statistics
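    ///
    /// That is, kstat.x.nrows() must be at least nl + DF * (highest feature number + 1).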
    pub fn statistics_sparse(&self, kstat: &mut KalmanState<N, Dynamic>)
        where DefaultAllocator: Allocator<N, U1, DL>
    {
        let nl = self.loc.s.first().unwrap().shape_generic().0;    // Number of location states
        let features = self.map.keys().max().map_or(0, |f| *f as usize + 1);
        assert!(nl.value() + DF * features <= kstat.x.nrows(), "kstat too small to hold the location and features");

        kstat.x.fill(N::zero());    // Zero everything (required only for feature states not present)
        kstat.X.fill(N::zero());

        // Get Location statistics
        let loc_state = KalmanEstimator::<N,DL>::kalman_state(&self.loc).unwrap();
        kstat.x.rows_generic_mut(0,nl).copy_from(&loc_state.x);
        kstat.X.generic_slice_mut((0, 0), (nl,nl)).copy_from(&loc_state.X);

        // Iterate over the feature statistics in feature number order
        let mut ordered_features = self.map.keys().cloned().collect::<Vec<_>>();
        ordered_features.sort();
        for fi in ordered_features.iter().cloned() {
            let fs = nl.value() + DF * fi as usize;    // Feature subscript in kstat
            let mut until_fi = ordered_features.iter().take_while(|it| **it != fi).map(|it| self.map.get(it).unwrap());
            self.feature_statistics(kstat, fs, self.map.get(&fi).unwrap(), &mut until_fi);    // Build statistics of fi with the features before it
        }
        kstat.X.fill_lower_triangle_with_upper_triangle();
    }

}
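
// ---------------------------------------------------------------------------
// Usage sketch (illustrative, not part of the crate): a 2D location with a
// single 1D feature, tying the pieces above together. It assumes that
// `SampleState`'s public fields are exactly `s`, `w` and `rng` (as accessed
// above) with `rng: Box<dyn RngCore>`, that `CorrelatedNoise` is constructed
// from its covariance `Q`, and that the `rand` crate is available for an RNG.
#[cfg(test)]
mod example {
    use super::*;
    use nalgebra::{Matrix1, U2, Vector1, Vector2};
    use rand::{rngs::StdRng, SeedableRng};

    #[test]
    fn one_feature() {
        // 100 equally weighted location samples at the origin
        let loc = SampleState {
            s: vec![Vector2::new(0.0_f64, 0.0); 100],
            w: vec![1.0; 100],
            rng: Box::new(StdRng::seed_from_u64(1)),
        };
        let mut slam: FastSLAM<f64, U2, 1> = FastSLAM::new_equal_likelihood(loc);

        // Initialise feature 0 from a relative observation z = m - x with variance r
        let (z, r) = (5.0, 0.01);
        slam.observe_new(0, |s: &Vector2<f64>| KalmanState {
            x: Vector1::new(s[0] + z),
            X: Matrix1::new(r),
        });

        // Re-observe the same feature, reweighting each location sample
        let hx = Matrix1::new(1.0);
        slam.observe(0, |s: &Vector2<f64>, m: &Vector1<f64>| {
            (Vector1::new(z - (m[0] - s[0])), CorrelatedNoise { Q: Matrix1::new(r) })
        }, &hx);
    }
}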