1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
/// SLAM : Simultaneous Localization and Mapping.
///
///  FastSLAM augmented particle algorithm see Ref[1].
///
/// [1] "FastSLAM: Factored Solution to the Simultaneous Localization and Mapping Problem"
/// Montemerlo, M. and Thrun, S. and Koller, D. and Wegbreit, B. Proceedings of the AAAI National Conference on Artificial Intelligence  2002

use std::collections::HashMap;

use nalgebra::{Dynamic, RealField, U1, VectorN, MatrixMN, MatrixN, Dim, DefaultAllocator, DimName};
use nalgebra::storage::Storage;

use bayes_estimate::estimators::sir;
use bayes_estimate::estimators::sir::SampleState;
use bayes_estimate::models::{KalmanEstimator, KalmanState};
use bayes_estimate::noise::CorrelatedNoise;
use num_traits::ToPrimitive;
use nalgebra::allocator::Allocator;

/// Fast SLAM state.
///
/// A sample state of the location with augmented Kalman states of map features.
///
/// `DL` is the dimension of the location state, `DF` the dimension of each map feature.
pub struct FastSLAM<N: RealField, DL: Dim, DF: Dim>
    where DefaultAllocator: Allocator<N, DL, DL> + Allocator<N, DL> + Allocator<N, DF, DF> + Allocator<N, DF>
{
    /// Location as a sample state.
    pub loc: SampleState<N, DL>,
    /// Hashmap of augmented map feature states.
    pub map: FeatureMap<N, DF>,
}

/// A map feature: a Kalman state (mean and covariance) of dimension `DF`.
pub type Feature<N, DF> = KalmanState<N, DF>;
/// Per-particle conditional map of one feature: one [Feature] estimate per location sample.
pub type FeatureCondMap<N, DF> = Vec<Feature<N, DF>>;
// Map from feature number to its per-particle conditional map.
type FeatureMap<N, DF> = HashMap<u32, FeatureCondMap<N, DF>>;

impl<N: RealField + ToPrimitive, DL: Dim, DF: DimName> FastSLAM<N, DL, DF>
    where DefaultAllocator: Allocator<N, DL, DL> + Allocator<N, DL> + Allocator<N, DF, DF> + Allocator<N, U1, DF> + Allocator<N, DF>
{
    /// Creates a [FastSLAM] with empty feature map.
    pub fn new_equal_likelihood(loc: SampleState<N, DL>) -> Self {
        FastSLAM {
            loc,
            map: FeatureMap::new(),
        }
    }

    /// SLAM new feature observation (overwrite).
    ///
    /// Assumes there is no prior information about the feature (strictly a uniform un-informative prior).
    /// This implies:
    /// a) There is no information about the location, so no resampling is required
    /// b) Feature state estimates come directly from the location and observation
    ///
    /// Any existing conditional map stored under `feature` is replaced.
    pub fn observe_new(&mut self, feature: u32, obs_model: impl Fn(&VectorN<N, DL>) -> Feature<N,DF>) {
        // One feature estimate per location particle, in particle order
        let per_particle: FeatureCondMap<N, DF> = self.loc.s.iter().map(&obs_model).collect();
        self.map.insert(feature, per_particle);
    }

    /// SLAM Feature observation.
    ///  Uses Extended Fast_SLAM observation equations
    /// Note: Mathematically only weight ratios are important. Numerically however the range should be restricted.
    /// The weights are computed here using the simplest form with common factor Ht removed.
    ///
    /// `innovation_model` maps (location sample, feature mean) to the observation
    /// innovation and its correlated noise; `hx` is the observation Jacobian w.r.t. the feature.
    ///
    /// Panics if `feature` was not previously added (e.g. via [FastSLAM::observe_new]),
    /// or if an innovation covariance is not positive-definite (Cholesky fails).
    pub fn observe<DZ: Dim>(&mut self, feature: u32,
                   innovation_model: impl Fn(&VectorN<N, DL>, &VectorN<N, DF>) -> (VectorN<N, DZ>, CorrelatedNoise<N, DZ>),
                   hx: &MatrixMN<N, DZ, DF>)
    where DefaultAllocator: Allocator<N, U1, DF> +
        Allocator<N, DZ, DZ> + Allocator<N, DZ, DF> + Allocator<N, DF, DZ> + Allocator<N, DZ>
    {
        // Existing feature - reference the associated feature map
        let afm = self.map.get_mut(&feature).unwrap();

        let samples = self.loc.s.len();

        // Iterate over samples
        for si in 0..samples {
            let m1 = &mut afm[si];        // Associated feature's map particle

            let loc = &self.loc.s[si];
            // Observation innovation and noise
            let (innov, noise) = innovation_model(loc, &m1.x);

            // Innovation variance hx.X.hx' + Q
            let mut cov = noise.Q.clone();
            cov.quadform_tr(N::one(), hx, &m1.X, N::one());

            // Inverse innovation covariance
            let s_cholesky = cov.clone().cholesky().unwrap();
            let sinv = s_cholesky.inverse();

            // Multiplicative fusion of observation weights, integral of Gaussian product g(p,P)*g(q,Q)
            // Drop Ht which is a common factor for all weights
            // step_by(nrows+1) walks the diagonal of the column-packed Cholesky factor
            let sinv_diagonal = s_cholesky.l_dirty().iter().step_by(innov.nrows()+1);
            // NOTE(review): this product is taken from the Cholesky factor of `cov`
            // (not of `sinv`), i.e. sqrt(det(cov)) — confirm the intended determinant term
            let determinate_sinv = sinv_diagonal.fold(N::one(), |prod: N, n: &N| prod * *n).to_f32().unwrap();
            // Mahalanobis-style term innov' * Sinv * innov
            let logl = innov.dot(&(&sinv * &innov)).to_f32().unwrap();
            self.loc.w[si] *=  (logl.exp() * determinate_sinv).powf(-0.5);

            // Estimate conditional state of feature
            let w = &m1.X * hx.transpose() * &sinv;    // EKF for conditional feature observation - specialised for 1D and zero state uncertainty
            m1.x += &w * &innov;
            m1.X.quadform_tr(-N::one(), &w, &cov, N::one());
        }
    }

    /// Forget all feature information, feature no can be reused for a new feature.
    ///
    /// Returns the removed per-particle conditional map, or `None` if the feature
    /// was never observed.
    pub fn forget(&mut self, feature: u32) -> Option<FeatureCondMap<N,DF>>
    {
        self.map.remove(&feature)
    }

    /// Resampling Update.
    ///
    /// Resample particles using weights. Propagate resampling to All features.
    /// Only resamples if weights have been updated.
    ///
    /// Returns the `(unique, lcond)` pair produced by the resampler, or its error.
    pub fn update_resample(&mut self, resampler: &mut bayes_estimate::estimators::sir::Resampler,
                           roughener: &mut sir::Roughener<N, DL>) -> Result<(u32, f32), &'static str>
    {
        let nparticles = self.loc.s.len();
        let (resamples, unique, lcond) = resampler(&mut self.loc.w, &mut self.loc.rng)?;

        // Keep the live samples
        sir::live_samples(&mut self.loc.s, &resamples);
        // After resampling all likelihood weights are uniform
        self.loc.w.fill(1.);

        // Propagate the resampling to every feature's conditional map
        for fm in self.map.values_mut() {
            // Rebuilt (resampled) feature map
            let mut resampled = FeatureCondMap::with_capacity(nparticles);
            for (fp, &copies) in fm.iter().zip(resamples.iter()) {
                // Push one copy of this feature particle per resample count
                for _ in 0..copies {
                    resampled.push(fp.clone());
                }
            }
            *fm = resampled;
        }

        // Roughen location
        roughener(&mut self.loc.s, &mut self.loc.rng);
        Ok((unique, lcond))
    }

    /// Compute sample mean and covariance statistics of feature.
    ///
    /// Use the Maximum Likelihood (bias) estimate definition of covariance (1/n).
    ///
    /// fs is subscript in kstat to return statisics
    /// fm feature map of interest
    /// fi iterator of feature (statistics are computed for this map subset)
    ///
    /// Numerics
    /// No check is made for the conditioning of samples with regard to mean and covariance.
    /// Extreme ranges or very large sample sizes will result in inaccuracy.
    /// The covariance should always remain PSD however.
    ///
    /// Precond: kstat contains the location mean and previous (smaller fs) feature statistics
    fn feature_statistics(&self, kstat: &mut KalmanState<N, Dynamic>,
                          fs: usize, fm: &FeatureCondMap<N, DF>,
                          fi: &mut dyn Iterator<Item = &FeatureCondMap<N, DF>>)
    {
        let nl = self.loc.s.first().unwrap().nrows();    // No of location states
        let nparticles = N::from_usize(self.loc.s.len()).unwrap();

        // Feature mean
        let mean_f = fm.iter().map(|f| &f.x).sum::<VectorN<N, DF>>() / nparticles;
        kstat.x.fixed_rows_mut::<DF>(fs).copy_from(&mean_f);

        // Feature variance: is ML estimate given estimated mean
        let nf = mean_f.data.shape().0;
        let mut var_f = MatrixMN::zeros_generic(nf, nf);
        for fp in fm.iter() {
            let xx = &fp.x - &mean_f;
            var_f += &fp.X + &xx * xx.transpose();
        }
        var_f /= nparticles;
        kstat.X.fixed_slice_mut::<DF,DF>(fs,fs).copy_from(&var_f);

        // Location,feature covariance
        for ls in 0..nl {
            let mut cov = VectorN::zeros_generic(nf, U1);
            let mean_si = kstat.x[ls];
            for fp in fm.iter().enumerate() {
                cov += (&fp.1.x - &mean_f) * (self.loc.s[fp.0][ls] - mean_si);
            }
            cov /= nparticles;
            kstat.X.fixed_slice_mut::<DF, U1>(ls, fs).copy_from(&cov);
        }

        // Feature,feature covariance. Iterate over previous features with means already computed
        let mut fjs = nl;                // Feature subscript
        for fj in fi {
            let mut cov = MatrixN::zeros_generic(nf, nf);
            let mut fpj = fj.iter();
            let mean_fi = kstat.x.fixed_rows::<DF>(fjs);
            for fp in fm.iter() {
                cov += (&fp.x - &mean_f) * (&fpj.next().unwrap().x - &mean_fi).transpose();
            }
            cov /= nparticles as N;
            kstat.X.fixed_slice_mut::<DF, DF>(fjs, fs).copy_from(&cov);
            fjs += 1;
        }
    }

    /// Compute sample mean and covariance statistics of filter.
    ///
    /// kstat elements are filled first with Location statistics and then the Map feature statistics.
    /// Feature statistics are packed consecutively after the location, in the map's
    /// iteration order, and only for those for which there is space in kstat.
    ///
    /// Note: Covariance values are indeterminate for nparticles == 1
    /// Precond:
    ///  kstat must have space for Location statistics
    pub fn statistics_compressed(&self, kstat: &mut KalmanState<N, Dynamic>)
      where DefaultAllocator: Allocator<N, U1, DL>
    {
        let nl = self.loc.s.first().unwrap().data.shape().0; // Number of location states
        assert!(nl.value() + self.map.len() <= kstat.x.nrows(), "kstat to small to hold location and features");

        // Zero everything (required only for non existing feature states)
        kstat.x.fill(N::zero());
        kstat.X.fill(N::zero());

        // Location statistics
        let loc_state = KalmanEstimator::<N, DL>::kalman_state(&self.loc).unwrap();
        kstat.x.rows_generic_mut(0, nl).copy_from(&loc_state.x);
        kstat.X.generic_slice_mut((0, 0), (nl, nl)).copy_from(&loc_state.X);

        // Feature statistics, one feature after another beyond the location states
        let mut fs = nl.value(); // Feature subscript
        for (fnum, fmap) in self.map.iter() {
            // Features preceding `fnum` in this map's (stable per-instance) iteration order
            let mut preceding = self.map.iter().take_while(|&(k, _)| k != fnum).map(|(_, v)| v);
            // Build statistics of this feature against the features before it
            self.feature_statistics(kstat, fs, fmap, &mut preceding);
            fs += fmap.first().unwrap().x.nrows();
        }
        kstat.X.fill_lower_triangle_with_upper_triangle();
    }

    /// Compute sample mean and covariance statistics of filter.
    ///
    /// kstat elements are filled first with Location statistics and then the Map feature statistics.
    /// Each feature's statistics are placed at its feature number as index (after the
    /// location), and computed in ascending feature number, only for those for which
    /// there is space in kstat.
    ///
    /// Note: Covariance values are indeterminate for nparticles == 1
    /// Precond:
    ///  kstat must have space for Location statistics
    pub fn statistics_sparse(&self, kstat: &mut KalmanState<N, Dynamic>)
        where DefaultAllocator: Allocator<N, U1, DL>
    {
        let nl = self.loc.s.first().unwrap().data.shape().0; // Number of location states
        assert!(nl.value() + self.map.len() <= kstat.x.nrows(), "kstat to small to hold location and features");

        // Zero everything (required only for non existing feature states)
        kstat.x.fill(N::zero());
        kstat.X.fill(N::zero());

        // Location statistics
        let loc_state = KalmanEstimator::<N,DL>::kalman_state(&self.loc).unwrap();
        kstat.x.rows_generic_mut(0, nl).copy_from(&loc_state.x);
        kstat.X.generic_slice_mut((0, 0), (nl, nl)).copy_from(&loc_state.X);

        // Feature statistics in ascending feature number
        let mut ordered: Vec<u32> = self.map.keys().copied().collect();
        ordered.sort_unstable();
        for &fnum in ordered.iter() {
            let fs = nl.value() + fnum as usize; // Feature subscript
            // Features with smaller numbers, whose means are already in kstat
            let mut preceding = ordered.iter().take_while(|&&k| k != fnum).map(|k| self.map.get(k).unwrap());
            self.feature_statistics(kstat, fs, self.map.get(&fnum).unwrap(), &mut preceding);
        }
        kstat.X.fill_lower_triangle_with_upper_triangle();
    }

}