// fast_slam/fast.rs

//! SLAM: Simultaneous Localization and Mapping using the FastSLAM algorithm.
//!
//! See Ref [1].
//!
//! [1] "FastSLAM: A Factored Solution to the Simultaneous Localization and Mapping Problem",
//! Montemerlo, M., Thrun, S., Koller, D. and Wegbreit, B. Proceedings of the AAAI National Conference on Artificial Intelligence, 2002.
use std::collections::HashMap;

use bayes_estimate::estimators::sir;
use bayes_estimate::estimators::sir::SampleState;
use bayes_estimate::models::{KalmanEstimator, KalmanState};
use bayes_estimate::noise::CorrelatedNoise;
use nalgebra::{
    allocator::Allocator, Const, DefaultAllocator, Dim, Dyn, OMatrix, OVector, RealField, U1,
};
use num_traits::{FromPrimitive, ToPrimitive};
use rand_core::RngCore;

19/// Fast SLAM state.
20///
21/// A sample state of the location with augmented Kalman states of map features.
22pub struct FastSLAM<N: RealField, DL: Dim, const DF: usize>
23where
24    DefaultAllocator: Allocator<DL, DL>
25        + Allocator<DL>
26        + Allocator<Const<DF>, Const<DF>>
27        + Allocator<Const<DF>>,
28{
29    /// Location as a sample state.
30    pub loc: SampleState<N, DL>,
31    /// Hashmap of augmented map feature states.
32    pub map: FeatureMap<N, Const<DF>>,
33}
34
35pub type Feature<N, DF> = KalmanState<N, DF>;
36pub type FeatureCondMap<N, DF> = Vec<Feature<N, DF>>;
37type FeatureMap<N, DF> = HashMap<u32, FeatureCondMap<N, DF>>;
38
39impl<N: RealField + Copy + FromPrimitive + ToPrimitive, DL: Dim, const DF: usize>
40    FastSLAM<N, DL, DF>
41where
42    DefaultAllocator: Allocator<DL, DL>
43        + Allocator<DL>
44        + Allocator<Const<DF>, Const<DF>>
45        + Allocator<U1, Const<DF>>
46        + Allocator<Const<DF>>,
47{
48    /// Creates a [FastSLAM] with empty feature map.
49    pub fn new(loc: SampleState<N, DL>) -> Self {
50        FastSLAM {
51            loc,
52            map: FeatureMap::new(),
53        }
54    }
55
56    /// SLAM new feature observation (overwrite).
57    ///
58    /// Assumes there is no prior information about the feature (strictly a uniform un-informative prior)
59    /// This implies:
60    /// a) There has no information about location so no resampling is required
61    /// b) Feature state estimates comes directly from the location and observation
62    pub fn observe_new(
63        &mut self,
64        feature: u32,
65        obs_model: impl Fn(&OVector<N, DL>) -> Feature<N, Const<DF>>,
66    ) {
67        let fmap: FeatureCondMap<N, Const<DF>> = self.loc.s.iter().map(|s| obs_model(s)).collect();
68        self.map.insert(feature, fmap);
69    }
70
71    /// SLAM Feature observation.
72    ///  Uses Extended Fast_SLAM observation equations
73    /// Note: Mathematically only weight ratios are important. Numerically however the range should be restricted.
74    /// The weights are computed here using the simplest form with common factor Ht removed.
75    pub fn observe<DZ: Dim>(
76        &mut self,
77        feature: u32,
78        innovation_model: impl Fn(
79            &OVector<N, DL>,
80            &OVector<N, Const<DF>>,
81        ) -> (OVector<N, DZ>, CorrelatedNoise<N, DZ>),
82        hx: &OMatrix<N, DZ, Const<DF>>,
83    ) where
84        DefaultAllocator: Allocator<U1, Const<DF>>
85            + Allocator<DZ, DZ>
86            + Allocator<DZ, Const<DF>>
87            + Allocator<Const<DF>, DZ>
88            + Allocator<DZ>,
89    {
90        // Existing feature - reference the associated feature map
91        let afm = self.map.get_mut(&feature).unwrap();
92
93        let nsamples = self.loc.s.len();
94
95        // Iterate over samples
96        for si in 0..nsamples {
97            let m1 = &mut afm[si]; // Feature map state
98
99            let loc = &self.loc.s[si];
100            // Observation innovation and noise
101            let (innov, noise) = innovation_model(loc, &m1.x);
102
103            // Innovation variance hx.X.hx' + Q
104            let mut cov = noise.Q.clone();
105            cov.quadform_tr(N::one(), hx, &m1.X, N::one());
106
107            // Inverse innovation covariance
108            let s_cholesky = cov.clone().cholesky().unwrap();
109            let sinv = s_cholesky.inverse();
110
111            // Multiplicative fusion of observation weights, integral of Gaussian product g(p,P)*g(q,Q)
112            // Drop Ht which is a common factor for all weights
113            let sinv_diagonal = s_cholesky.l_dirty().iter().step_by(innov.nrows() + 1);
114            let determinate_sinv = sinv_diagonal
115                .fold(N::one(), |prod: N, n: &N| prod * *n)
116                .to_f32()
117                .unwrap();
118            let logl = innov.dot(&(&sinv * &innov)).to_f32().unwrap();
119            self.loc.w[si] *= (logl.exp() * determinate_sinv).powf(-0.5);
120
121            // Estimate conditional state of feature
122            let w = &m1.X * hx.transpose() * &sinv; // EKF for conditional feature observation - specialised for 1D and zero state uncertainty
123            m1.x += &w * &innov;
124            m1.X.quadform_tr(-N::one(), &w, &cov, N::one());
125        }
126    }
127
128    /// Forget all feature information, feature no can be reused for a new feature
129    pub fn forget(&mut self, feature: u32) -> Option<FeatureCondMap<N, Const<DF>>> {
130        self.map.remove(&feature)
131    }
132
133    /// Resampling Update.
134    ///
135    /// Resample using weights. Propagate resampling to All features.
136    /// Only resamples if weights have been updated.
137    pub fn update_resample(
138        &mut self,
139        resampler: &mut sir::Resampler,
140        roughener: &mut sir::Roughener<N, DL>,
141        rng: &mut dyn RngCore
142    ) -> Result<(u32, f32), &str> {
143        // Determine resampling
144        let (resamples, unique, lcond) = resampler(&mut self.loc.w, rng)?;
145        // Keep live sample
146        sir::live_samples(&mut self.loc.s, &resamples);
147        // Resampling results in uniform likelihoods
148        self.loc.w.fill(1.);
149
150        // Propagate resampling to the features
151        let nsamples = self.loc.s.len();
152        for (_feature, fm) in self.map.iter_mut() {
153            // All Features
154            let mut fmr = FeatureCondMap::with_capacity(nsamples); // Resampled feature map
155            for fmi in fm.iter().enumerate() {
156                for _res in 0..resamples[fmi.0] {
157                    // Multiple copies of this resampled feature
158                    fmr.push(fmi.1.clone());
159                }
160            }
161            *fm = fmr; // move resamples into feature map
162        }
163
164        roughener(&mut self.loc.s, rng); // Roughen location
165        Ok((unique, lcond))
166    }
167
168    /// Compute sample mean and covariance statistics of feature.
169    ///
170    /// Use the Maximum Likelihood (bias) estimate definition of covariance (1/n).
171    ///
172    /// fs is subscript in kstat to return statistics
173    /// fm feature map of interest
174    /// fi iterator of feature (statistics are computed for this map subset)
175    ///
176    /// Numerics
177    /// No check is made for the conditioning of samples with regard to mean and covariance.
178    /// Extreme ranges or very large sample sizes will result in inaccuracy.
179    /// The covariance should always remain PSD, however.
180    ///
181    /// Precond: kstat contains the location mean and previous (smaller fs) feature statistics
182    fn feature_statistics(
183        &self,
184        kstat: &mut KalmanState<N, Dyn>,
185        fs: usize,
186        fm: &FeatureCondMap<N, Const<DF>>,
187        fi: &mut dyn Iterator<Item = &FeatureCondMap<N, Const<DF>>>,
188    ) {
189        let nl = self.loc.s.first().unwrap().nrows(); // No of location states
190        let nsamples = N::from_usize(self.loc.s.len()).unwrap();
191
192        // Feature mean
193        let mean_f = fm.iter().map(|f| &f.x).sum::<OVector<N, Const<DF>>>() / nsamples;
194        kstat.x.fixed_rows_mut::<DF>(fs).copy_from(&mean_f);
195
196        // Feature variance: is ML estimate given estimated mean
197        let nf = mean_f.shape_generic().0;
198        let mut var_f = OMatrix::zeros_generic(nf, nf);
199        for fp in fm.iter() {
200            let xx = &fp.x - &mean_f;
201            var_f += &fp.X + &xx * xx.transpose();
202        }
203        var_f /= nsamples;
204        kstat.X.fixed_view_mut::<DF, DF>(fs, fs).copy_from(&var_f);
205
206        // Location,feature covariance
207        for ls in 0..nl {
208            let mut cov = OVector::zeros_generic(nf, Const::<1>);
209            let mean_si = kstat.x[ls];
210            for fp in fm.iter().enumerate() {
211                cov += (&fp.1.x - &mean_f) * (self.loc.s[fp.0][ls] - mean_si);
212            }
213            cov /= nsamples;
214            kstat.X.fixed_view_mut::<DF, 1>(ls, fs).copy_from(&cov);
215        }
216
217        // Feature,feature covariance. Iterate over previous features with means already computed
218        let mut fjs = nl; // Feature subscript
219        for fj in fi {
220            let mut cov = OMatrix::zeros_generic(nf, nf);
221            let mut fpj = fj.iter();
222            let mean_fi = kstat.x.fixed_rows::<DF>(fjs);
223            for fp in fm.iter() {
224                cov += (&fp.x - &mean_f) * (&fpj.next().unwrap().x - &mean_fi).transpose();
225            }
226            cov /= nsamples;
227            kstat.X.fixed_view_mut::<DF, DF>(fjs, fs).copy_from(&cov);
228            fjs += DF;
229        }
230    }
231
232    /// Compute sample mean and covariance statistics of filter.
233    ///
234    /// kstat elements are filled first with Location statistics and then the Map feature statistics.
235    /// Feature statistics are computed in feature number order and only for those for which there is space in kstat.
236    ///
237    /// Precond:
238    ///  kstat must have space for location statistics
239    pub fn statistics_compressed(&self, kstat: &mut KalmanState<N, Dyn>)
240    where
241        DefaultAllocator: Allocator<U1, DL>,
242    {
243        let nl = self.loc.s.first().unwrap().shape_generic().0; // No of location states
244        assert!(
245            nl.value() + self.map.len() <= kstat.x.nrows(),
246            "kstat to small to hold location and features"
247        );
248
249        kstat.x.fill(N::zero()); // Zero everything (required only for non-existing feature states
250        kstat.X.fill(N::zero()); // Zero everything (required only for non-existing feature states
251
252        // Get Location statistics
253        let loc_state = KalmanEstimator::<N, DL>::kalman_state(&self.loc).unwrap();
254        kstat.x.rows_generic_mut(0, nl).copy_from(&loc_state.x);
255        kstat
256            .X
257            .generic_view_mut((0, 0), (nl, nl))
258            .copy_from(&loc_state.X);
259
260        // Iterated over feature statistics
261        let mut fs = nl.value(); // Feature subscript
262        for f in self.map.iter() {
263            let mut until_fi = self.map.iter().take_while(|it| it.0 != f.0).map(|it| it.1);
264            self.feature_statistics(kstat, fs, f.1, &mut until_fi); // build statistics of fi with other features up to fi
265            fs += DF;
266        }
267        kstat.X.fill_lower_triangle_with_upper_triangle();
268    }
269
270    /// Compute sample mean and covariance statistics of filter.
271    ///
272    /// kstat elements are filled first with Location statistics and then the Map feature statistics.
273    /// Feature statistics are computed with the feature number as index (after location) and only for those for which there is space in kstat.
274    ///
275    /// Precond:
276    ///  kstat must have space for Location statistics
277    pub fn statistics_sparse(&self, kstat: &mut KalmanState<N, Dyn>)
278    where
279        DefaultAllocator: Allocator<U1, DL>,
280    {
281        let nl = self.loc.s.first().unwrap().shape_generic().0; // No of location states
282        assert!(
283            nl.value() + self.map.len() <= kstat.x.nrows(),
284            "kstat to small to hold location and features"
285        );
286
287        kstat.x.fill(N::zero()); // Zero everything (required only for non-existing feature states
288        kstat.X.fill(N::zero()); // Zero everything (required only for non-existing feature states
289
290        // Get Location statistics
291        let loc_state = KalmanEstimator::<N, DL>::kalman_state(&self.loc).unwrap();
292        kstat.x.rows_generic_mut(0, nl).copy_from(&loc_state.x);
293        kstat
294            .X
295            .generic_view_mut((0, 0), (nl, nl))
296            .copy_from(&loc_state.X);
297
298        // Iterated over feature statistics
299        let mut ordered_features = self.map.iter().map(|m| *m.0).collect::<Vec<_>>();
300        ordered_features.sort();
301        for fi in ordered_features.iter().cloned() {
302            let fs = nl.value() + DF * fi as usize; // Feature subscript
303            let mut until_fi = ordered_features
304                .iter()
305                .take_while(|it| **it != fi)
306                .map(|it| self.map.get(it).unwrap());
307            self.feature_statistics(kstat, fs, self.map.get(&fi).unwrap(), &mut until_fi);
308            // build statistics of fi with other features up to fi
309        }
310        kstat.X.fill_lower_triangle_with_upper_triangle();
311    }
312}