//! FastSLAM: sample-based simultaneous localization and mapping.
//!
//! The vehicle location is represented by a set of weighted samples (a
//! particle filter `SampleState`), and each map feature is represented by a
//! Kalman estimate conditioned on every location sample.

use std::collections::HashMap;

use bayes_estimate::estimators::sir;
use bayes_estimate::estimators::sir::SampleState;
use bayes_estimate::models::{KalmanEstimator, KalmanState};
use bayes_estimate::noise::CorrelatedNoise;
use nalgebra::{
    allocator::Allocator, Const, DefaultAllocator, Dim, Dyn, OMatrix, OVector, RealField, U1,
};
use num_traits::{FromPrimitive, ToPrimitive};
use rand_core::RngCore;

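/// FastSLAM estimator state: a sample-based location estimate together with a
/// map of Kalman-estimated features conditioned on those location samples.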
pub struct FastSLAM<N: RealField, DL: Dim, const DF: usize>
where
    DefaultAllocator: Allocator<DL, DL>
        + Allocator<DL>
        + Allocator<Const<DF>, Const<DF>>
        + Allocator<Const<DF>>,
{
    /// Location estimate as weighted samples.
    pub loc: SampleState<N, DL>,
    /// Map of features, each conditioned on the location samples.
    pub map: FeatureMap<N, Const<DF>>,
}

pub type Feature<N, DF> = KalmanState<N, DF>;
pub type FeatureCondMap<N, DF> = Vec<Feature<N, DF>>;
type FeatureMap<N, DF> = HashMap<u32, FeatureCondMap<N, DF>>;

impl<N: RealField + Copy + FromPrimitive + ToPrimitive, DL: Dim, const DF: usize>
    FastSLAM<N, DL, DF>
where
    DefaultAllocator: Allocator<DL, DL>
        + Allocator<DL>
        + Allocator<Const<DF>, Const<DF>>
        + Allocator<U1, Const<DF>>
        + Allocator<Const<DF>>,
{
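    /// Creates a FastSLAM estimator with the given location samples and an empty feature map.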
    pub fn new(loc: SampleState<N, DL>) -> Self {
        FastSLAM {
            loc,
            map: FeatureMap::new(),
        }
    }

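    /// Observes a new feature.
    ///
    /// `obs_model` maps each location sample to an initial Kalman estimate of the
    /// feature; these estimates replace anything previously stored under `feature`.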
    pub fn observe_new(
        &mut self,
        feature: u32,
        obs_model: impl Fn(&OVector<N, DL>) -> Feature<N, Const<DF>>,
    ) {
        let fmap: FeatureCondMap<N, Const<DF>> = self.loc.s.iter().map(|s| obs_model(s)).collect();
        self.map.insert(feature, fmap);
    }

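    /// Observes an existing feature.
    ///
    /// For every location sample, `innovation_model` provides the observation
    /// innovation and its additive noise given the sample and the feature mean,
    /// and `hx` is the linearised observation matrix. Each conditional Kalman
    /// estimate of the feature is updated and the sample weight is scaled by
    /// the likelihood of the innovation.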
    pub fn observe<DZ: Dim>(
        &mut self,
        feature: u32,
        innovation_model: impl Fn(
            &OVector<N, DL>,
            &OVector<N, Const<DF>>,
        ) -> (OVector<N, DZ>, CorrelatedNoise<N, DZ>),
        hx: &OMatrix<N, DZ, Const<DF>>,
    ) where
        DefaultAllocator: Allocator<U1, Const<DF>>
            + Allocator<DZ, DZ>
            + Allocator<DZ, Const<DF>>
            + Allocator<Const<DF>, DZ>
            + Allocator<DZ>,
    {
        let afm = self.map.get_mut(&feature).unwrap();

        let nsamples = self.loc.s.len();

        for si in 0..nsamples {
            // Conditional feature estimate and location sample for this particle.
            let m1 = &mut afm[si];
            let loc = &self.loc.s[si];
            let (innov, noise) = innovation_model(loc, &m1.x);

            // Innovation covariance S = Hx.X.Hx' + Q.
            let mut cov = noise.Q.clone();
            cov.quadform_tr(N::one(), hx, &m1.X, N::one());

            let s_cholesky = cov.clone().cholesky().unwrap();
            let sinv = s_cholesky.inverse();

            // Product of the diagonal of the Cholesky factor of S
            // (stepping over the column-major storage picks out the diagonal).
            let sinv_diagonal = s_cholesky.l_dirty().iter().step_by(innov.nrows() + 1);
            let determinate_sinv = sinv_diagonal
                .fold(N::one(), |prod: N, n: &N| prod * *n)
                .to_f32()
                .unwrap();
            // Scale the sample weight by the likelihood of the innovation.
            let logl = innov.dot(&(&sinv * &innov)).to_f32().unwrap();
            self.loc.w[si] *= (logl.exp() * determinate_sinv).powf(-0.5);

            // Kalman update of the conditional feature estimate:
            // gain W = X.Hx'.S^-1, then x += W.innov and X -= W.S.W'.
            let w = &m1.X * hx.transpose() * &sinv;
            m1.x += &w * &innov;
            m1.X.quadform_tr(-N::one(), &w, &cov, N::one());
        }
    }

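    /// Forgets a feature, removing it from the map.
    ///
    /// Returns the removed conditional estimates if the feature was present.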
    pub fn forget(&mut self, feature: u32) -> Option<FeatureCondMap<N, Const<DF>>> {
        self.map.remove(&feature)
    }

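    /// Resamples the location samples and the feature estimates conditioned on them.
    ///
    /// `resampler` decides how many copies of each sample are kept, `roughener`
    /// then perturbs the location samples. Returns the `(unique, lcond)` pair
    /// produced by the resampler.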
    pub fn update_resample(
        &mut self,
        resampler: &mut sir::Resampler,
        roughener: &mut sir::Roughener<N, DL>,
        rng: &mut dyn RngCore,
    ) -> Result<(u32, f32), &str> {
        // Resample the location samples according to their weights.
        let (resamples, unique, lcond) = resampler(&mut self.loc.w, rng)?;
        sir::live_samples(&mut self.loc.s, &resamples);
        self.loc.w.fill(1.);

        // Resample the conditional feature estimates to match the location samples.
        let nsamples = self.loc.s.len();
        for (_feature, fm) in self.map.iter_mut() {
            let mut fmr = FeatureCondMap::with_capacity(nsamples);
            for fmi in fm.iter().enumerate() {
                for _res in 0..resamples[fmi.0] {
                    fmr.push(fmi.1.clone());
                }
            }
            *fm = fmr;
        }

        roughener(&mut self.loc.s, rng);
        Ok((unique, lcond))
    }

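    /// Accumulates the sample statistics of one feature into `kstat`.
    ///
    /// The feature mean and variance are placed at state offset `fs`, together
    /// with the cross covariances to the location state and to the features
    /// already processed (provided by the iterator `fi`).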
    fn feature_statistics(
        &self,
        kstat: &mut KalmanState<N, Dyn>,
        fs: usize,
        fm: &FeatureCondMap<N, Const<DF>>,
        fi: &mut dyn Iterator<Item = &FeatureCondMap<N, Const<DF>>>,
    ) {
        let nl = self.loc.s.first().unwrap().nrows();
        let nsamples = N::from_usize(self.loc.s.len()).unwrap();

        // Feature mean over the conditional estimates.
        let mean_f = fm.iter().map(|f| &f.x).sum::<OVector<N, Const<DF>>>() / nsamples;
        kstat.x.fixed_rows_mut::<DF>(fs).copy_from(&mean_f);

        // Feature variance: mean conditional covariance plus the spread of the conditional means.
        let nf = mean_f.shape_generic().0;
        let mut var_f = OMatrix::zeros_generic(nf, nf);
        for fp in fm.iter() {
            let xx = &fp.x - &mean_f;
            var_f += &fp.X + &xx * xx.transpose();
        }
        var_f /= nsamples;
        kstat.X.fixed_view_mut::<DF, DF>(fs, fs).copy_from(&var_f);

        // Cross covariance of the feature with each location state element.
        for ls in 0..nl {
            let mut cov = OVector::zeros_generic(nf, Const::<1>);
            let mean_si = kstat.x[ls];
            for fp in fm.iter().enumerate() {
                cov += (&fp.1.x - &mean_f) * (self.loc.s[fp.0][ls] - mean_si);
            }
            cov /= nsamples;
            kstat.X.fixed_view_mut::<DF, 1>(ls, fs).copy_from(&cov);
        }

        // Cross covariance of the feature with the features already processed.
        let mut fjs = nl;
        for fj in fi {
            let mut cov = OMatrix::zeros_generic(nf, nf);
            let mut fpj = fj.iter();
            let mean_fi = kstat.x.fixed_rows::<DF>(fjs);
            for fp in fm.iter() {
                cov += (&fp.x - &mean_f) * (&fpj.next().unwrap().x - &mean_fi).transpose();
            }
            cov /= nsamples;
            kstat.X.fixed_view_mut::<DF, DF>(fjs, fs).copy_from(&cov);
            fjs += DF;
        }
    }

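    /// Computes the mean and covariance statistics of the location and all features,
    /// packing the features contiguously after the location state in map iteration order.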
    pub fn statistics_compressed(&self, kstat: &mut KalmanState<N, Dyn>)
    where
        DefaultAllocator: Allocator<U1, DL>,
    {
        let nl = self.loc.s.first().unwrap().shape_generic().0;
        assert!(
            nl.value() + self.map.len() <= kstat.x.nrows(),
            "kstat too small to hold location and features"
        );

        kstat.x.fill(N::zero());
        kstat.X.fill(N::zero());

        // Location statistics from the weighted samples.
        let loc_state = KalmanEstimator::<N, DL>::kalman_state(&self.loc).unwrap();
        kstat.x.rows_generic_mut(0, nl).copy_from(&loc_state.x);
        kstat
            .X
            .generic_view_mut((0, 0), (nl, nl))
            .copy_from(&loc_state.X);

        // Feature statistics, packed after the location state in map iteration order.
        let mut fs = nl.value();
        for f in self.map.iter() {
            let mut until_fi = self.map.iter().take_while(|it| it.0 != f.0).map(|it| it.1);
            self.feature_statistics(kstat, fs, f.1, &mut until_fi);
            fs += DF;
        }
        kstat.X.fill_lower_triangle_with_upper_triangle();
    }

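    /// Computes the mean and covariance statistics of the location and all features,
    /// placing each feature at the state offset determined by its feature number.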
    pub fn statistics_sparse(&self, kstat: &mut KalmanState<N, Dyn>)
    where
        DefaultAllocator: Allocator<U1, DL>,
    {
        let nl = self.loc.s.first().unwrap().shape_generic().0;
        assert!(
            nl.value() + self.map.len() <= kstat.x.nrows(),
            "kstat too small to hold location and features"
        );

        kstat.x.fill(N::zero());
        kstat.X.fill(N::zero());

        // Location statistics from the weighted samples.
        let loc_state = KalmanEstimator::<N, DL>::kalman_state(&self.loc).unwrap();
        kstat.x.rows_generic_mut(0, nl).copy_from(&loc_state.x);
        kstat
            .X
            .generic_view_mut((0, 0), (nl, nl))
            .copy_from(&loc_state.X);

        // Feature statistics, processed in ascending feature-number order.
        let mut ordered_features = self.map.iter().map(|m| *m.0).collect::<Vec<_>>();
        ordered_features.sort();
        for fi in ordered_features.iter().cloned() {
            let fs = nl.value() + DF * fi as usize;
            let mut until_fi = ordered_features
                .iter()
                .take_while(|it| **it != fi)
                .map(|it| self.map.get(it).unwrap());
            self.feature_statistics(kstat, fs, self.map.get(&fi).unwrap(), &mut until_fi);
        }
        kstat.X.fill_lower_triangle_with_upper_triangle();
    }
}