bayes_estimate/estimators/
information_root.rs

1#![allow(non_snake_case)]
2
3//! Information 'square root' state estimation.
4//!
5//! A discrete Bayesian estimator that uses a linear information root representation [`InformationRootState`] of the system for estimation.
6//!
7//! The linear representation can also be used for non-linear systems by using linearised models.
8
9use nalgebra::{
10    allocator::Allocator, Const, DefaultAllocator, Dim, DimAdd, DimMin, DimMinimum, DimSum, U1,
11};
12use nalgebra::{OMatrix, OVector, QR, RealField};
13
14use crate::linalg::cholesky::UDU;
15use crate::linalg::rcond;
16use crate::models::{
17    Estimator, ExtendedLinearObserver, InformationState, KalmanEstimator, KalmanState,
18};
19use crate::noise::{CorrelatedNoise, CoupledNoise};
20
/// Information Root State.
///
/// Linear representation as an information root state vector and the information root (upper triangular) matrix.
/// For a given [KalmanState] the information root state inverse(R).inverse(R)' == X, r == R.x
/// For a given [InformationState] the information root state R'.R == I, r == inverse(R).i
#[derive(PartialEq, Clone)]
pub struct InformationRootState<N: RealField, D: Dim>
where
    DefaultAllocator: Allocator<D, D> + Allocator<D>,
{
    /// Information root state vector: r == R.x
    pub r: OVector<N, D>,
    /// Information root matrix (upper triangular): inverse(R).inverse(R)' == X
    pub R: OMatrix<N, D, D>,
}
36
37impl<N: Copy + RealField, D: Dim> TryFrom<InformationState<N, D>> for InformationRootState<N, D>
38where
39    DefaultAllocator: Allocator<D, D> + Allocator<D>,
40{
41    type Error = &'static str;
42
43    fn try_from(state: InformationState<N, D>) -> Result<Self, Self::Error> {
44        // Information Root, R'.R = I
45        let R = state.I.clone().cholesky().ok_or("I not PD")?
46            .l().transpose();
47
48        // Information Root state, r=inv(R)'.i
49        let shape = R.shape_generic();
50        let mut RI = OMatrix::identity_generic(shape.0, shape.1);
51        R.solve_upper_triangular_mut(&mut RI);
52        let r = RI.tr_mul(&state.i);
53        Ok(InformationRootState { r, R })
54    }
55}
56
57impl<N: Copy + RealField, D: Dim> InformationRootState<N, D>
58    where
59        DefaultAllocator: Allocator<D, D> + Allocator<D>,
60{
61    pub fn information_state<'e>(&self) -> Result<InformationState<N, D>, &'e str> {
62        // Information, I = R.R'
63        let I = self.R.tr_mul(&self.R);
64        let x = self.state()?;
65        // Information state, i = I.x
66        let i = &I * x;
67
68        Ok(InformationState { i, I })
69    }
70}
71
72impl<N: Copy + RealField, D: Dim> Estimator<N, D> for InformationRootState<N, D>
73where
74    DefaultAllocator: Allocator<D, D> + Allocator<D>,
75{
76    fn state<'e>(&self) -> Result<OVector<N, D>, &'e str> {
77        self.kalman_state().map(|res| res.x)
78    }
79}
80
81impl<N: Copy + RealField, D: Dim> TryFrom<KalmanState<N, D>> for InformationRootState<N, D>
82    where
83        DefaultAllocator: Allocator<D, D> + Allocator<D>,
84{
85    type Error = &'static str;
86
87    fn try_from(state: KalmanState<N, D>) -> Result<Self, Self::Error> {
88        // Information Root, inv(R).inv(R)' = X
89        let udu = UDU::new();
90        let mut R = state.X.clone();
91        let rcond = udu.UCfactor_n(&mut R, state.X.nrows());
92        rcond::check_positive(rcond, "X not PD")?;
93        udu.UTinverse(&mut R).unwrap(); // check_positive should prevent singular
94
95        // Information Root state, r=R*x
96        let r = &R * &state.x;
97        Ok( InformationRootState { r, R } )
98    }
99}
100
101impl<N: Copy + RealField, D: Dim> KalmanEstimator<N, D> for InformationRootState<N, D>
102where
103    DefaultAllocator: Allocator<D, D> + Allocator<D>,
104{
105    fn kalman_state<'e>(&self) -> Result<KalmanState<N, D>, &'e str> {
106        let shape = self.R.shape_generic();
107        let mut RI = OMatrix::identity_generic(shape.0, shape.1);
108        self.R.solve_upper_triangular_mut(&mut RI);
109
110        // Covariance X = inv(R).inv(R)'
111        let X = &RI * &RI.transpose();
112        // State, x= inv(R).r
113        let x = RI * &self.r;
114
115        Ok(KalmanState { x, X })
116    }
117}
118
119impl<N: Copy + RealField, D: Dim, ZD: Dim> ExtendedLinearObserver<N, D, ZD>
120    for InformationRootState<N, D>
121where
122    DefaultAllocator: Allocator<D, D>
123        + Allocator<ZD, D>
124        + Allocator<ZD, ZD>
125        + Allocator<D>
126        + Allocator<ZD>,
127    D: DimAdd<ZD> + DimAdd<U1>,
128    DefaultAllocator: Allocator<DimSum<D, ZD>, DimSum<D, U1>> + Allocator<DimSum<D, ZD>>,
129    DimSum<D, ZD>: DimMin<DimSum<D, U1>>,
130    DefaultAllocator: Allocator<DimMinimum<DimSum<D, ZD>, DimSum<D, U1>>>
131        + Allocator<DimMinimum<DimSum<D, ZD>, DimSum<D, U1>>, DimSum<D, U1>>,
132{
133    fn observe_innovation<'e>(
134        &mut self,
135        s: &OVector<N, ZD>,
136        hx: &OMatrix<N, ZD, D>,
137        noise: &CorrelatedNoise<N, ZD>,
138    ) -> Result<(), &'e str> {
139        let udu = UDU::new();
140        let mut QI = noise.Q.clone();
141        let rcond = udu.UCfactor_n(&mut QI, s.nrows());
142        rcond::check_positive(rcond, "Q not PD")?;
143        udu.UTinverse(&mut QI).unwrap(); // check_positive should prevent singular
144
145        let x = self.state()?; // state is always defined
146        self.observe_info(&(s + hx * x), hx, &QI)
147    }
148}
149
150impl<N: Copy + RealField, D: Dim> InformationRootState<N, D>
151where
152    DefaultAllocator: Allocator<D, D> + Allocator<D>,
153{
154    pub fn predict<QD: Dim>(
155        &mut self,
156        x_pred: &OVector<N, D>,
157        fx: &OMatrix<N, D, D>,
158        noise: &CoupledNoise<N, D, QD>,
159    ) -> Result<(), &'static str>
160    where
161        D: DimAdd<QD>,
162        DefaultAllocator: Allocator<DimSum<D, QD>, DimSum<D, QD>>
163            + Allocator<DimSum<D, QD>>
164            + Allocator<D, QD>
165            + Allocator<QD>,
166        DimSum<D, QD>: DimMin<DimSum<D, QD>>,
167        DefaultAllocator: Allocator<DimMinimum<DimSum<D, QD>, DimSum<D, QD>>>
168            + Allocator<DimMinimum<DimSum<D, QD>, DimSum<D, QD>>, DimSum<D, QD>>,
169    {
170        let mut Fx_inv = fx.clone();
171        let invertible = Fx_inv.try_inverse_mut();
172        if !invertible {
173            return Err("Fx not invertible")?;
174        }
175
176        self.predict_inv_model(x_pred, &Fx_inv, noise)
177            .map(|_rcond| {})
178    }
179
180    pub fn predict_inv_model<QD: Dim>(
181        &mut self,
182        x_pred: &OVector<N, D>,
183        fx_inv: &OMatrix<N, D, D>, // Inverse of linear prediction model Fx
184        noise: &CoupledNoise<N, D, QD>,
185    ) -> Result<N, &'static str>
186    where
187        D: DimAdd<QD>,
188        DefaultAllocator: Allocator<DimSum<D, QD>, DimSum<D, QD>>
189            + Allocator<DimSum<D, QD>>
190            + Allocator<D, QD>
191            + Allocator<QD>,
192        DimSum<D, QD>: DimMin<DimSum<D, QD>>,
193        DefaultAllocator: Allocator<DimMinimum<DimSum<D, QD>, DimSum<D, QD>>>
194            + Allocator<DimMinimum<DimSum<D, QD>, DimSum<D, QD>>, DimSum<D, QD>>,
195    {
196        // Require Root of correlated predict noise (maybe semi-definite)
197        let mut Gqr = noise.G.clone();
198
199        for qi in 0..noise.q.nrows() {
200            let mut ZZ = Gqr.column_mut(qi);
201            ZZ *= noise.q[qi].sqrt();
202        }
203
204        // Form Augmented matrix for factorisation
205        let dqd = noise.G.shape_generic().0.add(noise.q.shape_generic().0);
206        let mut A = OMatrix::identity_generic(dqd, dqd); // Prefill with identity for top left and zero's in off diagonals
207        let RFxI: OMatrix<N, D, D> = &self.R * fx_inv;
208        let x: OMatrix<N, D, QD> = &RFxI * &Gqr;
209        let x_size = x_pred.shape_generic().0;
210        let q_size = noise.q.shape_generic().0;
211        A.generic_view_mut((q_size.value(), 0), (x_size, q_size))
212            .copy_from(&x);
213        A.generic_view_mut((q_size.value(), q_size.value()), (x_size, x_size))
214            .copy_from(&RFxI);
215        A.generic_view_mut((q_size.value(), 0), (x_size, q_size))
216            .copy_from(&x);
217        A.generic_view_mut((q_size.value(), q_size.value()), (x_size, x_size))
218            .copy_from(&RFxI);
219
220        // Calculate factorisation so we have and upper triangular R
221        let qr = QR::new(A);
222        // Extract the roots
223        let r = qr.r();
224        self.R
225            .copy_from(&r.generic_view((q_size.value(), q_size.value()), (x_size, x_size)));
226
227        self.r = &self.R * x_pred; // compute r from x_pred
228
229        Ok(UDU::new().UCrcond(&self.R)) // compute rcond of result
230    }
231
232    pub fn observe_info<'e, ZD: Dim>(
233        &mut self,
234        z: &OVector<N, ZD>,
235        hx: &OMatrix<N, ZD, D>,
236        noise_inv: &OMatrix<N, ZD, ZD>, // Inverse of correlated noise model
237    ) -> Result<(), &'e str>
238    where
239        DefaultAllocator: Allocator<D, D>
240            + Allocator<ZD, D>
241            + Allocator<ZD, ZD>
242            + Allocator<D>
243            + Allocator<ZD>,
244        D: DimAdd<ZD> + DimAdd<U1>,
245        DefaultAllocator: Allocator<DimSum<D, ZD>, DimSum<D, U1>> + Allocator<DimSum<D, ZD>>,
246        DimSum<D, ZD>: DimMin<DimSum<D, U1>>,
247        DefaultAllocator: Allocator<DimMinimum<DimSum<D, ZD>, DimSum<D, U1>>>
248            + Allocator<DimMinimum<DimSum<D, ZD>, DimSum<D, U1>>, DimSum<D, U1>>,
249    {
250        let x_size = self.r.shape_generic().0;
251        let z_size = z.shape_generic().0;
252        // Size consistency, z to model
253        if z_size != hx.shape_generic().0 {
254            return Err("observation and model size inconsistent");
255        }
256
257        // Form augmented matrix for factorisation
258        let xd = self.r.shape_generic().0;
259        // Prefill with identity for top left and zero's in off diagonals
260        let mut A = OMatrix::identity_generic(xd.add(z.shape_generic().0), xd.add(Const::<1>));
261        A.generic_view_mut((0, 0), (x_size, x_size))
262            .copy_from(&self.R);
263        A.generic_view_mut((0, x_size.value()), (x_size, Const::<1>))
264            .copy_from(&self.r);
265        A.generic_view_mut((x_size.value(), 0), (z_size, x_size))
266            .copy_from(&(noise_inv * hx));
267        A.generic_view_mut((x_size.value(), x_size.value()), (z_size, Const::<1>))
268            .copy_from(&(noise_inv * z));
269
270        // Calculate factorisation so we have and upper triangular R
271        let qr = QR::new(A);
272        // Extract the roots
273        let r = qr.r();
274
275        // Extract the roots
276        self.R.copy_from(&r.generic_view((0, 0), (x_size, x_size)));
277        self.r
278            .copy_from(&r.generic_view((0, x_size.value()), (x_size, Const::<1>)));
279
280        Ok(())
281    }
282}