1#![allow(non_snake_case)]
2
3use nalgebra::{
10 allocator::Allocator, Const, DefaultAllocator, Dim, DimAdd, DimMin, DimMinimum, DimSum, U1,
11};
12use nalgebra::{OMatrix, OVector, QR, RealField};
13
14use crate::linalg::cholesky::UDU;
15use crate::linalg::rcond;
16use crate::models::{
17 Estimator, ExtendedLinearObserver, InformationState, KalmanEstimator, KalmanState,
18};
19use crate::noise::{CorrelatedNoise, CoupledNoise};
20
/// Information root state representation.
///
/// Holds the state as an upper-triangular information root `R` and root state
/// vector `r`, where (see the `TryFrom`/`KalmanEstimator` impls below)
/// `r = R * x` and the information matrix is `I = R' * R`.
#[derive(PartialEq, Clone)]
pub struct InformationRootState<N: RealField, D: Dim>
where
    DefaultAllocator: Allocator<D, D> + Allocator<D>,
{
    // Information root state vector: r = R * x.
    pub r: OVector<N, D>,
    // Information root matrix (upper triangular): R' * R = I.
    pub R: OMatrix<N, D, D>,
}
36
37impl<N: Copy + RealField, D: Dim> TryFrom<InformationState<N, D>> for InformationRootState<N, D>
38where
39 DefaultAllocator: Allocator<D, D> + Allocator<D>,
40{
41 type Error = &'static str;
42
43 fn try_from(state: InformationState<N, D>) -> Result<Self, Self::Error> {
44 let R = state.I.clone().cholesky().ok_or("I not PD")?
46 .l().transpose();
47
48 let shape = R.shape_generic();
50 let mut RI = OMatrix::identity_generic(shape.0, shape.1);
51 R.solve_upper_triangular_mut(&mut RI);
52 let r = RI.tr_mul(&state.i);
53 Ok(InformationRootState { r, R })
54 }
55}
56
57impl<N: Copy + RealField, D: Dim> InformationRootState<N, D>
58 where
59 DefaultAllocator: Allocator<D, D> + Allocator<D>,
60{
61 pub fn information_state<'e>(&self) -> Result<InformationState<N, D>, &'e str> {
62 let I = self.R.tr_mul(&self.R);
64 let x = self.state()?;
65 let i = &I * x;
67
68 Ok(InformationState { i, I })
69 }
70}
71
72impl<N: Copy + RealField, D: Dim> Estimator<N, D> for InformationRootState<N, D>
73where
74 DefaultAllocator: Allocator<D, D> + Allocator<D>,
75{
76 fn state<'e>(&self) -> Result<OVector<N, D>, &'e str> {
77 self.kalman_state().map(|res| res.x)
78 }
79}
80
81impl<N: Copy + RealField, D: Dim> TryFrom<KalmanState<N, D>> for InformationRootState<N, D>
82 where
83 DefaultAllocator: Allocator<D, D> + Allocator<D>,
84{
85 type Error = &'static str;
86
87 fn try_from(state: KalmanState<N, D>) -> Result<Self, Self::Error> {
88 let udu = UDU::new();
90 let mut R = state.X.clone();
91 let rcond = udu.UCfactor_n(&mut R, state.X.nrows());
92 rcond::check_positive(rcond, "X not PD")?;
93 udu.UTinverse(&mut R).unwrap(); let r = &R * &state.x;
97 Ok( InformationRootState { r, R } )
98 }
99}
100
101impl<N: Copy + RealField, D: Dim> KalmanEstimator<N, D> for InformationRootState<N, D>
102where
103 DefaultAllocator: Allocator<D, D> + Allocator<D>,
104{
105 fn kalman_state<'e>(&self) -> Result<KalmanState<N, D>, &'e str> {
106 let shape = self.R.shape_generic();
107 let mut RI = OMatrix::identity_generic(shape.0, shape.1);
108 self.R.solve_upper_triangular_mut(&mut RI);
109
110 let X = &RI * &RI.transpose();
112 let x = RI * &self.r;
114
115 Ok(KalmanState { x, X })
116 }
117}
118
119impl<N: Copy + RealField, D: Dim, ZD: Dim> ExtendedLinearObserver<N, D, ZD>
120 for InformationRootState<N, D>
121where
122 DefaultAllocator: Allocator<D, D>
123 + Allocator<ZD, D>
124 + Allocator<ZD, ZD>
125 + Allocator<D>
126 + Allocator<ZD>,
127 D: DimAdd<ZD> + DimAdd<U1>,
128 DefaultAllocator: Allocator<DimSum<D, ZD>, DimSum<D, U1>> + Allocator<DimSum<D, ZD>>,
129 DimSum<D, ZD>: DimMin<DimSum<D, U1>>,
130 DefaultAllocator: Allocator<DimMinimum<DimSum<D, ZD>, DimSum<D, U1>>>
131 + Allocator<DimMinimum<DimSum<D, ZD>, DimSum<D, U1>>, DimSum<D, U1>>,
132{
133 fn observe_innovation<'e>(
134 &mut self,
135 s: &OVector<N, ZD>,
136 hx: &OMatrix<N, ZD, D>,
137 noise: &CorrelatedNoise<N, ZD>,
138 ) -> Result<(), &'e str> {
139 let udu = UDU::new();
140 let mut QI = noise.Q.clone();
141 let rcond = udu.UCfactor_n(&mut QI, s.nrows());
142 rcond::check_positive(rcond, "Q not PD")?;
143 udu.UTinverse(&mut QI).unwrap(); let x = self.state()?; self.observe_info(&(s + hx * x), hx, &QI)
147 }
148}
149
150impl<N: Copy + RealField, D: Dim> InformationRootState<N, D>
151where
152 DefaultAllocator: Allocator<D, D> + Allocator<D>,
153{
154 pub fn predict<QD: Dim>(
155 &mut self,
156 x_pred: &OVector<N, D>,
157 fx: &OMatrix<N, D, D>,
158 noise: &CoupledNoise<N, D, QD>,
159 ) -> Result<(), &'static str>
160 where
161 D: DimAdd<QD>,
162 DefaultAllocator: Allocator<DimSum<D, QD>, DimSum<D, QD>>
163 + Allocator<DimSum<D, QD>>
164 + Allocator<D, QD>
165 + Allocator<QD>,
166 DimSum<D, QD>: DimMin<DimSum<D, QD>>,
167 DefaultAllocator: Allocator<DimMinimum<DimSum<D, QD>, DimSum<D, QD>>>
168 + Allocator<DimMinimum<DimSum<D, QD>, DimSum<D, QD>>, DimSum<D, QD>>,
169 {
170 let mut Fx_inv = fx.clone();
171 let invertible = Fx_inv.try_inverse_mut();
172 if !invertible {
173 return Err("Fx not invertible")?;
174 }
175
176 self.predict_inv_model(x_pred, &Fx_inv, noise)
177 .map(|_rcond| {})
178 }
179
180 pub fn predict_inv_model<QD: Dim>(
181 &mut self,
182 x_pred: &OVector<N, D>,
183 fx_inv: &OMatrix<N, D, D>, noise: &CoupledNoise<N, D, QD>,
185 ) -> Result<N, &'static str>
186 where
187 D: DimAdd<QD>,
188 DefaultAllocator: Allocator<DimSum<D, QD>, DimSum<D, QD>>
189 + Allocator<DimSum<D, QD>>
190 + Allocator<D, QD>
191 + Allocator<QD>,
192 DimSum<D, QD>: DimMin<DimSum<D, QD>>,
193 DefaultAllocator: Allocator<DimMinimum<DimSum<D, QD>, DimSum<D, QD>>>
194 + Allocator<DimMinimum<DimSum<D, QD>, DimSum<D, QD>>, DimSum<D, QD>>,
195 {
196 let mut Gqr = noise.G.clone();
198
199 for qi in 0..noise.q.nrows() {
200 let mut ZZ = Gqr.column_mut(qi);
201 ZZ *= noise.q[qi].sqrt();
202 }
203
204 let dqd = noise.G.shape_generic().0.add(noise.q.shape_generic().0);
206 let mut A = OMatrix::identity_generic(dqd, dqd); let RFxI: OMatrix<N, D, D> = &self.R * fx_inv;
208 let x: OMatrix<N, D, QD> = &RFxI * &Gqr;
209 let x_size = x_pred.shape_generic().0;
210 let q_size = noise.q.shape_generic().0;
211 A.generic_view_mut((q_size.value(), 0), (x_size, q_size))
212 .copy_from(&x);
213 A.generic_view_mut((q_size.value(), q_size.value()), (x_size, x_size))
214 .copy_from(&RFxI);
215 A.generic_view_mut((q_size.value(), 0), (x_size, q_size))
216 .copy_from(&x);
217 A.generic_view_mut((q_size.value(), q_size.value()), (x_size, x_size))
218 .copy_from(&RFxI);
219
220 let qr = QR::new(A);
222 let r = qr.r();
224 self.R
225 .copy_from(&r.generic_view((q_size.value(), q_size.value()), (x_size, x_size)));
226
227 self.r = &self.R * x_pred; Ok(UDU::new().UCrcond(&self.R)) }
231
232 pub fn observe_info<'e, ZD: Dim>(
233 &mut self,
234 z: &OVector<N, ZD>,
235 hx: &OMatrix<N, ZD, D>,
236 noise_inv: &OMatrix<N, ZD, ZD>, ) -> Result<(), &'e str>
238 where
239 DefaultAllocator: Allocator<D, D>
240 + Allocator<ZD, D>
241 + Allocator<ZD, ZD>
242 + Allocator<D>
243 + Allocator<ZD>,
244 D: DimAdd<ZD> + DimAdd<U1>,
245 DefaultAllocator: Allocator<DimSum<D, ZD>, DimSum<D, U1>> + Allocator<DimSum<D, ZD>>,
246 DimSum<D, ZD>: DimMin<DimSum<D, U1>>,
247 DefaultAllocator: Allocator<DimMinimum<DimSum<D, ZD>, DimSum<D, U1>>>
248 + Allocator<DimMinimum<DimSum<D, ZD>, DimSum<D, U1>>, DimSum<D, U1>>,
249 {
250 let x_size = self.r.shape_generic().0;
251 let z_size = z.shape_generic().0;
252 if z_size != hx.shape_generic().0 {
254 return Err("observation and model size inconsistent");
255 }
256
257 let xd = self.r.shape_generic().0;
259 let mut A = OMatrix::identity_generic(xd.add(z.shape_generic().0), xd.add(Const::<1>));
261 A.generic_view_mut((0, 0), (x_size, x_size))
262 .copy_from(&self.R);
263 A.generic_view_mut((0, x_size.value()), (x_size, Const::<1>))
264 .copy_from(&self.r);
265 A.generic_view_mut((x_size.value(), 0), (z_size, x_size))
266 .copy_from(&(noise_inv * hx));
267 A.generic_view_mut((x_size.value(), x_size.value()), (z_size, Const::<1>))
268 .copy_from(&(noise_inv * z));
269
270 let qr = QR::new(A);
272 let r = qr.r();
274
275 self.R.copy_from(&r.generic_view((0, 0), (x_size, x_size)));
277 self.r
278 .copy_from(&r.generic_view((0, x_size.value()), (x_size, Const::<1>)));
279
280 Ok(())
281 }
282}