use crate::linalg::allocator::Allocator;
use crate::linalg::{DefaultAllocator, DimName};
use crate::md::trajectory::{InterpState, Traj};
pub use super::estimate::*;
pub use super::kalman::*;
pub use super::measurement::*;
pub use super::residual::*;
pub use super::snc::*;
pub use super::*;
use crate::propagators::error_ctrl::ErrorCtrl;
use crate::propagators::PropInstance;
pub use crate::time::{Duration, Unit};
use crate::State;
use std::convert::TryFrom;
use std::default::Default;
use std::fmt;
use std::marker::PhantomData;
use std::ops::Add;
/// Condition at which the backward smoothing of an estimate arc stops.
#[derive(Clone, Copy, Debug)]
pub enum SmoothingArc {
    /// Stop smoothing when the gap between two successive estimate epochs exceeds this duration
    TimeGap(Duration),
    /// Stop smoothing once the estimates are earlier than this epoch
    Epoch(Epoch),
    /// Stop smoothing at the first predicted estimate (i.e. a pure time update)
    Prediction,
    /// Smooth the entire arc of estimates
    All,
}
impl fmt::Display for SmoothingArc {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
SmoothingArc::All => write!(f, "all estimates"),
SmoothingArc::Epoch(e) => write!(f, "{}", e),
SmoothingArc::TimeGap(g) => write!(f, "time gap of {}", g),
SmoothingArc::Prediction => write!(f, "first prediction"),
}
}
}
/// Configuration of the orbit determination iteration (smooth + re-filter) loop.
#[derive(Clone, Copy, Debug)]
pub struct IterationConf {
    /// Smoothing condition applied before each iteration
    pub smoother: SmoothingArc,
    /// Absolute RMS residual below which the iteration is considered converged
    pub absolute_tol: f64,
    /// Relative tolerance on the change of the RMS residual between iterations
    pub relative_tol: f64,
    /// Maximum number of iterations before giving up
    pub max_iterations: usize,
    /// Maximum number of subsequent diverging iterations before giving up
    pub max_divergences: usize,
    /// If true, hitting the iteration/divergence limits returns an error instead of only logging one
    pub force_failure: bool,
    /// If true, convergence is checked on the prefit residual RMS; otherwise on the postfit RMS
    pub use_prefit: bool,
}
impl Default for IterationConf {
fn default() -> Self {
Self {
smoother: SmoothingArc::All,
absolute_tol: 1e-2,
relative_tol: 1e-3,
max_iterations: 15,
max_divergences: 3,
force_failure: false,
use_prefit: false,
}
}
}
impl fmt::Display for IterationConf {
    /// Summarizes the convergence criteria of this iteration configuration.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "Iterate {} residuals until abs = {:.2e}, or rel = {:.2e}, or {} iterations, or {} subsequent divergences with smoothing condition of {}",
            if self.use_prefit { "prefit" } else { "postfit" },
            self.absolute_tol,
            self.relative_tol,
            self.max_iterations,
            self.max_divergences,
            self.smoother
        )
    }
}
impl TryFrom<SmoothingArc> for IterationConf {
    type Error = NyxError;

    /// Builds an iteration configuration from a smoothing condition, keeping the
    /// default tolerances and limits for everything else. This conversion never
    /// actually fails; `TryFrom` is retained for interface compatibility.
    fn try_from(smoother: SmoothingArc) -> Result<Self, Self::Error> {
        // `force_failure: false` is already the default value, so only the
        // smoother needs to be set explicitly.
        Ok(Self {
            smoother,
            ..Default::default()
        })
    }
}
/// An orbit determination process: propagates the dynamics, runs the provided filter
/// over the measurements, and accumulates the resulting estimates and residuals.
#[allow(clippy::upper_case_acronyms)]
pub struct ODProcess<
    'a,
    D: Dynamics,
    E: ErrorCtrl,
    Msr: Measurement<State = S>,
    N: MeasurementDevice<S, Msr>,
    T: EkfTrigger,
    A: DimName,
    S: EstimateFrom<D::StateType>,
    K: Filter<S, A, Msr::MeasurementSize>,
> where
    D::StateType: Add<OVector<f64, <S as State>::Size>, Output = D::StateType>,
    DefaultAllocator: Allocator<f64, <D::StateType as State>::Size>
        + Allocator<f64, <S as State>::Size>
        + Allocator<f64, <S as State>::VecLength>
        + Allocator<f64, <D::StateType as State>::VecLength>
        + Allocator<f64, Msr::MeasurementSize>
        + Allocator<f64, Msr::MeasurementSize, S::Size>
        + Allocator<f64, S::Size>
        + Allocator<f64, Msr::MeasurementSize, Msr::MeasurementSize>
        + Allocator<f64, Msr::MeasurementSize, <D::StateType as State>::Size>
        + Allocator<f64, <D::StateType as State>::Size, Msr::MeasurementSize>
        + Allocator<f64, <D::StateType as State>::Size, <D::StateType as State>::Size>
        + Allocator<usize, <D::StateType as State>::Size, <D::StateType as State>::Size>
        + Allocator<f64, <S as State>::Size, <S as State>::Size>
        + Allocator<f64, A>
        + Allocator<f64, A, A>
        + Allocator<f64, <D::StateType as State>::Size, A>
        + Allocator<f64, A, <D::StateType as State>::Size>
        + Allocator<f64, <S as State>::Size, A>
        + Allocator<f64, A, <S as State>::Size>,
{
    /// Propagator instance used between measurements
    pub prop: PropInstance<'a, D, E>,
    /// The filter (e.g. Kalman filter) performing the time and measurement updates
    pub kf: K,
    /// Measurement devices queried at each measurement epoch
    pub devices: Vec<N>,
    /// Estimates accumulated during a pass (time and measurement updates)
    pub estimates: Vec<K::Estimate>,
    /// Residuals accumulated during a pass (measurement updates only)
    pub residuals: Vec<Residual<Msr::MeasurementSize>>,
    /// Trigger deciding when to switch between CKF and EKF modes
    pub ekf_trigger: T,
    // When false, only the first visible device at an epoch produces a measurement update.
    simultaneous_msr: bool,
    // Initial propagator state, restored at the start of each iteration.
    init_state: D::StateType,
    _marker: PhantomData<A>,
}
impl<
'a,
D: Dynamics,
E: ErrorCtrl,
Msr: Measurement<State = S>,
N: MeasurementDevice<S, Msr>,
T: EkfTrigger,
A: DimName,
S: EstimateFrom<D::StateType>,
K: Filter<S, A, Msr::MeasurementSize>,
> ODProcess<'a, D, E, Msr, N, T, A, S, K>
where
D::StateType: Add<OVector<f64, <S as State>::Size>, Output = D::StateType>,
DefaultAllocator: Allocator<f64, <D::StateType as State>::Size>
+ Allocator<f64, Msr::MeasurementSize>
+ Allocator<f64, Msr::MeasurementSize, S::Size>
+ Allocator<f64, S::Size>
+ Allocator<usize, S::Size, S::Size>
+ Allocator<f64, Msr::MeasurementSize, Msr::MeasurementSize>
+ Allocator<f64, Msr::MeasurementSize, <D::StateType as State>::Size>
+ Allocator<f64, Msr::MeasurementSize, <S as State>::Size>
+ Allocator<f64, <D::StateType as State>::Size, Msr::MeasurementSize>
+ Allocator<f64, <S as State>::Size, Msr::MeasurementSize>
+ Allocator<f64, <D::StateType as State>::Size, <D::StateType as State>::Size>
+ Allocator<usize, <D::StateType as State>::Size, <D::StateType as State>::Size>
+ Allocator<f64, <D::StateType as State>::VecLength>
+ Allocator<f64, A>
+ Allocator<f64, A, A>
+ Allocator<f64, <D::StateType as State>::Size, A>
+ Allocator<f64, A, <D::StateType as State>::Size>
+ Allocator<f64, <S as State>::Size>
+ Allocator<f64, <S as State>::VecLength>
+ Allocator<f64, <S as State>::Size, <S as State>::Size>
+ Allocator<f64, <S as State>::Size, A>
+ Allocator<f64, A, <S as State>::Size>,
{
/// Initializes an ODProcess which may switch to the extended Kalman filter as
/// decided by the provided trigger. The filter's previous estimate seeds the
/// estimate vector.
pub fn ekf(prop: PropInstance<'a, D, E>, kf: K, devices: Vec<N>, trigger: T) -> Self {
    let initial = prop.state;
    let mut all_estimates = Vec::with_capacity(10_001);
    all_estimates.push(kf.previous_estimate().clone());
    Self {
        prop,
        kf,
        devices,
        ekf_trigger: trigger,
        estimates: all_estimates,
        residuals: Vec::with_capacity(10_000),
        simultaneous_msr: false,
        init_state: initial,
        _marker: PhantomData,
    }
}
/// Smooths the filtered estimates backward until the provided condition is met,
/// returning the smoothed arc in forward chronological order.
///
/// # Errors
/// Returns `NyxError::SingularStateTransitionMatrix` if an estimate's STM cannot
/// be inverted.
///
/// NOTE(review): panics if `self.estimates` is empty (`len() - 1` underflows) —
/// confirm this is only reachable after at least one estimate has been stored.
pub fn smooth(&self, condition: SmoothingArc) -> Result<Vec<K::Estimate>, NyxError> {
    // Index of the last estimate: smoothing starts here and walks backward.
    let l = self.estimates.len() - 1;
    info!("Smoothing {} estimates until {}", l + 1, condition);
    let mut smoothed = Vec::with_capacity(self.estimates.len());
    // The last estimate cannot be smoothed: it seeds the backward recursion.
    smoothed.push(self.estimates.last().unwrap().clone());
    loop {
        // NOTE(review): the k+1 deviation/covariance are read from the *filtered*
        // estimates; a textbook RTS smoother would use the previously *smoothed*
        // estimate (`smoothed.last()`) here — confirm this is intended.
        let sm_est_kp1 = &self.estimates[l - smoothed.len() + 1];
        let x_kp1_l = sm_est_kp1.state_deviation();
        let p_kp1_l = sm_est_kp1.covar();
        // Filtered estimates at indices k and k+1.
        let est_k = &self.estimates[l - smoothed.len()];
        let est_kp1 = &self.estimates[l - smoothed.len() + 1];
        // Stop the backward recursion once the smoothing condition is reached.
        match condition {
            SmoothingArc::Epoch(e) => {
                if est_kp1.epoch() < e {
                    break;
                }
            }
            SmoothingArc::TimeGap(gap_s) => {
                if est_kp1.epoch() - est_k.epoch() > gap_s {
                    break;
                }
            }
            SmoothingArc::Prediction => {
                if est_kp1.predicted() {
                    break;
                }
            }
            SmoothingArc::All => {}
        }
        // Invert the STM of k+1 to map the k+1 deviation back to epoch k.
        let phi_kp1_k = &est_kp1
            .stm()
            .clone()
            .try_inverse()
            .ok_or(NyxError::SingularStateTransitionMatrix)?;
        let x_k_l = phi_kp1_k * x_kp1_l;
        // P_k|l = Phi^-1 * P_k+1|l * Phi^-T
        let p_k_l = phi_kp1_k * p_kp1_l * phi_kp1_k.transpose();
        // Build the smoothed estimate at k from the filtered one, replacing its
        // deviation and covariance with the smoothed values.
        let mut smoothed_est_k = est_k.clone();
        smoothed_est_k.set_state_deviation(x_k_l);
        smoothed_est_k.set_covar(p_k_l);
        smoothed.push(smoothed_est_k);
        if smoothed.len() == self.estimates.len() {
            break;
        }
    }
    info!(
        "Smoothing condition reached after {} estimates ",
        smoothed.len()
    );
    // If the condition stopped the recursion early, pad with the remaining
    // (unsmoothed) filtered estimates so the returned arc covers the full span.
    if smoothed.len() < self.estimates.len() {
        // NOTE(review): starting at `len - smoothed.len()` re-pushes the estimate
        // at index `l - smoothed.len() + 1`, which is already in `smoothed`, and
        // yields a vector one longer than `self.estimates` — possible off-by-one;
        // confirm against the intended arc length.
        let mut k = self.estimates.len() - smoothed.len();
        loop {
            smoothed.push(self.estimates[k].clone());
            if k == 0 {
                break;
            }
            k -= 1;
        }
    }
    // `smoothed` was built backward in time; restore chronological order.
    smoothed.reverse();
    Ok(smoothed)
}
/// Computes the RMS of the measurement-noise-weighted prefit residuals.
///
/// NOTE(review): the sum is normalized by the number of *estimates*, not the
/// number of residuals — time updates make these counts differ; confirm intended.
pub fn rms_prefit_residual(&self) -> f64 {
    let total: f64 = self
        .residuals
        .iter()
        .map(|residual| {
            // Invert each diagonal entry of the measurement noise at this epoch.
            let mut inv_noise_diag: OVector<f64, Msr::MeasurementSize> =
                self.kf.measurement_noise(residual.dt).diagonal();
            for entry in inv_noise_diag.iter_mut() {
                *entry = 1.0 / *entry;
            }
            residual.prefit.dot(&inv_noise_diag).powi(2)
        })
        .sum();
    (total / (self.estimates.len() as f64)).sqrt()
}
pub fn rms_postfit_residual(&self) -> f64 {
let mut sum = 0.0;
for residual in &self.residuals {
sum += residual.postfit.dot(&residual.postfit);
}
(sum / (self.estimates.len() as f64)).sqrt()
}
/// Iterates the orbit determination: smooth the current pass, rewind the filter and
/// propagator to the initial state, and re-process all measurements, repeating until
/// the RMS residual converges (absolutely or relatively) or a limit is reached.
///
/// # Errors
/// Returns `NyxError::MaxIterReached` when the iteration or divergence limit is
/// exceeded and `config.force_failure` is set; otherwise such limit violations only
/// log an error and return `Ok(())`.
pub fn iterate(&mut self, measurements: &[Msr], config: IterationConf) -> Result<(), NyxError> {
    // RMS of the pass that has already been run, per the configured residual kind.
    let mut best_rms = if config.use_prefit {
        self.rms_prefit_residual()
    } else {
        self.rms_postfit_residual()
    };
    let mut previous_rms = best_rms;
    // Counts *subsequent* divergences; reset whenever an iteration improves the RMS.
    let mut divergence_cnt = 0;
    let mut iter_cnt = 0;
    loop {
        // Absolute convergence is checked first so an already-converged pass
        // returns without iterating.
        if best_rms <= config.absolute_tol {
            info!("*****************");
            info!("*** CONVERGED ***");
            info!("*****************");
            info!(
                "Filter converged to absolute tolerance ({:.2e} < {:.2e}) after {} iterations",
                best_rms, config.absolute_tol, iter_cnt
            );
            return Ok(());
        }
        iter_cnt += 1;
        info!("***************************");
        info!("*** Iteration number {} ***", iter_cnt);
        info!("***************************");
        // Smooth the previous pass, then restart the filter from the smoothed
        // initial estimate and the original propagator state.
        let smoothed = self.smooth(config.smoother)?;
        self.prop.state = self.init_state;
        self.estimates = Vec::with_capacity(measurements.len());
        self.estimates.push(smoothed[0].clone());
        self.kf.set_previous_estimate(&smoothed[0]);
        self.process_measurements(measurements)?;
        let new_rms = if config.use_prefit {
            self.rms_prefit_residual()
        } else {
            self.rms_postfit_residual()
        };
        // Relative change of the RMS with respect to the best pass so far.
        let cur_rel_rms = (new_rms - best_rms).abs() / best_rms;
        if cur_rel_rms < config.relative_tol {
            info!("*****************");
            info!("*** CONVERGED ***");
            info!("*****************");
            info!(
                "New RMS: {:.5}\tPrevious RMS: {:.5}\tBest RMS: {:.5}",
                new_rms, previous_rms, best_rms
            );
            info!(
                "Filter converged to relative tolerance ({:.2e} < {:.2e}) after {} iterations",
                cur_rel_rms, config.relative_tol, iter_cnt
            );
            return Ok(());
        }
        if new_rms > previous_rms {
            // This iteration diverged with respect to the previous one.
            warn!(
                "New RMS: {:.5}\tPrevious RMS: {:.5}\tBest RMS: {:.5}",
                new_rms, previous_rms, best_rms
            );
            divergence_cnt += 1;
            previous_rms = new_rms;
            if divergence_cnt >= config.max_divergences {
                let msg = format!(
                    "Filter iterations have continuously diverged {} times: {}",
                    config.max_divergences, config
                );
                if config.force_failure {
                    return Err(NyxError::MaxIterReached(msg));
                } else {
                    error!("{}", msg);
                    return Ok(());
                }
            } else {
                warn!("Filter iteration caused divergence {} of {} acceptable subsequent divergences", divergence_cnt, config.max_divergences);
            }
        } else {
            // The iteration improved (or matched) the previous pass.
            info!(
                "New RMS: {:.5}\tPrevious RMS: {:.5}\tBest RMS: {:.5}",
                new_rms, previous_rms, best_rms
            );
            divergence_cnt = 0;
            previous_rms = new_rms;
            if previous_rms < best_rms {
                best_rms = previous_rms;
            }
        }
        // Hard stop on the iteration count.
        if iter_cnt >= config.max_iterations {
            let msg = format!(
                "Filter has iterated {} times but failed to reach filter convergence criteria: {}",
                config.max_iterations, config
            );
            if config.force_failure {
                return Err(NyxError::MaxIterReached(msg));
            } else {
                error!("{}", msg);
                return Ok(());
            }
        }
    }
}
/// Processes the given measurements in order: propagates the dynamics between
/// measurement epochs, performing a filter time update at every intermediate
/// propagator step and a measurement update whenever a device reports a visible
/// measurement at the measurement epoch.
///
/// # Panics
/// Panics if `measurements` is empty.
#[allow(clippy::erasing_op)]
pub fn process_measurements(&mut self, measurements: &[Msr]) -> Result<(), NyxError> {
    assert!(
        !measurements.is_empty(),
        "must have at least one measurement"
    );
    let num_msrs = measurements.len();
    // Total span from the filter's last estimate to the final measurement (logging only).
    let prop_time = measurements[num_msrs - 1].epoch() - self.kf.previous_estimate().epoch();
    info!("Navigation propagating for a total of {}", prop_time);
    let mut dt = self.prop.state.epoch();
    // Progress flags: one per 10% decile plus the 100% slot.
    let mut reported = vec![false; 11];
    info!(
        "Processing {} measurements with covariance mapping",
        num_msrs
    );
    for (msr_cnt, msr) in measurements.iter().enumerate() {
        let next_msr_epoch = msr.epoch();
        loop {
            let delta_t = next_msr_epoch - dt;
            // Take full propagator steps while they fit before the next measurement;
            // otherwise propagate exactly up to the measurement epoch.
            if self.prop.details.step < delta_t {
                self.prop.single_step()?;
            } else if delta_t.in_seconds() > 0.0 {
                self.prop.for_duration(delta_t)?;
            }
            // Extract the filter state from the propagated dynamics state.
            let nominal_state = S::extract(self.prop.state);
            dt = nominal_state.epoch();
            if nominal_state.epoch() == next_msr_epoch {
                // At the measurement epoch: query each device for a visible measurement.
                for device in self.devices.iter() {
                    if let Some(computed_meas) = device.measure(&nominal_state) {
                        if computed_meas.visible() {
                            self.kf.update_h_tilde(computed_meas.sensitivity());
                            // Fall back to classical mode if the trigger requests it
                            // (e.g. after a long measurement gap).
                            if self.kf.is_extended() && self.ekf_trigger.disable_ekf(dt) {
                                self.kf.set_extended(false);
                                info!("EKF disabled @ {}", dt);
                            }
                            match self.kf.measurement_update(
                                nominal_state,
                                &msr.observation(),
                                &computed_meas.observation(),
                            ) {
                                Ok((est, res)) => {
                                    trace!("msr update #{} @ {}", msr_cnt, dt);
                                    if self.ekf_trigger.enable_ekf(&est)
                                        && !self.kf.is_extended()
                                    {
                                        self.kf.set_extended(true);
                                        if !est.within_3sigma() {
                                            warn!("EKF enabled @ {} but filter DIVERGING", dt);
                                        } else {
                                            info!("EKF enabled @ {}", dt);
                                        }
                                    }
                                    // In EKF mode, fold the estimated deviation back
                                    // into the reference trajectory.
                                    if self.kf.is_extended() {
                                        self.prop.state =
                                            self.prop.state + est.state_deviation();
                                    }
                                    self.prop.state.reset_stm();
                                    self.estimates.push(est);
                                    self.residuals.push(res);
                                }
                                Err(e) => return Err(e),
                            }
                            // Unless simultaneous measurements are enabled, only the
                            // first visible device at this epoch is processed.
                            if !self.simultaneous_msr {
                                break;
                            }
                        }
                    }
                }
                // Log progress at every 10% of processed measurements.
                let msr_prct = (10.0 * (msr_cnt as f64) / (num_msrs as f64)) as usize;
                if !reported[msr_prct] {
                    info!(
                        "{:>3}% done ({:.0} measurements processed)",
                        10 * msr_prct,
                        msr_cnt
                    );
                    reported[msr_prct] = true;
                }
                break;
            } else {
                // Not yet at the measurement epoch: perform a filter time update.
                trace!("time update {}", dt);
                match self.kf.time_update(nominal_state) {
                    Ok(est) => {
                        self.estimates.push(est);
                    }
                    Err(e) => return Err(e),
                }
                self.prop.state.reset_stm();
            }
        }
    }
    if !reported[10] {
        info!("{:>3}% done ({:.0} measurements processed)", 100, num_msrs);
    }
    Ok(())
}
/// Propagates the dynamics until `end_epoch`, performing only filter time updates
/// (covariance mapping) — no measurements are processed.
pub fn map_covar(&mut self, end_epoch: Epoch) -> Result<(), NyxError> {
    let prop_time = end_epoch - self.kf.previous_estimate().epoch();
    info!(
        "Propagating for {} seconds and mapping covariance",
        prop_time
    );
    loop {
        let mut dt = self.prop.state.epoch();
        // Take full steps until the next one would overshoot, then propagate
        // exactly to the end epoch.
        if dt + self.prop.details.step > end_epoch {
            self.prop.until_epoch(end_epoch)?;
        } else {
            self.prop.single_step()?;
        }
        let prop_state = self.prop.state;
        let nominal_state = S::extract(prop_state);
        dt = nominal_state.epoch();
        trace!("time update {}", dt);
        match self.kf.time_update(nominal_state) {
            Ok(est) => {
                self.estimates.push(est);
            }
            Err(e) => return Err(e),
        }
        self.prop.state.reset_stm();
        // NOTE(review): exact epoch equality terminates the loop — this relies on
        // `until_epoch` landing precisely on `end_epoch`; confirm.
        if dt == end_epoch {
            break;
        }
    }
    Ok(())
}
/// Builds a navigation trajectory from the stored estimates.
///
/// # Errors
/// Returns `NyxError::NoStateData` if the OD process has not been run yet.
pub fn to_traj(&self) -> Result<Traj<S>, NyxError>
where
    DefaultAllocator: Allocator<f64, <S as State>::VecLength>,
    S: InterpState,
{
    // Guard clause: a trajectory cannot be built before any estimates exist.
    if self.estimates.is_empty() {
        return Err(NyxError::NoStateData(
            "No navigation trajectory to generate: run the OD process first".to_string(),
        ));
    }
    todo!("generating a navigation trajectory #199");
}
}
/// Methods available when the ODProcess is instantiated with the `CkfTrigger`,
/// i.e. when the filter always runs as a classical Kalman filter.
impl<
    'a,
    D: Dynamics,
    E: ErrorCtrl,
    Msr: Measurement<State = S>,
    N: MeasurementDevice<S, Msr>,
    A: DimName,
    S: EstimateFrom<D::StateType>,
    K: Filter<S, A, Msr::MeasurementSize>,
> ODProcess<'a, D, E, Msr, N, CkfTrigger, A, S, K>
where
    D::StateType: Add<OVector<f64, <S as State>::Size>, Output = D::StateType>,
    DefaultAllocator: Allocator<f64, <D::StateType as State>::Size>
        + Allocator<f64, <D::StateType as State>::VecLength>
        + Allocator<f64, Msr::MeasurementSize>
        + Allocator<f64, Msr::MeasurementSize, S::Size>
        + Allocator<f64, S::Size>
        + Allocator<f64, Msr::MeasurementSize, Msr::MeasurementSize>
        + Allocator<f64, Msr::MeasurementSize, <D::StateType as State>::Size>
        + Allocator<f64, <D::StateType as State>::Size, Msr::MeasurementSize>
        + Allocator<f64, <S as State>::Size, Msr::MeasurementSize>
        + Allocator<f64, Msr::MeasurementSize, <S as State>::Size>
        + Allocator<f64, <D::StateType as State>::Size, <D::StateType as State>::Size>
        + Allocator<usize, <D::StateType as State>::Size, <D::StateType as State>::Size>
        + Allocator<f64, <S as State>::Size>
        + Allocator<f64, <S as State>::VecLength>
        + Allocator<f64, <S as State>::Size, <S as State>::Size>
        + Allocator<f64, A>
        + Allocator<f64, A, A>
        + Allocator<f64, <D::StateType as State>::Size, A>
        + Allocator<f64, A, <D::StateType as State>::Size>
        + Allocator<f64, <S as State>::Size, A>
        + Allocator<f64, A, <S as State>::Size>,
{
    /// Initializes an ODProcess which will only ever run in classical mode. The
    /// filter's previous estimate seeds the estimate vector.
    pub fn ckf(prop: PropInstance<'a, D, E>, kf: K, devices: Vec<N>) -> Self {
        // Keep the initial propagator state so `iterate` can rewind to it.
        let init_state = prop.state;
        let mut estimates = Vec::with_capacity(10_001);
        estimates.push(kf.previous_estimate().clone());
        Self {
            prop,
            kf,
            devices,
            simultaneous_msr: false,
            estimates,
            residuals: Vec::with_capacity(10_000),
            ekf_trigger: CkfTrigger {},
            init_state,
            _marker: PhantomData::<A>,
        }
    }
}
/// Decides when the filter switches between classical (CKF) and extended (EKF) modes.
pub trait EkfTrigger {
    /// Returns true when the filter should switch to (or remain in) extended mode,
    /// given the latest estimate.
    fn enable_ekf<T: State, E>(&mut self, est: &E) -> bool
    where
        E: Estimate<T>,
        DefaultAllocator: Allocator<f64, <T as State>::Size>
            + Allocator<f64, <T as State>::VecLength>
            + Allocator<f64, <T as State>::Size, <T as State>::Size>;
    /// Returns true when the filter should fall back to classical mode at this epoch.
    /// Defaults to never disabling the EKF.
    fn disable_ekf(&mut self, _epoch: Epoch) -> bool {
        false
    }
    /// Optional iteration configuration associated with this trigger.
    /// NOTE(review): the name appears to be a typo for `iteration_config`; renaming
    /// would break implementors, so it is only flagged here.
    fn interation_config(&self) -> Option<IterationConf> {
        None
    }
}
/// A trigger that never enables the EKF: the filter remains a classical KF throughout.
pub struct CkfTrigger;
impl EkfTrigger for CkfTrigger {
    /// Always returns false: the filter never switches to extended mode.
    fn enable_ekf<T: State, E>(&mut self, _est: &E) -> bool
    where
        E: Estimate<T>,
        DefaultAllocator: Allocator<f64, <T as State>::Size>
            + Allocator<f64, <T as State>::VecLength>
            + Allocator<f64, <T as State>::Size, <T as State>::Size>,
    {
        false
    }
}
/// An EKF trigger that enables the EKF after a fixed number of measurements
/// (optionally gated on the estimate being within a sigma bound) and disables it
/// after a measurement gap.
pub struct StdEkfTrigger {
    /// Number of measurements to process before the EKF may be enabled
    pub num_msrs: usize,
    /// Measurement gap after which the EKF is disabled
    pub disable_time: Duration,
    /// If positive, the EKF is only enabled while the estimate is within this many
    /// sigmas; a non-positive value disables the check
    pub within_sigma: f64,
    // Epoch of the most recent non-predicted (measurement-updated) estimate.
    prev_msr_dt: Option<Epoch>,
    // Measurements processed so far; reset when the EKF is disabled.
    cur_msrs: usize,
}
impl StdEkfTrigger {
pub fn new(num_msrs: usize, disable_time: Duration) -> Self {
Self {
num_msrs,
disable_time,
within_sigma: -1.0,
prev_msr_dt: None,
cur_msrs: 0,
}
}
}
impl EkfTrigger for StdEkfTrigger {
    /// Enables the EKF once enough measurements have been ingested and, when a sigma
    /// threshold is configured (positive), the estimate lies within it.
    fn enable_ekf<T: State, E>(&mut self, est: &E) -> bool
    where
        E: Estimate<T>,
        DefaultAllocator: Allocator<f64, <T as State>::Size>
            + Allocator<f64, <T as State>::VecLength>
            + Allocator<f64, <T as State>::Size, <T as State>::Size>,
    {
        // Remember the epoch of the latest real (non-predicted) measurement update.
        if !est.predicted() {
            self.prev_msr_dt = Some(est.epoch());
        }
        self.cur_msrs += 1;
        // Not enough measurements yet: keep the classical filter.
        if self.cur_msrs < self.num_msrs {
            return false;
        }
        // A non-positive sigma threshold disables the within-sigma gate entirely.
        self.within_sigma <= 0.0 || est.within_sigma(self.within_sigma)
    }

    /// Disables the EKF when the gap since the last measurement exceeds the
    /// configured disable time; also resets the measurement counter.
    fn disable_ekf(&mut self, epoch: Epoch) -> bool {
        match self.prev_msr_dt {
            None => false,
            Some(prev_dt) => {
                let gap = (epoch - prev_dt).abs();
                if gap > self.disable_time {
                    self.cur_msrs = 0;
                    true
                } else {
                    false
                }
            }
        }
    }
}