1use crate::core::problem;
10use crate::linalg;
11use crate::manifold;
12use faer;
13use nalgebra;
14use std::collections;
15use std::fmt;
16use std::time;
17use thiserror;
18
19pub mod dog_leg;
20pub mod gauss_newton;
21pub mod levenberg_marquardt;
22pub mod visualization;
23
24pub use dog_leg::DogLeg;
25pub use gauss_newton::GaussNewton;
26pub use levenberg_marquardt::LevenbergMarquardt;
27pub use visualization::OptimizationVisualizer;
28
/// Selects which nonlinear least-squares algorithm a solve should use.
///
/// `Debug` is derived in addition to `Display` so the type can be logged
/// diagnostically; `Display` (below) provides the user-facing name.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub enum OptimizerType {
    /// Levenberg-Marquardt (the default).
    #[default]
    LevenbergMarquardt,
    /// Gauss-Newton.
    GaussNewton,
    /// Dog-leg trust-region method.
    DogLeg,
}
40
41impl fmt::Display for OptimizerType {
42 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
43 match self {
44 OptimizerType::LevenbergMarquardt => write!(f, "Levenberg-Marquardt"),
45 OptimizerType::GaussNewton => write!(f, "Gauss-Newton"),
46 OptimizerType::DogLeg => write!(f, "Dog Leg"),
47 }
48 }
49}
50
/// Errors that can arise while running one of the optimizers in this module.
#[derive(Debug, Clone, thiserror::Error)]
pub enum OptimizerError {
    /// The linear system for the update step could not be solved.
    #[error("Linear system solve failed: {0}")]
    LinearSolveFailed(String),

    /// The iteration budget was exhausted before any convergence criterion fired.
    #[error("Maximum iterations ({max_iters}) reached without convergence")]
    MaxIterationsReached { max_iters: usize },

    /// Trust-region methods: the radius shrank below its configured minimum.
    #[error("Trust region radius became too small: {radius:.6e} < {min_radius:.6e}")]
    TrustRegionFailure { radius: f64, min_radius: f64 },

    /// Damped methods (e.g. Levenberg-Marquardt): damping grew past its maximum.
    #[error("Damping parameter became too large: {damping:.6e} > {max_damping:.6e}")]
    DampingFailure { damping: f64, max_damping: f64 },

    /// A step that should have decreased the cost increased it instead.
    #[error("Cost increased unexpectedly: {old_cost:.6e} -> {new_cost:.6e}")]
    CostIncrease { old_cost: f64, new_cost: f64 },

    /// The Jacobian could not be evaluated.
    #[error("Jacobian computation failed: {0}")]
    JacobianFailed(String),

    /// The caller supplied inconsistent or out-of-range configuration.
    #[error("Invalid optimization parameters: {0}")]
    InvalidParameters(String),

    /// NaN/Inf or other numerically unstable values were detected.
    #[error("Numerical instability detected: {0}")]
    NumericalInstability(String),

    /// Propagated error from the linear-algebra layer (converted via `From`).
    #[error("Linear algebra error: {0}")]
    LinAlg(#[from] linalg::LinAlgError),

    /// The problem contains no variables.
    #[error("Problem has no variables to optimize")]
    EmptyProblem,

    /// The problem contains no residual blocks.
    #[error("Problem has no residual blocks")]
    NoResidualBlocks,
}
98
/// Convenience alias for results produced by this module's optimizers.
pub type OptimizerResult<T> = Result<T, OptimizerError>;
101
/// Diagnostic statistics describing how an optimization run terminated.
#[derive(Debug, Clone)]
pub struct ConvergenceInfo {
    /// Norm of the gradient at the final iterate.
    pub final_gradient_norm: f64,
    /// Norm of the last parameter update step taken.
    pub final_parameter_update_norm: f64,
    /// Number of cost (residual) evaluations performed.
    pub cost_evaluations: usize,
    /// Number of Jacobian evaluations performed.
    pub jacobian_evaluations: usize,
}
129
130impl fmt::Display for ConvergenceInfo {
131 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
132 write!(
133 f,
134 "Final gradient norm: {:.2e}, Final parameter update norm: {:.2e}, Cost evaluations: {}, Jacobian evaluations: {}",
135 self.final_gradient_norm,
136 self.final_parameter_update_norm,
137 self.cost_evaluations,
138 self.jacobian_evaluations
139 )
140 }
141}
142
/// Why an optimization run stopped.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum OptimizationStatus {
    /// A convergence criterion was satisfied.
    Converged,
    /// The iteration budget ran out.
    MaxIterationsReached,
    /// Successive costs differed by less than the cost tolerance.
    CostToleranceReached,
    /// The parameter update fell below the parameter tolerance.
    ParameterToleranceReached,
    /// The gradient norm fell below the gradient tolerance.
    GradientToleranceReached,
    /// A non-recoverable numerical problem occurred.
    NumericalFailure,
    /// The caller requested early termination.
    UserTerminated,
    /// The wall-clock time limit was exceeded.
    Timeout,
    /// Trust-region radius shrank below its minimum.
    TrustRegionRadiusTooSmall,
    /// Cost dropped below the configured minimum-cost threshold.
    MinCostThresholdReached,
    /// The Jacobian was too ill-conditioned to continue.
    IllConditionedJacobian,
    /// NaN/Inf values were produced during iteration.
    InvalidNumericalValues,
    /// Any other failure, with a free-form description.
    Failed(String),
}
173
174impl fmt::Display for OptimizationStatus {
175 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
176 match self {
177 OptimizationStatus::Converged => write!(f, "Converged"),
178 OptimizationStatus::MaxIterationsReached => write!(f, "Maximum iterations reached"),
179 OptimizationStatus::CostToleranceReached => write!(f, "Cost tolerance reached"),
180 OptimizationStatus::ParameterToleranceReached => {
181 write!(f, "Parameter tolerance reached")
182 }
183 OptimizationStatus::GradientToleranceReached => write!(f, "Gradient tolerance reached"),
184 OptimizationStatus::NumericalFailure => write!(f, "Numerical failure"),
185 OptimizationStatus::UserTerminated => write!(f, "User terminated"),
186 OptimizationStatus::Timeout => write!(f, "Timeout"),
187 OptimizationStatus::TrustRegionRadiusTooSmall => {
188 write!(f, "Trust region radius too small")
189 }
190 OptimizationStatus::MinCostThresholdReached => {
191 write!(f, "Minimum cost threshold reached")
192 }
193 OptimizationStatus::IllConditionedJacobian => {
194 write!(f, "Ill-conditioned Jacobian matrix")
195 }
196 OptimizationStatus::InvalidNumericalValues => {
197 write!(f, "Invalid numerical values (NaN/Inf) detected")
198 }
199 OptimizationStatus::Failed(msg) => write!(f, "Failed: {msg}"),
200 }
201 }
202}
203
/// Outcome of a solver run, generic over the parameter container type `T`.
#[derive(Clone)]
pub struct SolverResult<T> {
    /// Final (optimized) parameter values.
    pub parameters: T,
    /// Why the optimizer stopped.
    pub status: OptimizationStatus,
    /// Cost evaluated at the initial parameters.
    pub initial_cost: f64,
    /// Cost evaluated at the final parameters.
    pub final_cost: f64,
    /// Number of iterations executed.
    pub iterations: usize,
    /// Wall-clock duration of the run.
    pub elapsed_time: time::Duration,
    /// Extra convergence diagnostics, when the solver collected them.
    pub convergence_info: Option<ConvergenceInfo>,
    /// Per-variable covariance matrices keyed by variable name, if computed.
    pub covariances: Option<std::collections::HashMap<String, faer::Mat<f64>>>,
}
230
/// Common interface implemented by the optimizers in this module
/// (Levenberg-Marquardt, Gauss-Newton, dog leg).
pub trait Solver {
    /// Solver-specific configuration type.
    type Config;
    /// Solver-specific error type.
    type Error;

    /// Constructs a solver with default configuration.
    fn new() -> Self;

    /// Runs the optimization on `problem` starting from `initial_params`
    /// (each variable name mapped to its manifold type and initial value),
    /// returning the optimized variables plus run statistics.
    fn optimize(
        &mut self,
        problem: &problem::Problem,
        initial_params: &collections::HashMap<
            String,
            (manifold::ManifoldType, nalgebra::DVector<f64>),
        >,
    ) -> Result<SolverResult<collections::HashMap<String, problem::VariableEnum>>, Self::Error>;
}
251
/// Applies a stacked tangent-space step to each variable in `variable_order`,
/// slicing consecutive sub-rows of `step` sized by each variable's tangent
/// dimension, and returns the L2 norm of the full `step` vector.
///
/// NOTE(review): if a name in `variable_order` is absent from `variables`,
/// `step_offset` is not advanced, so every subsequent variable would read a
/// misaligned slice of `step`. Presumably callers build `step` and
/// `variable_order` from the same variable set so this cannot happen — confirm.
pub fn apply_parameter_step(
    variables: &mut collections::HashMap<String, problem::VariableEnum>,
    step: faer::MatRef<f64>,
    variable_order: &[String],
) -> f64 {
    // Running row offset into the stacked step vector.
    let mut step_offset = 0;

    for var_name in variable_order {
        if let Some(var) = variables.get_mut(var_name) {
            let var_size = var.get_size();
            // This variable's segment of the stacked step.
            let var_step = step.subrows(step_offset, var_size);

            var.apply_tangent_step(var_step);

            step_offset += var_size;
        }
    }

    step.norm_l2()
}
293
294pub fn apply_negative_parameter_step(
305 variables: &mut collections::HashMap<String, problem::VariableEnum>,
306 step: faer::MatRef<f64>,
307 variable_order: &[String],
308) {
309 use faer::Mat;
310
311 let mut negative_step = Mat::zeros(step.nrows(), 1);
313 for i in 0..step.nrows() {
314 negative_step[(i, 0)] = -step[(i, 0)];
315 }
316
317 apply_parameter_step(variables, negative_step.as_ref(), variable_order);
319}
320
321pub fn compute_cost(residual: &faer::Mat<f64>) -> f64 {
322 let cost = residual.norm_l2();
323 0.5 * cost * cost
324}