use crate::core::problem::{Problem, VariableEnum};
use crate::linalg;
use crate::manifold::ManifoldType;
use faer::{Mat, MatRef};
use nalgebra::DVector;
use std::collections::HashMap;
use std::time;
use std::{
    fmt,
    fmt::{Display, Formatter},
};
use thiserror::Error;

pub mod dog_leg;
pub mod gauss_newton;
pub mod levenberg_marquardt;
pub mod visualization;

pub use dog_leg::DogLeg;
pub use gauss_newton::GaussNewton;
pub use levenberg_marquardt::LevenbergMarquardt;
pub use visualization::OptimizationVisualizer;

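/// The optimization algorithm to run; Levenberg-Marquardt is the default.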
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub enum OptimizerType {
    #[default]
    LevenbergMarquardt,
    GaussNewton,
    DogLeg,
}

impl Display for OptimizerType {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            OptimizerType::LevenbergMarquardt => write!(f, "Levenberg-Marquardt"),
            OptimizerType::GaussNewton => write!(f, "Gauss-Newton"),
            OptimizerType::DogLeg => write!(f, "Dog Leg"),
        }
    }
}

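/// Errors that can occur while setting up or running an optimization.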
#[derive(Debug, Clone, Error)]
pub enum OptimizerError {
    #[error("Linear system solve failed: {0}")]
    LinearSolveFailed(String),

    #[error("Maximum iterations ({max_iters}) reached without convergence")]
    MaxIterationsReached { max_iters: usize },

    #[error("Trust region radius became too small: {radius:.6e} < {min_radius:.6e}")]
    TrustRegionFailure { radius: f64, min_radius: f64 },

    #[error("Damping parameter became too large: {damping:.6e} > {max_damping:.6e}")]
    DampingFailure { damping: f64, max_damping: f64 },

    #[error("Cost increased unexpectedly: {old_cost:.6e} -> {new_cost:.6e}")]
    CostIncrease { old_cost: f64, new_cost: f64 },

    #[error("Jacobian computation failed: {0}")]
    JacobianFailed(String),

    #[error("Invalid optimization parameters: {0}")]
    InvalidParameters(String),

    #[error("Numerical instability detected: {0}")]
    NumericalInstability(String),

    #[error("Linear algebra error: {0}")]
    LinAlg(#[from] linalg::LinAlgError),

    #[error("Problem has no variables to optimize")]
    EmptyProblem,

    #[error("Problem has no residual blocks")]
    NoResidualBlocks,
}

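/// Result alias for fallible optimizer operations.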
pub type OptimizerResult<T> = Result<T, OptimizerError>;

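/// Convergence statistics reported when an optimizer terminates.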
#[derive(Debug, Clone)]
pub struct ConvergenceInfo {
    pub final_gradient_norm: f64,
    pub final_parameter_update_norm: f64,
    pub cost_evaluations: usize,
    pub jacobian_evaluations: usize,
}

impl Display for ConvergenceInfo {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Final gradient norm: {:.2e}, Final parameter update norm: {:.2e}, Cost evaluations: {}, Jacobian evaluations: {}",
            self.final_gradient_norm,
            self.final_parameter_update_norm,
            self.cost_evaluations,
            self.jacobian_evaluations
        )
    }
}

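/// The reason an optimization run stopped.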
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum OptimizationStatus {
    Converged,
    MaxIterationsReached,
    CostToleranceReached,
    ParameterToleranceReached,
    GradientToleranceReached,
    NumericalFailure,
    UserTerminated,
    Timeout,
    TrustRegionRadiusTooSmall,
    MinCostThresholdReached,
    IllConditionedJacobian,
    InvalidNumericalValues,
    Failed(String),
}

impl Display for OptimizationStatus {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            OptimizationStatus::Converged => write!(f, "Converged"),
            OptimizationStatus::MaxIterationsReached => write!(f, "Maximum iterations reached"),
            OptimizationStatus::CostToleranceReached => write!(f, "Cost tolerance reached"),
            OptimizationStatus::ParameterToleranceReached => {
                write!(f, "Parameter tolerance reached")
            }
            OptimizationStatus::GradientToleranceReached => write!(f, "Gradient tolerance reached"),
            OptimizationStatus::NumericalFailure => write!(f, "Numerical failure"),
            OptimizationStatus::UserTerminated => write!(f, "User terminated"),
            OptimizationStatus::Timeout => write!(f, "Timeout"),
            OptimizationStatus::TrustRegionRadiusTooSmall => {
                write!(f, "Trust region radius too small")
            }
            OptimizationStatus::MinCostThresholdReached => {
                write!(f, "Minimum cost threshold reached")
            }
            OptimizationStatus::IllConditionedJacobian => {
                write!(f, "Ill-conditioned Jacobian matrix")
            }
            OptimizationStatus::InvalidNumericalValues => {
                write!(f, "Invalid numerical values (NaN/Inf) detected")
            }
            OptimizationStatus::Failed(msg) => write!(f, "Failed: {msg}"),
        }
    }
}

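/// The outcome of an optimization run: the final parameters plus run statistics.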
#[derive(Clone)]
pub struct SolverResult<T> {
    pub parameters: T,
    pub status: OptimizationStatus,
    pub initial_cost: f64,
    pub final_cost: f64,
    pub iterations: usize,
    pub elapsed_time: time::Duration,
    pub convergence_info: Option<ConvergenceInfo>,
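    /// Per-variable covariance matrices, keyed by variable name, if they were computed.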
    pub covariances: Option<HashMap<String, Mat<f64>>>,
}

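/// Common interface implemented by the optimizers in this module.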
pub trait Solver {
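    /// Algorithm-specific configuration.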
    type Config;
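    /// Error type produced by this solver.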
    type Error;

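    /// Constructs the solver with default settings.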
    fn new() -> Self;

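    /// Runs the optimization on `problem`, starting from `initial_params`,
    /// and returns the optimized variables together with run statistics.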
    fn optimize(
        &mut self,
        problem: &Problem,
        initial_params: &HashMap<String, (ManifoldType, DVector<f64>)>,
    ) -> Result<SolverResult<HashMap<String, VariableEnum>>, Self::Error>;
}
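
// Hypothetical convenience wrapper, not part of the existing API: a minimal
// sketch showing how the `Solver` trait is intended to be driven end to end.
pub fn solve_with<S: Solver>(
    problem: &Problem,
    initial_params: &HashMap<String, (ManifoldType, DVector<f64>)>,
) -> Result<SolverResult<HashMap<String, VariableEnum>>, S::Error> {
    let mut solver = S::new();
    solver.optimize(problem, initial_params)
}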
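
/// Applies the tangent-space `step` to each variable in `variable_order`,
/// consuming consecutive sub-blocks of `step` (one per variable, sized by
/// `var.get_size()`), and returns the L2 norm of the full step.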
pub fn apply_parameter_step(
    variables: &mut HashMap<String, VariableEnum>,
    step: MatRef<f64>,
    variable_order: &[String],
) -> f64 {
    let mut step_offset = 0;

    for var_name in variable_order {
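        // Assumes every name in `variable_order` has an entry in `variables`;
        // a missing name is skipped without advancing `step_offset`.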
        if let Some(var) = variables.get_mut(var_name) {
            let var_size = var.get_size();
            let var_step = step.subrows(step_offset, var_size);

            var.apply_tangent_step(var_step);

            step_offset += var_size;
        }
    }

    step.norm_l2()
}

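/// Applies `-step` to the variables; the mirror of [`apply_parameter_step`],
/// e.g. for rolling back a rejected trial step.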
pub fn apply_negative_parameter_step(
    variables: &mut HashMap<String, VariableEnum>,
    step: MatRef<f64>,
    variable_order: &[String],
) {
    let negative_step = Mat::from_fn(step.nrows(), 1, |i, _| -step[(i, 0)]);

    apply_parameter_step(variables, negative_step.as_ref(), variable_order);
}

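/// Computes the scalar least-squares cost, `0.5 * ||residual||^2`.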
pub fn compute_cost(residual: &Mat<f64>) -> f64 {
    let norm = residual.norm_l2();
    0.5 * norm * norm
}
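
// Minimal sanity check (illustrative sketch): for r = [3, 4]^T the cost is
// 0.5 * ||r||^2 = 0.5 * 25 = 12.5.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn compute_cost_is_half_squared_norm() {
        let residual = Mat::from_fn(2, 1, |i, _| if i == 0 { 3.0 } else { 4.0 });
        assert!((compute_cost(&residual) - 12.5).abs() < 1e-12);
    }
}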