tiny_solver/optimizer/levenberg_marquardt_optimizer.rs

use log::trace;
use std::ops::Mul;
use std::{collections::HashMap, time::Instant};

use faer::sparse::Triplet;
use faer_ext::IntoNalgebra;

use crate::common::OptimizerOptions;
use crate::linear;
use crate::optimizer;
use crate::parameter_block::ParameterBlock;
use crate::sparse::LinearSolverType;
use crate::sparse::SparseLinearSolver;

const DEFAULT_MIN_DIAGONAL: f64 = 1e-6;
const DEFAULT_MAX_DIAGONAL: f64 = 1e32;
const DEFAULT_INITIAL_TRUST_REGION_RADIUS: f64 = 1e4;

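/// Levenberg-Marquardt nonlinear least-squares optimizer.
///
/// Each iteration solves the damped normal equations
///     (J^T * J + u * diag(J^T * J)) * dx = J^T * -r,
/// where u is the reciprocal of the trust-region radius and the entries of
/// diag(J^T * J) are clamped to [min_diagonal, max_diagonal]. The default
/// constants above match Ceres Solver's defaults.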
#[derive(Debug)]
pub struct LevenbergMarquardtOptimizer {
    min_diagonal: f64,
    max_diagonal: f64,
    initial_trust_region_radius: f64,
}

impl LevenbergMarquardtOptimizer {
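    /// Creates an optimizer with explicit diagonal clamps and an initial
    /// trust-region radius; `Default` uses the constants defined above.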
    pub fn new(min_diagonal: f64, max_diagonal: f64, initial_trust_region_radius: f64) -> Self {
        Self {
            min_diagonal,
            max_diagonal,
            initial_trust_region_radius,
        }
    }
}

impl Default for LevenbergMarquardtOptimizer {
    fn default() -> Self {
        Self {
            min_diagonal: DEFAULT_MIN_DIAGONAL,
            max_diagonal: DEFAULT_MAX_DIAGONAL,
            initial_trust_region_radius: DEFAULT_INITIAL_TRUST_REGION_RADIUS,
        }
    }
}

impl optimizer::Optimizer for LevenbergMarquardtOptimizer {
    fn optimize(
        &self,
        problem: &crate::problem::Problem,
        initial_values: &std::collections::HashMap<String, nalgebra::DVector<f64>>,
        optimizer_option: Option<OptimizerOptions>,
    ) -> Option<HashMap<String, nalgebra::DVector<f64>>> {
        let mut parameter_blocks: HashMap<String, ParameterBlock> =
            problem.initialize_parameter_blocks(initial_values);

        let variable_name_to_col_idx_dict =
            problem.get_variable_name_to_col_idx_dict(&parameter_blocks);
        let total_variable_dimension = parameter_blocks
            .values()
            .map(|p| {
                if p.manifold.is_some() {
                    p.tangent_size()
                } else {
                    p.tangent_size() - p.fixed_variables.len()
                }
            })
            .sum();

        let opt_option = optimizer_option.unwrap_or_default();
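        // Choose the sparse factorization backend. Cholesky is generally
        // faster but requires the damped J^T * J to be positive definite;
        // QR is slower but more robust to poorly conditioned systems.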
        let mut linear_solver: Box<dyn SparseLinearSolver> = match opt_option.linear_solver_type {
            LinearSolverType::SparseCholesky => Box::new(linear::SparseCholeskySolver::new()),
            LinearSolverType::SparseQR => Box::new(linear::SparseQRSolver::new()),
        };

        // On the first iteration, we'll build a diagonal Jacobi scaling matrix
        // from the Jacobian's column norms. Its shape will be
        // (total_variable_dimension, total_variable_dimension).
        // With LM, rather than solving A * dx = b directly, we solve the
        // damped system (A + lambda * diag(A)) * dx = b.
        let mut jacobi_scaling_diagonal: Option<faer::sparse::SparseColMat<usize, f64>> = None;

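        // Build the Jacobian's symbolic sparsity structure once up front;
        // presumably this lets each iteration fill in numeric values without
        // recomputing the pattern (the helper is defined elsewhere in the crate).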
        let symbolic_structure = problem.build_symbolic_structure(
            &parameter_blocks,
            total_variable_dimension,
            &variable_name_to_col_idx_dict,
        );

        // Damping parameter (a.k.a. lambda / the Marquardt parameter).
        // u is the reciprocal of the trust-region radius: a larger u means
        // stronger damping and a shorter, more gradient-like step.
        let mut u = 1.0 / self.initial_trust_region_radius;

        let mut last_err;
        let mut current_error = self.compute_error(problem, &parameter_blocks);
        for i in 0..opt_option.max_iteration {
            last_err = current_error;

            let (residuals, mut jac) = problem.compute_residual_and_jacobian(
                &parameter_blocks,
                &variable_name_to_col_idx_dict,
                &symbolic_structure,
            );

            if i == 0 {
                // On the first iteration, build the Jacobi scaling matrix:
                // each column c of J is scaled by 1 / (1 + ||J[:, c]||),
                // which improves the conditioning of J^T * J.
                let cols = jac.shape().1;
                let jacobi_scaling_vec: Vec<Triplet<usize, usize, f64>> = (0..cols)
                    .map(|c| {
                        let v = jac.val_of_col(c).iter().map(|&x| x * x).sum::<f64>().sqrt();
                        Triplet::new(c, c, 1.0 / (1.0 + v))
                    })
                    .collect();

                jacobi_scaling_diagonal = Some(
                    faer::sparse::SparseColMat::<usize, f64>::try_new_from_triplets(
                        cols,
                        cols,
                        &jacobi_scaling_vec,
                    )
                    .unwrap(),
                );
            }

            // Scale the current Jacobian by the diagonal matrix.
            jac = jac * jacobi_scaling_diagonal.as_ref().unwrap();

            // J^T * J = Gauss-Newton approximation of the Hessian of 0.5 * ||r||^2,
            // shape (total_variable_dimension, total_variable_dimension).
            let jtj = jac
                .as_ref()
                .transpose()
                .to_col_major()
                .unwrap()
                .mul(jac.as_ref());

            // J^T * -r = right-hand side, shape (total_variable_dimension, 1).
            let jtr = jac.as_ref().transpose().mul(-&residuals);

            // Damp the system: add u * diag(J^T * J) to the diagonal, clamping
            // each diagonal entry to [min_diagonal, max_diagonal] first.
            let mut jtj_regularized = jtj.clone();
            for i in 0..total_variable_dimension {
                jtj_regularized[(i, i)] +=
                    u * (jtj[(i, i)].max(self.min_diagonal)).min(self.max_diagonal);
            }

            let start = Instant::now();
            if let Some(lm_step) = linear_solver.solve_jtj(&jtr, &jtj_regularized) {
                let duration = start.elapsed();
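                // The solver returns the step in Jacobi-scaled coordinates;
                // multiplying by the scaling diagonal maps it back to the
                // original parameterization.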
                let dx = jacobi_scaling_diagonal.as_ref().unwrap() * &lm_step;

                trace!("Time elapsed in solve Ax=b is: {:?}", duration);

                let dx_na = dx.as_ref().into_nalgebra().column(0).clone_owned();

                let mut new_param_blocks = parameter_blocks.clone();

                self.apply_dx2(
                    &dx_na,
                    &mut new_param_blocks,
                    &variable_name_to_col_idx_dict,
                );

                // Compute residuals of (x + dx)
                let new_residuals = problem.compute_residuals(&new_param_blocks, true);

                // rho is the ratio between the actual reduction in error and the reduction
                // in error if the problem were linear.
                let actual_residual_change =
                    residuals.as_ref().squared_norm_l2() - new_residuals.as_ref().squared_norm_l2();
                trace!("actual_residual_change {}", actual_residual_change);
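                // Predicted (linearized) reduction, from expanding
                // ||r||^2 - ||r + J * dx||^2 = dx^T * (2 * J^T * -r - J^T * J * dx);
                // evaluated here in the Jacobi-scaled coordinates of lm_step.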
                let linear_residual_change: faer::Mat<f64> =
                    lm_step.transpose().mul(2.0 * &jtr - &jtj * &lm_step);
                let rho = actual_residual_change / linear_residual_change[(0, 0)];

                if rho > 0.0 {
                    // The linear model predicted the cost reduction reasonably
                    // well, so accept (x + dx) as the new x.
                    parameter_blocks = new_param_blocks;

                    // Nielsen's trust-region update: shrink u (grow the trust
                    // region) when rho is close to 1, and grow u when rho is
                    // only barely positive.
                    let tmp = 2.0 * rho - 1.0;
                    u *= (1.0_f64 / 3.0).max(1.0 - tmp * tmp * tmp);
                } else {
                    // The step increased the cost: reject it, halve the trust
                    // region (double u), and retry from the same parameters.
                    u *= 2.0;
                    trace!("u {}", u);
                }
            } else {
                log::debug!("solve Ax=b failed");
                return None;
            }

            current_error = self.compute_error(problem, &parameter_blocks);
            trace!("iter:{} total err:{}", i, current_error);

            if current_error < opt_option.min_error_threshold {
                trace!("error below threshold, converged");
                break;
            } else if current_error.is_nan() {
                log::debug!("current error is NaN, aborting");
                return None;
            }

            if (last_err - current_error).abs() < opt_option.min_abs_error_decrease_threshold {
                trace!("absolute error decrease below threshold, converged");
                break;
            } else if (last_err - current_error).abs() / last_err
                < opt_option.min_rel_error_decrease_threshold
            {
                trace!("relative error decrease below threshold, converged");
                break;
            }
        }
        let params = parameter_blocks
            .iter()
            .map(|(k, v)| (k.to_owned(), v.params.clone()))
            .collect();
        Some(params)
    }
}
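
// A minimal usage sketch (hypothetical: assumes a `Problem` that has already
// been populated with residual blocks, and relies only on the signatures
// visible in this file plus nalgebra's `dvector!` macro):
//
//     use tiny_solver::optimizer::Optimizer;
//
//     let optimizer = LevenbergMarquardtOptimizer::default();
//     let mut initial_values = HashMap::new();
//     initial_values.insert("x".to_owned(), nalgebra::dvector![0.0, 0.0]);
//     if let Some(result) = optimizer.optimize(&problem, &initial_values, None) {
//         println!("optimized x: {}", result["x"]);
//     }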