// levenberg_marquardt_sparse/lib.rs
//! Sparse implementation of the
//! [Levenberg-Marquardt](https://en.wikipedia.org/wiki/Levenberg%E2%80%93Marquardt_algorithm)
//! optimization algorithm using [nalgebra](https://nalgebra.org).
//!
//! This crate is a fork of `levenberg-marquardt` that removes the dense QR-based
//! solver path and always uses a sparse normal-equations solve based on
//! Jacobi-preconditioned Conjugate Gradient (PCG).
//!
//! The port from the original dense implementation to sparse matrices was carried
//! out via agentic coding using GitHub Copilot.
//!
//! It is intended for problems where the Jacobian is sparse, such as bundle
//! adjustment and SLAM, while still providing a dense-to-sparse bridge through
//! [`SparseJacobian::from_dense`] for small examples and migration.
//!
//! This algorithm tries to solve the least squares optimization problem
//! ```math
//! \min_{\vec{x}\in\R^n}f(\vec{x})\quad\text{where}\quad\begin{cases}\begin{aligned}
//!   \ f\!:\R^n &\to \R \\
//!  \vec{x} &\mapsto \frac{1}{2}\sum_{i=1}^m \bigl(r_i(\vec{x})\bigr)^2,
//! \end{aligned}\end{cases}
//! ```
//! for differentiable _residual functions_ `$r_i\!:\R^n\to\R$`.
//!
//! # Inputs
//!
//! The problem has `$n$` parameters `$\vec{x}\in\R^n$` and `$m$` residual
//! functions `$r_i\!:\R^n\to\R$`.
//!
//! You must provide an implementation of
//!
//! - the residual vector `$\vec{x} \mapsto (r_1(\vec{x}), \ldots, r_m(\vec{x}))^\top\in\R^m$`
//! - and its Jacobian `$\mathbf{J} \in \R^{m\times n}$`, returned as a
//!   [`SparseJacobian`] in triplet (COO) format, defined as
//!   ```math
//!   \mathbf{J} \coloneqq
//!   \def\arraystretch{1.5}
//!   \begin{pmatrix}
//!   \frac{\partial r_1}{\partial x_1} & \cdots & \frac{\partial r_1}{\partial x_n} \\
//!   \frac{\partial r_2}{\partial x_1} & \cdots & \frac{\partial r_2}{\partial x_n} \\
//!   \vdots & \ddots & \vdots \\
//!   \frac{\partial r_m}{\partial x_1} & \cdots & \frac{\partial r_m}{\partial x_n}
//!   \end{pmatrix}.
//!   ```
//!
//! Finally, you have to provide an initial guess for `$\vec{x}$`. This can
//! be a constant value, but typically the optimization result crucially depends
//! on a good initial value.
//!
//! `SparseJacobian` stores `(row, col, value)` entries. You can construct it
//! directly with [`SparseJacobian::new`] or [`SparseJacobian::from_triplets`],
//! or use [`SparseJacobian::from_dense`] as a bridge when migrating existing
//! dense code.
//!
//! The algorithm also has a number of hyperparameters which are documented
//! at [`LevenbergMarquardt`](struct.LevenbergMarquardt.html).
//!
//! This crate is `#![no_std]` and requires `alloc`.
//!
//! # Usage Example
//!
//! We use `$f(x, y) \coloneqq \frac{1}{2}[(x^2 + y - 11)^2 + (x + y^2 - 7)^2]$` as a [test function](https://en.wikipedia.org/wiki/Himmelblau%27s_function)
//! for this example.
//! In this case we have `$n = 2$` and `$m = 2$` with
//!
//! ```math
//!   r_1(\vec{x}) \coloneqq x_1^2 + x_2 - 11\quad\text{and}\quad
//!   r_2(\vec{x}) \coloneqq x_1 + x_2^2 - 7.
//! ```
//!
//! ```
//! # use nalgebra::*;
//! # use nalgebra::storage::Owned;
//! # use levenberg_marquardt_sparse::{LeastSquaresProblem, LevenbergMarquardt, SparseJacobian};
//! struct ExampleProblem {
//!     // holds current value of the n parameters
//!     p: Vector2<f64>,
//! }
//!
//! // We implement a trait for every problem we want to solve
//! impl LeastSquaresProblem<f64, U2, U2> for ExampleProblem {
//!     type ParameterStorage = Owned<f64, U2>;
//!     type ResidualStorage = Owned<f64, U2>;
//!
//!     fn set_params(&mut self, p: &Vector2<f64>) {
//!         self.p.copy_from(p);
//!         // do common calculations for residuals and the Jacobian here
//!     }
//!
//!     fn params(&self) -> Vector2<f64> { self.p }
//!
//!     fn residuals(&self) -> Option<Vector2<f64>> {
//!         let [x, y] = [self.p.x, self.p.y];
//!         // vector containing residuals $r_1(\vec{x})$ and $r_2(\vec{x})$
//!         Some(Vector2::new(x*x + y - 11., x + y*y - 7.))
//!     }
//!
//!     fn jacobian(&self) -> Option<SparseJacobian<f64>> {
//!         let [x, y] = [self.p.x, self.p.y];
//!
//!         // first row of Jacobian, derivatives of first residual
//!         let d1_x = 2. * x; // $\frac{\partial}{\partial x_1}r_1(\vec{x}) = \frac{\partial}{\partial x} (x^2 + y - 11) = 2x$
//!         let d1_y = 1.;     // $\frac{\partial}{\partial x_2}r_1(\vec{x}) = \frac{\partial}{\partial y} (x^2 + y - 11) = 1$
//!
//!         // second row of Jacobian, derivatives of second residual
//!         let d2_x = 1.;     // $\frac{\partial}{\partial x_1}r_2(\vec{x}) = \frac{\partial}{\partial x} (x + y^2 - 7) = 1$
//!         let d2_y = 2. * y; // $\frac{\partial}{\partial x_2}r_2(\vec{x}) = \frac{\partial}{\partial y} (x + y^2 - 7) = 2y$
//!
//!         Some(SparseJacobian::from_triplets(
//!             2,
//!             2,
//!             vec![
//!                 (0, 0, d1_x),
//!                 (0, 1, d1_y),
//!                 (1, 0, d2_x),
//!                 (1, 1, d2_y),
//!             ],
//!         ))
//!     }
//! }
//!
//! let problem = ExampleProblem {
//!     // the initial guess for $\vec{x}$
//!     p: Vector2::new(1., 1.),
//! };
//! let (_result, report) = LevenbergMarquardt::new().minimize(problem);
//! assert!(report.termination.was_successful());
//! assert!(report.objective_function.abs() < 1e-10);
//! ```
//!
//! # Derivative checking
//!
//! You should try using [`differentiate_numerically`](fn.differentiate_numerically.html)
//! in a unit test to verify that your Jacobian implementation matches the residuals.
135#![allow(unexpected_cfgs)]
136#![no_std]
137#![cfg_attr(feature = "RUSTC_IS_NIGHTLY", core_intrinsics)]
138
139extern crate alloc;
140
141mod lm;
142mod problem;
143pub(crate) mod utils;
144
145pub use lm::{LevenbergMarquardt, MinimizationReport, TerminationReason};
146pub use problem::{LeastSquaresProblem, SparseJacobian};
147
148pub use utils::{differentiate_holomorphic_numerically, differentiate_numerically};