levenberg_marquardt/
lib.rs

//! Implementation of the [Levenberg-Marquardt](https://en.wikipedia.org/wiki/Levenberg%E2%80%93Marquardt_algorithm)
//! optimization algorithm using [nalgebra](https://nalgebra.org).
//!
//! This algorithm tries to solve the least squares optimization problem
//! ```math
//! \min_{\vec{x}\in\R^n}f(\vec{x})\quad\text{where}\quad\begin{cases}\begin{aligned}
//!   \ f\!:\R^n &\to \R \\
//!  \vec{x} &\mapsto \frac{1}{2}\sum_{i=1}^m \bigl(r_i(\vec{x})\bigr)^2,
//! \end{aligned}\end{cases}
//! ```
//! for differentiable _residual functions_ `$r_i\!:\R^n\to\R$`.
//!
//! # Inputs
//!
//! The problem has `$n$` parameters `$\vec{x}\in\R^n$` and `$m$` residual
//! functions `$r_i\!:\R^n\to\R$`.
//!
//! You must provide an implementation of
//!
//! - the residual vector `$\vec{x} \mapsto (r_1(\vec{x}), \ldots, r_m(\vec{x}))^\top\in\R^m$`
//! - and its Jacobian `$\mathbf{J} \in \R^{m\times n}$`, defined as
//!   ```math
//!   \mathbf{J} \coloneqq
//!   \def\arraystretch{1.5}
//!   \begin{pmatrix}
//!   \frac{\partial r_1}{\partial x_1} & \cdots & \frac{\partial r_1}{\partial x_n} \\
//!   \frac{\partial r_2}{\partial x_1} & \cdots & \frac{\partial r_2}{\partial x_n} \\
//!   \vdots & \ddots & \vdots \\
//!   \frac{\partial r_m}{\partial x_1} & \cdots & \frac{\partial r_m}{\partial x_n}
//!   \end{pmatrix}.
//!   ```
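//!
//! From these two quantities the algorithm can form everything else it needs;
//! for example, the gradient of the objective `$f$` is
//! ```math
//! \nabla f(\vec{x}) = \mathbf{J}(\vec{x})^\top \vec{r}(\vec{x}),
//! ```
//! where `$\vec{r}(\vec{x})\in\R^m$` denotes the residual vector.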
//!
//! Finally, you have to provide an initial guess for `$\vec{x}$`. This can be
//! a constant value, but the optimization result typically depends crucially on
//! a good initial guess.
//!
//! The algorithm also has a number of hyperparameters which are documented
//! at [`LevenbergMarquardt`](struct.LevenbergMarquardt.html).
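//!
//! For instance, termination tolerances and the evaluation budget can be tuned
//! with builder-style methods before calling `minimize`. A minimal sketch; see
//! the [`LevenbergMarquardt`](struct.LevenbergMarquardt.html) docs for the
//! exact semantics of each setting:
//!
//! ```
//! # use levenberg_marquardt::LevenbergMarquardt;
//! // stop once the relative reduction of the objective drops below `ftol`;
//! // `with_patience` caps how many residual evaluations are attempted
//! let _lm = LevenbergMarquardt::new().with_ftol(1e-10).with_patience(50);
//! ```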
//!
//! # Usage Example
//!
//! We use `$f(x, y) \coloneqq \frac{1}{2}[(x^2 + y - 11)^2 + (x + y^2 - 7)^2]$` as a [test function](https://en.wikipedia.org/wiki/Himmelblau%27s_function)
//! for this example.
//! In this case we have `$n = 2$` and `$m = 2$` with
//!
//! ```math
//!   r_1(\vec{x}) \coloneqq x_1^2 + x_2 - 11\quad\text{and}\quad
//!   r_2(\vec{x}) \coloneqq x_1 + x_2^2 - 7.
//! ```
//!
//! ```
//! # use nalgebra::*;
//! # use nalgebra::storage::Owned;
//! # use levenberg_marquardt::{LeastSquaresProblem, LevenbergMarquardt};
//! struct ExampleProblem {
//!     // holds current value of the n parameters
//!     p: Vector2<f64>,
//! }
//!
//! // We implement a trait for every problem we want to solve
//! impl LeastSquaresProblem<f64, U2, U2> for ExampleProblem {
//!     type ParameterStorage = Owned<f64, U2>;
//!     type ResidualStorage = Owned<f64, U2>;
//!     type JacobianStorage = Owned<f64, U2, U2>;
//!
//!     fn set_params(&mut self, p: &Vector2<f64>) {
//!         self.p.copy_from(p);
//!         // do common calculations for residuals and the Jacobian here
//!     }
//!
//!     fn params(&self) -> Vector2<f64> { self.p }
//!
//!     fn residuals(&self) -> Option<Vector2<f64>> {
//!         let [x, y] = [self.p.x, self.p.y];
//!         // vector containing residuals $r_1(\vec{x})$ and $r_2(\vec{x})$
//!         Some(Vector2::new(x*x + y - 11., x + y*y - 7.))
//!     }
//!
//!     fn jacobian(&self) -> Option<Matrix2<f64>> {
//!         let [x, y] = [self.p.x, self.p.y];
//!
//!         // first row of Jacobian, derivatives of first residual
//!         let d1_x = 2. * x; // $\frac{\partial}{\partial x_1}r_1(\vec{x}) = \frac{\partial}{\partial x} (x^2 + y - 11) = 2x$
//!         let d1_y = 1.;     // $\frac{\partial}{\partial x_2}r_1(\vec{x}) = \frac{\partial}{\partial y} (x^2 + y - 11) = 1$
//!
//!         // second row of Jacobian, derivatives of second residual
//!         let d2_x = 1.;     // $\frac{\partial}{\partial x_1}r_2(\vec{x}) = \frac{\partial}{\partial x} (x + y^2 - 7) = 1$
//!         let d2_y = 2. * y; // $\frac{\partial}{\partial x_2}r_2(\vec{x}) = \frac{\partial}{\partial y} (x + y^2 - 7) = 2y$
//!
//!         Some(Matrix2::new(
//!             d1_x, d1_y,
//!             d2_x, d2_y,
//!         ))
//!     }
//! }
//!
//! let problem = ExampleProblem {
//!     // the initial guess for $\vec{x}$
//!     p: Vector2::new(1., 1.),
//! };
//! let (_result, report) = LevenbergMarquardt::new().minimize(problem);
//! assert!(report.termination.was_successful());
//! assert!(report.objective_function.abs() < 1e-10);
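//! // the optimized parameters sit in the returned problem and can be read
//! // back through the trait; since the objective is near zero here, they
//! // approximate one of Himmelblau's four minima
//! let _solution = _result.params();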
//! ```
//!
//! # Derivative checking
//!
//! You should try using [`differentiate_numerically`](fn.differentiate_numerically.html)
//! in a unit test to verify that your Jacobian implementation matches the residuals.
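//!
//! A minimal sketch of such a test, reusing `ExampleProblem` from the usage
//! example above (marked `ignore` because the struct is defined in the previous
//! example; the matrix comparison assumes the
//! [`approx`](https://crates.io/crates/approx) crate, and the tolerance is
//! problem dependent):
//!
//! ```ignore
//! use approx::assert_relative_eq;
//! use levenberg_marquardt::differentiate_numerically;
//!
//! // compute the Jacobian with finite differences (this mutates the
//! // parameters stored in `problem`) and compare it to `jacobian()`
//! let mut problem = ExampleProblem { p: Vector2::new(1., 1.) };
//! let jacobian_numerical = differentiate_numerically(&mut problem).unwrap();
//! let jacobian_trait = problem.jacobian().unwrap();
//! assert_relative_eq!(jacobian_numerical, jacobian_trait, epsilon = 1e-7);
//! ```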
#![allow(unexpected_cfgs)]
#![no_std]
#![cfg_attr(feature = "RUSTC_IS_NIGHTLY", feature(core_intrinsics))]

extern crate alloc;

mod lm;
mod problem;
mod qr;
mod trust_region;
pub(crate) mod utils;

pub use lm::TerminationReason;
pub use problem::LeastSquaresProblem;

pub use utils::{differentiate_holomorphic_numerically, differentiate_numerically};

cfg_if::cfg_if! {
    if #[cfg(feature = "minpack-compat")] {
        pub type LevenbergMarquardt = lm::LevenbergMarquardt<f64>;
        pub type MinimizationReport = lm::MinimizationReport<f64>;
    } else {
        pub use lm::{LevenbergMarquardt, MinimizationReport};
    }
}