pub fn least_squares<F, J, D, S1, S2>(
residuals: F,
x0: &ArrayBase<S1, Ix1>,
method: Method,
jacobian: Option<J>,
data: &ArrayBase<S2, Ix1>,
options: Option<Options>,
) -> OptimizeResult<OptimizeResults<f64>>
Solve a nonlinear least-squares problem.
This function finds the optimal parameters that minimize the sum of
squares of the elements of the vector returned by the residuals function.
§Arguments
- `residuals` - Function that returns the residuals
- `x0` - Initial guess for the parameters
- `method` - Method to use for solving the problem
- `jacobian` - Jacobian of the residuals (optional)
- `data` - Additional data to pass to the residuals and jacobian functions
- `options` - Options for the solver
§Returns
`OptimizeResults` containing the optimization results
§Example
use scirs2_core::ndarray::{array, Array1, Array2};
use scirs2_optimize::least_squares::{least_squares, Method};
// Define a function that returns the residuals
// Residuals of the 2x2 linear system
//   x0 + 2*x1 = 2
//   x0 +   x1 = 1
// so the residual vector is zero at the solution [0.0, 1.0].
// The second slice is the extra `data` argument, unused here.
fn residual(x: &[f64], _: &[f64]) -> Array1<f64> {
    array![x[0] + 2.0 * x[1] - 2.0, x[0] + x[1] - 1.0]
}
// Define the Jacobian (optional)
// Jacobian of the residuals with respect to the parameters.
// The system is linear, so the Jacobian is a constant matrix and the
// parameter slice is unused — named `_x` to avoid an
// `unused_variables` warning when the doc-test is compiled.
fn jacobian(_x: &[f64], _: &[f64]) -> Array2<f64> {
    array![[1.0, 2.0], [1.0, 1.0]]
}
// Initial guess
let x0 = array![0.0, 0.0];
let data = array![]; // No data needed for this example
// Solve the least squares problem
let result = least_squares(residual, &x0, Method::LevenbergMarquardt, Some(jacobian), &data, None)?;
// The solution should be close to [0.0, 1.0]
assert!(result.success);