Expand description
Robust least squares methods
This module provides M-estimators that are less sensitive to outliers than standard least squares. The key idea is to use a different loss function that reduces the influence of large residuals.
§Example
use scirs2_core::ndarray::{array, Array1, Array2};
use scirs2_optimize::least_squares::robust::{robust_least_squares, HuberLoss, RobustOptions};
// Define a function that returns the residuals
/// Residuals of the linear model y = x[0] + x[1] * t.
///
/// `data` packs the t-values in its first half and the observed
/// y-values in its second half.
fn residual(x: &[f64], data: &[f64]) -> Array1<f64> {
    let n = data.len() / 2;
    let (t_values, y_values) = data.split_at(n);
    let mut res = Array1::zeros(n);
    for (i, (&t, &y)) in t_values.iter().zip(y_values).enumerate() {
        // Residual = observed - predicted for the model y = x[0] + x[1] * t
        res[i] = y - (x[0] + x[1] * t);
    }
    res
}
// Define the Jacobian
/// Jacobian of the residuals with respect to the parameters.
///
/// The model is linear in the parameters, so the Jacobian does not
/// depend on `x`; the parameter is kept to match the expected signature.
fn jacobian(x: &[f64], data: &[f64]) -> Array2<f64> {
    let n = data.len() / 2;
    let mut jac = Array2::zeros((n, 2));
    for (i, &t) in data[..n].iter().enumerate() {
        // d(residual)/d(x[0]) and d(residual)/d(x[1])
        jac[[i, 0]] = -1.0;
        jac[[i, 1]] = -t;
    }
    jac
}
// Create data with outliers (concatenated x and y values)
let data = array![0.0, 1.0, 2.0, 3.0, 4.0, 0.1, 0.9, 2.1, 2.9, 10.0];
// Initial guess
let x0 = array![0.0, 0.0];
// Solve using Huber loss for robustness
let loss = HuberLoss::new(1.0);
let result = robust_least_squares(
residual,
&x0,
loss,
Some(jacobian),
&data,
None
)?;
assert!(result.success);

Structs§
- BisquareLoss - Bisquare (Tukey) loss function
- CauchyLoss - Cauchy loss function
- HuberLoss - Huber loss function
- RobustOptions - Options for robust least squares optimization
- SquaredLoss - Standard least squares loss (for comparison)
Traits§
- RobustLoss - Trait for robust loss functions
Functions§
- robust_least_squares - Solve a robust least squares problem using M-estimators