tiny_solver/optimizer/common.rs

1use std::collections::{HashMap, HashSet};
2use std::ops::Add;
3
4use nalgebra as na;
5
6use crate::parameter_block::ParameterBlock;
7use crate::problem;
8use crate::sparse::LinearSolverType;
9
10pub trait Optimizer {
11    fn optimize(
12        &self,
13        problem: &problem::Problem,
14        initial_values: &HashMap<String, na::DVector<f64>>,
15        optimizer_option: Option<OptimizerOptions>,
16    ) -> Option<HashMap<String, na::DVector<f64>>>;
17    fn apply_dx(
18        &self,
19        dx: &na::DVector<f64>,
20        params: &mut HashMap<String, na::DVector<f64>>,
21        variable_name_to_col_idx_dict: &HashMap<String, usize>,
22        fixed_var_indexes: &HashMap<String, HashSet<usize>>,
23        variable_bounds: &HashMap<String, HashMap<usize, (f64, f64)>>,
24    ) {
25        for (key, param) in params.iter_mut() {
26            if let Some(col_idx) = variable_name_to_col_idx_dict.get(key) {
27                let var_size = param.shape().0;
28                let mut updated_param = param.clone().add(dx.rows(*col_idx, var_size));
29                if let Some(indexes_to_fix) = fixed_var_indexes.get(key) {
30                    for &idx in indexes_to_fix {
31                        log::debug!("Fix {} {}", key, idx);
32                        updated_param[idx] = param[idx];
33                    }
34                }
35                if let Some(indexes_to_bound) = variable_bounds.get(key) {
36                    for (&idx, &(lower, upper)) in indexes_to_bound {
37                        let old = updated_param[idx];
38                        updated_param[idx] = updated_param[idx].max(lower).min(upper);
39                        log::debug!("bound {} {} {} -> {}", key, idx, old, updated_param[idx]);
40                    }
41                }
42                param.copy_from(&updated_param);
43            }
44        }
45    }
46    fn apply_dx2(
47        &self,
48        dx: &na::DVector<f64>,
49        params: &mut HashMap<String, ParameterBlock>,
50        variable_name_to_col_idx_dict: &HashMap<String, usize>,
51    ) {
52        params.iter_mut().for_each(|(key, param)| {
53            if let Some(col_idx) = variable_name_to_col_idx_dict.get(key) {
54                let var_size = param.tangent_size();
55                param.update_params(param.plus_f64(dx.rows(*col_idx, var_size)));
56            }
57        });
58        // for (key, param) in params.par_iter_mut() {
59        // }
60    }
61    fn compute_error(
62        &self,
63        problem: &problem::Problem,
64        params: &HashMap<String, ParameterBlock>,
65    ) -> f64 {
66        problem
67            .compute_residuals(params, true)
68            .as_ref()
69            .squared_norm_l2()
70    }
71}
72
/// Termination state of a solve.
///
/// Every variant other than [`SolverStatus::Running`] is a stopping condition;
/// the "too small" variants indicate convergence, so the resulting solution
/// may be OK to use.
// Fieldless public status enum: derive the cheap value-semantics traits so
// callers can copy, compare, and hash statuses freely.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum SolverStatus {
    /// The solver has not terminated yet.
    Running,
    // Resulting solution may be OK to use.
    /// eps > max(J'*f(x))
    GradientTooSmall,
    /// eps > ||dx|| / ||x||
    RelativeStepSizeTooSmall,
    /// eps > ||f(x)||
    ErrorTooSmall,
    /// The iteration budget was exhausted before convergence.
    HitMaxIterations,
    // Numerical issues
    // FAILED_TO_EVALUATE_COST_FUNCTION,
    // FAILED_TO_SOLVER_LINEAR_SYSTEM,
}
85
/// Tunable options accepted by [`Optimizer::optimize`]; see the [`Default`]
/// impl for the stock values.
#[derive(Clone)]
pub struct OptimizerOptions {
    // Maximum number of solver iterations before stopping.
    pub max_iteration: usize,
    // Linear-solver backend used for the update step.
    pub linear_solver_type: LinearSolverType,
    // Logging verbosity; 0 (the default) is the quietest.
    pub verbosity_level: usize,
    // NOTE(review): the three thresholds below look like convergence criteria
    // (absolute error decrease, relative error decrease, absolute error) —
    // confirm against the concrete optimizer implementations.
    pub min_abs_error_decrease_threshold: f64,
    pub min_rel_error_decrease_threshold: f64,
    pub min_error_threshold: f64,
    // pub relative_step_threshold: 1e-16,
}
96
97impl Default for OptimizerOptions {
98    fn default() -> Self {
99        OptimizerOptions {
100            max_iteration: 100,
101            linear_solver_type: LinearSolverType::SparseCholesky,
102            verbosity_level: 0,
103            min_abs_error_decrease_threshold: 1e-5,
104            min_rel_error_decrease_threshold: 1e-5,
105            min_error_threshold: 1e-10,
106        }
107    }
108}