// tiny_solver/optimizer/common.rs

1use std::collections::{HashMap, HashSet};
2use std::ops::Add;
3
4use nalgebra as na;
5
6use crate::parameter_block::ParameterBlock;
7use crate::problem;
8use crate::sparse::LinearSolverType;
9
10pub trait Optimizer {
11    fn optimize(
12        &self,
13        problem: &problem::Problem,
14        initial_values: &HashMap<String, na::DVector<f64>>,
15        optimizer_option: Option<OptimizerOptions>,
16    ) -> Option<HashMap<String, na::DVector<f64>>>;
17    fn apply_dx(
18        &self,
19        dx: &na::DVector<f64>,
20        params: &mut HashMap<String, na::DVector<f64>>,
21        variable_name_to_col_idx_dict: &HashMap<String, usize>,
22        fixed_var_indexes: &HashMap<String, HashSet<usize>>,
23        variable_bounds: &HashMap<String, HashMap<usize, (f64, f64)>>,
24    ) {
25        for (key, param) in params.iter_mut() {
26            if let Some(col_idx) = variable_name_to_col_idx_dict.get(key) {
27                let var_size = param.shape().0;
28                let mut updated_param = param.clone().add(dx.rows(*col_idx, var_size));
29                if let Some(indexes_to_fix) = fixed_var_indexes.get(key) {
30                    for &idx in indexes_to_fix {
31                        log::debug!("Fix {} {}", key, idx);
32                        updated_param[idx] = param[idx];
33                    }
34                }
35                if let Some(indexes_to_bound) = variable_bounds.get(key) {
36                    for (&idx, &(lower, upper)) in indexes_to_bound {
37                        let old = updated_param[idx];
38                        updated_param[idx] = updated_param[idx].max(lower).min(upper);
39                        log::debug!("bound {} {} {} -> {}", key, idx, old, updated_param[idx]);
40                    }
41                }
42                param.copy_from(&updated_param);
43            }
44        }
45    }
46    fn apply_dx2(
47        &self,
48        dx: &na::DVector<f64>,
49        params: &mut HashMap<String, ParameterBlock>,
50        variable_name_to_col_idx_dict: &HashMap<String, usize>,
51    ) {
52        params.iter_mut().for_each(|(key, param)| {
53            if let Some(col_idx) = variable_name_to_col_idx_dict.get(key) {
54                let var_size = param.tangent_size();
55                param.update_params(param.plus_f64(dx.rows(*col_idx, var_size)));
56            }
57        });
58        // for (key, param) in params.par_iter_mut() {
59        // }
60    }
61    fn compute_error(
62        &self,
63        problem: &problem::Problem,
64        params: &HashMap<String, ParameterBlock>,
65    ) -> f64 {
66        problem.compute_residuals(params, true).squared_norm_l2()
67    }
68}
69
/// Termination/progress state of a solver run.
///
/// `Clone`, `Copy`, and `Eq` are derived in addition to the original
/// `PartialEq`/`Debug`: the enum is dataless, so copying is free and callers
/// can store or compare statuses without borrowing gymnastics.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SolverStatus {
    /// Optimization has not converged or failed yet.
    Running,
    // Resulting solution may be OK to use.
    GradientTooSmall,         // eps > max(J'*f(x))
    RelativeStepSizeTooSmall, // eps > ||dx|| / ||x||
    ErrorTooSmall,            // eps > ||f(x)||
    /// Stopped because the iteration budget was exhausted.
    HitMaxIterations,
    // Numerical issues
    // FAILED_TO_EVALUATE_COST_FUNCTION,
    // FAILED_TO_SOLVER_LINEAR_SYSTEM,
}
82
/// Tunable knobs shared by all optimizers; see `Default` for the stock values.
#[derive(Clone)]
pub struct OptimizerOptions {
    /// Hard cap on the number of iterations before the solver gives up.
    pub max_iteration: usize,
    /// Backend used to solve the linear system at each iteration.
    pub linear_solver_type: LinearSolverType,
    /// Logging verbosity; 0 by default — presumably higher means chattier
    /// output (TODO confirm against the optimizers that read it).
    pub verbosity_level: usize,
    /// Stop when the absolute error decrease falls below this threshold.
    pub min_abs_error_decrease_threshold: f64,
    /// Stop when the relative error decrease falls below this threshold.
    pub min_rel_error_decrease_threshold: f64,
    /// Stop when the total error itself falls below this threshold.
    pub min_error_threshold: f64,
    // pub relative_step_threshold: 1e-16,
}
93
94impl Default for OptimizerOptions {
95    fn default() -> Self {
96        OptimizerOptions {
97            max_iteration: 100,
98            linear_solver_type: LinearSolverType::SparseCholesky,
99            verbosity_level: 0,
100            min_abs_error_decrease_threshold: 1e-5,
101            min_rel_error_decrease_threshold: 1e-5,
102            min_error_threshold: 1e-10,
103        }
104    }
105}