//! tiny_solver/optimizer/common.rs
//!
//! Shared optimizer interface, solver termination statuses, and options.

1use std::collections::{HashMap, HashSet};
2use std::ops::Add;
3
4use nalgebra as na;
5
6use crate::parameter_block::ParameterBlock;
7use crate::problem;
8use crate::sparse::LinearSolverType;
9
10pub trait Optimizer {
11    fn optimize(
12        &self,
13        problem: &problem::Problem,
14        initial_values: &HashMap<String, na::DVector<f64>>,
15        optimizer_option: Option<OptimizerOptions>,
16    ) -> Option<HashMap<String, na::DVector<f64>>>;
17    fn apply_dx(
18        &self,
19        dx: &na::DVector<f64>,
20        params: &mut HashMap<String, na::DVector<f64>>,
21        variable_name_to_col_idx_dict: &HashMap<String, usize>,
22        fixed_var_indexes: &HashMap<String, HashSet<usize>>,
23        variable_bounds: &HashMap<String, HashMap<usize, (f64, f64)>>,
24    ) {
25        for (key, param) in params.iter_mut() {
26            if let Some(col_idx) = variable_name_to_col_idx_dict.get(key) {
27                let var_size = param.shape().0;
28                let mut updated_param = param.clone().add(dx.rows(*col_idx, var_size));
29                if let Some(indexes_to_fix) = fixed_var_indexes.get(key) {
30                    for &idx in indexes_to_fix {
31                        log::debug!("Fix {} {}", key, idx);
32                        updated_param[idx] = param[idx];
33                    }
34                }
35                if let Some(indexes_to_bound) = variable_bounds.get(key) {
36                    for (&idx, &(lower, upper)) in indexes_to_bound {
37                        let old = updated_param[idx];
38                        updated_param[idx] = updated_param[idx].max(lower).min(upper);
39                        log::debug!("bound {} {} {} -> {}", key, idx, old, updated_param[idx]);
40                    }
41                }
42                param.copy_from(&updated_param);
43            }
44        }
45    }
46    fn apply_dx2(
47        &self,
48        dx: &na::DVector<f64>,
49        params: &mut HashMap<String, ParameterBlock>,
50        variable_name_to_col_idx_dict: &HashMap<String, usize>,
51    ) {
52        params.iter_mut().for_each(|(key, param)| {
53            if let Some(col_idx) = variable_name_to_col_idx_dict.get(key) {
54                let tangent_size = param.tangent_size();
55                let effective_size = if param.manifold.is_some() {
56                    tangent_size
57                } else {
58                    tangent_size - param.fixed_variables.len()
59                };
60
61                let dx_reduced = dx.rows(*col_idx, effective_size);
62
63                let mut dx_full = na::DVector::zeros(tangent_size);
64                if param.manifold.is_some() {
65                    dx_full.copy_from(&dx_reduced);
66                } else {
67                    let mut reduced_idx = 0;
68                    for i in 0..tangent_size {
69                        if !param.fixed_variables.contains(&i) {
70                            dx_full[i] = dx_reduced[reduced_idx];
71                            reduced_idx += 1;
72                        }
73                    }
74                }
75                param.update_params(param.plus_f64(dx_full.rows(0, tangent_size)));
76            }
77        });
78        // for (key, param) in params.par_iter_mut() {
79        // }
80    }
81    fn compute_error(
82        &self,
83        problem: &problem::Problem,
84        params: &HashMap<String, ParameterBlock>,
85    ) -> f64 {
86        problem
87            .compute_residuals(params, true)
88            .as_ref()
89            .squared_norm_l2()
90    }
91}
92
/// Termination / progress status of an optimization run.
// Clone, Copy, and Eq are derived in addition to the original PartialEq and
// Debug: a fieldless status enum is trivially copyable, and the extra derives
// are backward-compatible (Rust API guidelines C-COMMON-TRAITS).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SolverStatus {
    /// Optimization is still iterating.
    Running,
    // The next three statuses indicate convergence; the resulting solution
    // may be OK to use.
    /// eps > max(J'*f(x))
    GradientTooSmall,
    /// eps > ||dx|| / ||x||
    RelativeStepSizeTooSmall,
    /// eps > ||f(x)||
    ErrorTooSmall,
    /// Iteration budget exhausted before any convergence criterion was met.
    HitMaxIterations,
    // Numerical issues (not yet implemented):
    // FAILED_TO_EVALUATE_COST_FUNCTION,
    // FAILED_TO_SOLVER_LINEAR_SYSTEM,
}
105
/// Tunable settings controlling an optimizer run.
///
/// Construct via [`Default`] and override individual fields as needed.
#[derive(Clone)]
pub struct OptimizerOptions {
    /// Maximum number of iterations before the solver stops.
    pub max_iteration: usize,
    /// Backend used to solve the linearized system each iteration.
    pub linear_solver_type: LinearSolverType,
    /// Verbosity of progress output; 0 is silent.
    pub verbosity_level: usize,
    /// Stop when the absolute error decrease per iteration falls below this.
    pub min_abs_error_decrease_threshold: f64,
    /// Stop when the relative error decrease per iteration falls below this.
    pub min_rel_error_decrease_threshold: f64,
    /// Stop when the total error itself falls below this.
    pub min_error_threshold: f64,
    // Candidate future option, currently unused:
    // pub relative_step_threshold: 1e-16,
}
116
117impl Default for OptimizerOptions {
118    fn default() -> Self {
119        OptimizerOptions {
120            max_iteration: 100,
121            linear_solver_type: LinearSolverType::SparseCholesky,
122            verbosity_level: 0,
123            min_abs_error_decrease_threshold: 1e-5,
124            min_rel_error_decrease_threshold: 1e-5,
125            min_error_threshold: 1e-10,
126        }
127    }
128}