// vision_calibration_optim/backend/mod.rs
mod tiny_solver_backend;
mod tiny_solver_manifolds;

use crate::Error;
use anyhow::Result as AnyhowResult;
use nalgebra::DVector;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

use crate::ir::ProblemIR;

pub use tiny_solver_backend::TinySolverBackend;

/// Options controlling a single backend solve.
///
/// `None` for any optional field means "use the backend's own default".
/// See the `Default` impl for the values used when callers don't override.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendSolveOptions {
    /// Maximum number of optimizer iterations before the solve is stopped.
    pub max_iters: usize,
    /// Logging verbosity; 0 is silent (per the `Default` impl).
    pub verbosity: usize,
    /// Sparse linear solver the backend should use for the normal equations.
    pub linear_solver: Option<LinearSolverKind>,
    /// Stop when the absolute cost decrease per iteration falls below this.
    /// NOTE(review): threshold semantics inferred from the name — confirm
    /// against the backend implementation.
    pub min_abs_decrease: Option<f64>,
    /// Stop when the relative cost decrease per iteration falls below this.
    /// NOTE(review): semantics inferred from the name — confirm.
    pub min_rel_decrease: Option<f64>,
    /// Stop once the total error drops below this value.
    /// NOTE(review): semantics inferred from the name — confirm.
    pub min_error: Option<f64>,
}

36impl Default for BackendSolveOptions {
37 fn default() -> Self {
38 Self {
39 max_iters: 100,
40 verbosity: 0,
41 linear_solver: Some(LinearSolverKind::SparseCholesky),
42 min_abs_decrease: Some(1e-5),
43 min_rel_decrease: Some(1e-5),
44 min_error: Some(1e-10),
45 }
46 }
47}
48
/// Sparse linear solver used for the linearized subproblem inside the
/// nonlinear least-squares iterations.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum LinearSolverKind {
    /// Sparse Cholesky factorization (chosen by `BackendSolveOptions::default`).
    SparseCholesky,
    /// Sparse QR factorization.
    SparseQR,
}

/// Diagnostics produced by a backend after a solve.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SolveReport {
    /// Cost (objective value) at the final iterate of the solve.
    pub final_cost: f64,
}

/// Result of a successful backend solve.
#[derive(Debug, Clone)]
pub struct BackendSolution {
    /// Optimized parameter vectors — presumably keyed identically to the
    /// `initial` map passed to `OptimBackend::solve`; verify in the backend.
    pub params: HashMap<String, DVector<f64>>,
    /// Diagnostics describing how the solve went.
    pub solve_report: SolveReport,
}

/// Common interface implemented by every optimization backend.
pub trait OptimBackend {
    /// Solve the problem described by `ir`, starting from the parameter
    /// vectors in `initial`.
    ///
    /// Returns the optimized parameters plus a [`SolveReport`], or an
    /// `anyhow` error if the backend fails.
    fn solve(
        &self,
        ir: &ProblemIR,
        initial: &HashMap<String, DVector<f64>>,
        opts: &BackendSolveOptions,
    ) -> AnyhowResult<BackendSolution>;
}

/// Selects which optimization backend `solve_with_backend` dispatches to.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BackendKind {
    /// The tiny-solver backend implemented in this module.
    TinySolver,
    /// Ceres Solver — not implemented; selecting it yields an error.
    Ceres,
}

100pub fn solve_with_backend(
108 backend: BackendKind,
109 ir: &ProblemIR,
110 initial: &HashMap<String, DVector<f64>>,
111 opts: &BackendSolveOptions,
112) -> Result<BackendSolution, Error> {
113 match backend {
114 BackendKind::TinySolver => TinySolverBackend
115 .solve(ir, initial, opts)
116 .map_err(Error::from),
117 BackendKind::Ceres => Err(Error::numerical("Ceres backend not implemented")),
118 }
119}