// vision_calibration_optim/backend/mod.rs
//! Backend adapters that compile the IR into solver-specific problems.
//!
//! Backends are responsible for translating the IR into solver-native graphs,
//! applying manifolds and constraints, and returning a solved parameter map.

mod tiny_solver_backend;
mod tiny_solver_manifolds;

use std::collections::HashMap;

use anyhow::Result as AnyhowResult;
use nalgebra::DVector;
use serde::{Deserialize, Serialize};

use crate::ir::ProblemIR;
use crate::Error;

pub use tiny_solver_backend::TinySolverBackend;
/// Backend-agnostic solver options.
///
/// All backends interpret these fields; `None` on a threshold presumably
/// disables that termination criterion — confirm against each backend adapter.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendSolveOptions {
    /// Maximum number of iterations for the optimizer.
    pub max_iters: usize,
    /// Verbosity level (backend-specific).
    pub verbosity: usize,
    /// Optional linear solver selection.
    pub linear_solver: Option<LinearSolverKind>,
    /// Absolute error decrease threshold for early termination.
    pub min_abs_decrease: Option<f64>,
    /// Relative error decrease threshold for early termination.
    pub min_rel_decrease: Option<f64>,
    /// Error threshold for early termination.
    pub min_error: Option<f64>,
}
35
36impl Default for BackendSolveOptions {
37    fn default() -> Self {
38        Self {
39            max_iters: 100,
40            verbosity: 0,
41            linear_solver: Some(LinearSolverKind::SparseCholesky),
42            min_abs_decrease: Some(1e-5),
43            min_rel_decrease: Some(1e-5),
44            min_error: Some(1e-10),
45        }
46    }
47}
48
/// Linear solver selection (backend-agnostic).
///
/// Backends map each variant onto their native factorization; see the
/// individual adapter modules for the concrete mapping.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum LinearSolverKind {
    /// Sparse Cholesky decomposition.
    SparseCholesky,
    /// Sparse QR decomposition.
    SparseQR,
}
57
/// Summary of backend solve outcome.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SolveReport {
    /// Final objective value reported by backend.
    pub final_cost: f64,
    // NOTE(review): strongly consider extending this with richer diagnostics:
    //   pub num_iters: usize,
    //   pub status: SolveStatus,
    //   pub time_ms: u64,
}
68
/// Solver output from a backend.
///
/// The `params` map uses the IR parameter block names.
#[derive(Debug, Clone)]
pub struct BackendSolution {
    /// Optimized parameter vectors keyed by block name.
    pub params: HashMap<String, DVector<f64>>,
    /// Summary of the solve (e.g. final cost) as reported by the backend.
    pub solve_report: SolveReport,
}
79
/// Backend interface implemented by solver adapters.
pub trait OptimBackend {
    /// Solve a compiled IR with the provided initial parameters.
    ///
    /// `initial` is keyed by IR parameter block name, matching
    /// [`BackendSolution::params`] in the returned value.
    ///
    /// # Errors
    ///
    /// Returns an error if the backend fails to build or solve the problem.
    fn solve(
        &self,
        ir: &ProblemIR,
        initial: &HashMap<String, DVector<f64>>,
        opts: &BackendSolveOptions,
    ) -> AnyhowResult<BackendSolution>;
}
90
/// Supported solver backends.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BackendKind {
    /// tiny-solver Levenberg-Marquardt backend.
    TinySolver,
    /// Placeholder for a Ceres backend.
    ///
    /// Selecting this currently yields an error from [`solve_with_backend`].
    Ceres,
}
99
100/// Solve a problem using the selected backend.
101///
102/// This is the main backend-agnostic entry point used by problems.
103///
104/// # Errors
105///
106/// Returns [`Error::Numerical`] if the solver fails or the requested backend is not available.
107pub fn solve_with_backend(
108    backend: BackendKind,
109    ir: &ProblemIR,
110    initial: &HashMap<String, DVector<f64>>,
111    opts: &BackendSolveOptions,
112) -> Result<BackendSolution, Error> {
113    match backend {
114        BackendKind::TinySolver => TinySolverBackend
115            .solve(ir, initial, opts)
116            .map_err(Error::from),
117        BackendKind::Ceres => Err(Error::numerical("Ceres backend not implemented")),
118    }
119}