// apex_solver/core/problem.rs
1//! Optimization problem definition and sparse Jacobian computation.
2//!
3//! The `Problem` struct is the central component that defines a factor graph optimization problem.
4//! It manages residual blocks (constraints), variables, and the construction of sparse Jacobian
5//! matrices for efficient nonlinear least squares optimization.
6//!
7//! # Factor Graph Representation
8//!
9//! The optimization problem is represented as a bipartite factor graph:
10//!
11//! ```text
12//! Variables:  x0 --- x1 --- x2 --- x3
13//!              |      |      |      |
14//! Factors:    f0     f1     f2     f3 (constraints/measurements)
15//! ```
16//!
17//! Each factor connects one or more variables and contributes a residual (error) term to the
18//! overall cost function:
19//!
20//! ```text
21//! minimize Σ_i ρ(||r_i(x)||²)
22//! ```
23//!
24//! where `r_i(x)` is the residual for factor i, and ρ is an optional robust loss function.
25//!
26//! # Key Responsibilities
27//!
28//! 1. **Residual Block Management**: Add/remove factors and track their structure
29//! 2. **Variable Management**: Initialize variables with manifold types and constraints
30//! 3. **Sparsity Pattern**: Build symbolic structure for efficient sparse linear algebra
31//! 4. **Linearization**: Compute residuals and Jacobians in parallel
32//! 5. **Covariance**: Extract per-variable uncertainty estimates after optimization
33//!
34//! # Sparse Jacobian Structure
35//!
36//! The Jacobian matrix `J = ∂r/∂x` is sparse because each factor only depends on a small
37//! subset of variables. For example, a between factor connecting x0 and x1 contributes
38//! a 3×6 block (SE2) or 6×12 block (SE3) to the Jacobian, leaving the rest as zeros.
39//!
40//! The Problem pre-computes the sparsity pattern once, then efficiently fills in the
41//! numerical values during each iteration.
42//!
43//! # Mixed Manifold Support
44//!
45//! The Problem supports mixed manifold types in a single optimization problem via
46//! [`VariableEnum`]. This allows:
47//! - SE2 and SE3 poses in the same graph
48//! - SO3 rotations with R³ landmarks
49//! - Any combination of supported manifolds
50//!
51//! # Example: Building a Problem
52//!
53//! ```
54//! use apex_solver::core::problem::Problem;
55//! use apex_solver::factors::{BetweenFactor, PriorFactor};
56//! use apex_solver::core::loss_functions::HuberLoss;
57//! use apex_solver::manifold::ManifoldType;
58//! use nalgebra::{DVector, dvector};
59//! use std::collections::HashMap;
60//! use apex_solver::manifold::se2::SE2;
61//! # use apex_solver::error::ApexSolverResult;
62//! # fn example() -> ApexSolverResult<()> {
63//!
64//! let mut problem = Problem::new();
65//!
66//! // Add prior factor to anchor the first pose
67//! let prior = Box::new(PriorFactor {
68//!     data: dvector![0.0, 0.0, 0.0],
69//! });
70//! problem.add_residual_block(&["x0"], prior, None);
71//!
72//! // Add between factor with robust loss
73//! let between = Box::new(BetweenFactor::new(SE2::from_xy_angle(1.0, 0.0, 0.1)));
74//! let loss: Option<Box<dyn apex_solver::core::loss_functions::LossFunction + Send>> =
75//!     Some(Box::new(HuberLoss::new(1.0)?));
76//! problem.add_residual_block(&["x0", "x1"], between, loss);
77//!
78//! // Initialize variables
79//! let mut initial_values = HashMap::new();
80//! initial_values.insert("x0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
81//! initial_values.insert("x1".to_string(), (ManifoldType::SE2, dvector![0.9, 0.1, 0.12]));
82//!
83//! let variables = problem.initialize_variables(&initial_values);
84//! # Ok(())
85//! # }
86//! # example().unwrap();
87//! ```
88
89use std::{
90    collections::{HashMap, HashSet},
91    fs::File,
92    io::{Error, Write},
93    sync::{Arc, Mutex},
94};
95
96use faer::{
97    Col, Mat, MatRef,
98    sparse::{Argsort, Pair, SparseColMat, SymbolicSparseColMat},
99};
100use nalgebra::DVector;
101use rayon::prelude::*;
102use tracing::warn;
103
104use crate::{
105    core::{
106        CoreError, corrector::Corrector, loss_functions::LossFunction,
107        residual_block::ResidualBlock, variable::Variable,
108    },
109    error::{ApexSolverError, ApexSolverResult},
110    factors::Factor,
111    linalg::{SparseLinearSolver, extract_variable_covariances},
112};
113use apex_manifolds::{ManifoldType, rn, se2, se3, so2, so3};
114
/// Symbolic structure for sparse matrix operations.
///
/// Contains the sparsity pattern (which entries are non-zero) and an ordering
/// for efficient numerical computation. This is computed once at the beginning
/// and reused throughout optimization.
///
/// # Fields
///
/// - `pattern`: The symbolic sparse column matrix structure (row/col indices of non-zeros)
/// - `order`: A fill-reducing ordering/permutation for numerical stability
pub struct SymbolicStructure {
    /// Row/column indices of the structurally non-zero Jacobian entries.
    pub pattern: SymbolicSparseColMat<usize>,
    /// Ordering used when filling numerical values into the pattern.
    pub order: Argsort<usize>,
}
129
/// Enum to handle mixed manifold variable types.
///
/// Wraps a typed [`Variable`] for each supported manifold so that variables of
/// different manifolds can live in one container (e.g. `HashMap<String, VariableEnum>`).
#[derive(Clone)]
pub enum VariableEnum {
    /// Euclidean vector variable (n-dimensional).
    Rn(Variable<rn::Rn>),
    /// 2D rigid transform `[x, y, theta]`.
    SE2(Variable<se2::SE2>),
    /// 3D rigid transform (translation + quaternion rotation).
    SE3(Variable<se3::SE3>),
    /// 2D rotation `[theta]`.
    SO2(Variable<so2::SO2>),
    /// 3D rotation (quaternion).
    SO3(Variable<so3::SO3>),
}
139
140impl VariableEnum {
141    /// Get the manifold type for this variable
142    pub fn manifold_type(&self) -> ManifoldType {
143        match self {
144            VariableEnum::Rn(_) => ManifoldType::RN,
145            VariableEnum::SE2(_) => ManifoldType::SE2,
146            VariableEnum::SE3(_) => ManifoldType::SE3,
147            VariableEnum::SO2(_) => ManifoldType::SO2,
148            VariableEnum::SO3(_) => ManifoldType::SO3,
149        }
150    }
151
152    /// Get the tangent space size for this variable
153    pub fn get_size(&self) -> usize {
154        match self {
155            VariableEnum::Rn(var) => var.get_size(),
156            VariableEnum::SE2(var) => var.get_size(),
157            VariableEnum::SE3(var) => var.get_size(),
158            VariableEnum::SO2(var) => var.get_size(),
159            VariableEnum::SO3(var) => var.get_size(),
160        }
161    }
162
163    /// Convert to DVector for use with Factor trait
164    pub fn to_vector(&self) -> DVector<f64> {
165        match self {
166            VariableEnum::Rn(var) => var.value.clone().into(),
167            VariableEnum::SE2(var) => var.value.clone().into(),
168            VariableEnum::SE3(var) => var.value.clone().into(),
169            VariableEnum::SO2(var) => var.value.clone().into(),
170            VariableEnum::SO3(var) => var.value.clone().into(),
171        }
172    }
173
174    /// Apply a tangent space step to update this variable.
175    ///
176    /// This method applies a manifold plus operation: x_new = x ⊞ δx
177    /// where δx is a tangent vector. It supports all manifold types.
178    ///
179    /// # Arguments
180    /// * `step_slice` - View into the full step vector for this variable's DOF
181    ///
182    /// # Implementation Notes
183    /// Uses explicit clone instead of unsafe memory copy (`IntoNalgebra`) for small vectors.
184    /// This is safe and performant for typical manifold dimensions (1-6 DOF).
185    ///
186    pub fn apply_tangent_step(&mut self, step_slice: MatRef<f64>) {
187        match self {
188            VariableEnum::SE3(var) => {
189                let mut step_data: Vec<f64> = (0..6).map(|i| step_slice[(i, 0)]).collect();
190                for &fixed_idx in &var.fixed_indices {
191                    if fixed_idx < 6 {
192                        step_data[fixed_idx] = 0.0;
193                    }
194                }
195                let tangent = se3::SE3Tangent::from(DVector::from_vec(step_data));
196                let new_value = var.plus(&tangent);
197                var.set_value(new_value);
198            }
199            VariableEnum::SE2(var) => {
200                let mut step_data: Vec<f64> = (0..3).map(|i| step_slice[(i, 0)]).collect();
201                for &fixed_idx in &var.fixed_indices {
202                    if fixed_idx < 3 {
203                        step_data[fixed_idx] = 0.0;
204                    }
205                }
206                let tangent = se2::SE2Tangent::from(DVector::from_vec(step_data));
207                let new_value = var.plus(&tangent);
208                var.set_value(new_value);
209            }
210            VariableEnum::SO3(var) => {
211                let mut step_data: Vec<f64> = (0..3).map(|i| step_slice[(i, 0)]).collect();
212                for &fixed_idx in &var.fixed_indices {
213                    if fixed_idx < 3 {
214                        step_data[fixed_idx] = 0.0;
215                    }
216                }
217                let tangent = so3::SO3Tangent::from(DVector::from_vec(step_data));
218                let new_value = var.plus(&tangent);
219                var.set_value(new_value);
220            }
221            VariableEnum::SO2(var) => {
222                let mut step_data: Vec<f64> = (0..1).map(|i| step_slice[(i, 0)]).collect();
223                for &fixed_idx in &var.fixed_indices {
224                    if fixed_idx < 1 {
225                        step_data[fixed_idx] = 0.0;
226                    }
227                }
228                let tangent = so2::SO2Tangent::from(DVector::from_vec(step_data));
229                let new_value = var.plus(&tangent);
230                var.set_value(new_value);
231            }
232            VariableEnum::Rn(var) => {
233                let size = var.get_size();
234                let mut step_data: Vec<f64> = (0..size).map(|i| step_slice[(i, 0)]).collect();
235                for &fixed_idx in &var.fixed_indices {
236                    if fixed_idx < size {
237                        step_data[fixed_idx] = 0.0;
238                    }
239                }
240                let tangent = rn::RnTangent::new(DVector::from_vec(step_data));
241                let new_value = var.plus(&tangent);
242                var.set_value(new_value);
243            }
244        }
245    }
246
247    /// Get the covariance matrix for this variable (if computed).
248    ///
249    /// Returns `None` if covariance has not been computed.
250    ///
251    /// # Returns
252    /// Reference to the covariance matrix in tangent space
253    pub fn get_covariance(&self) -> Option<&Mat<f64>> {
254        match self {
255            VariableEnum::Rn(var) => var.get_covariance(),
256            VariableEnum::SE2(var) => var.get_covariance(),
257            VariableEnum::SE3(var) => var.get_covariance(),
258            VariableEnum::SO2(var) => var.get_covariance(),
259            VariableEnum::SO3(var) => var.get_covariance(),
260        }
261    }
262
263    /// Set the covariance matrix for this variable.
264    ///
265    /// The covariance matrix should be square with dimension equal to
266    /// the tangent space dimension of this variable.
267    ///
268    /// # Arguments
269    /// * `cov` - Covariance matrix in tangent space
270    pub fn set_covariance(&mut self, cov: Mat<f64>) {
271        match self {
272            VariableEnum::Rn(var) => var.set_covariance(cov),
273            VariableEnum::SE2(var) => var.set_covariance(cov),
274            VariableEnum::SE3(var) => var.set_covariance(cov),
275            VariableEnum::SO2(var) => var.set_covariance(cov),
276            VariableEnum::SO3(var) => var.set_covariance(cov),
277        }
278    }
279
280    /// Clear the covariance matrix for this variable.
281    pub fn clear_covariance(&mut self) {
282        match self {
283            VariableEnum::Rn(var) => var.clear_covariance(),
284            VariableEnum::SE2(var) => var.clear_covariance(),
285            VariableEnum::SE3(var) => var.clear_covariance(),
286            VariableEnum::SO2(var) => var.clear_covariance(),
287            VariableEnum::SO3(var) => var.clear_covariance(),
288        }
289    }
290
291    /// Get the bounds for this variable.
292    ///
293    /// Returns a reference to the bounds map where keys are indices and values are (lower, upper) pairs.
294    pub fn get_bounds(&self) -> &HashMap<usize, (f64, f64)> {
295        match self {
296            VariableEnum::Rn(var) => &var.bounds,
297            VariableEnum::SE2(var) => &var.bounds,
298            VariableEnum::SE3(var) => &var.bounds,
299            VariableEnum::SO2(var) => &var.bounds,
300            VariableEnum::SO3(var) => &var.bounds,
301        }
302    }
303
304    /// Get the fixed indices for this variable.
305    ///
306    /// Returns a reference to the set of indices that should remain fixed during optimization.
307    pub fn get_fixed_indices(&self) -> &HashSet<usize> {
308        match self {
309            VariableEnum::Rn(var) => &var.fixed_indices,
310            VariableEnum::SE2(var) => &var.fixed_indices,
311            VariableEnum::SE3(var) => &var.fixed_indices,
312            VariableEnum::SO2(var) => &var.fixed_indices,
313            VariableEnum::SO3(var) => &var.fixed_indices,
314        }
315    }
316
317    /// Set the value of this variable from a vector representation.
318    ///
319    /// This is used to update the variable after applying constraints (bounds and fixed indices).
320    pub fn set_from_vector(&mut self, vec: &DVector<f64>) {
321        match self {
322            VariableEnum::Rn(var) => {
323                var.set_value(rn::Rn::new(vec.clone()));
324            }
325            VariableEnum::SE2(var) => {
326                let new_se2: se2::SE2 = vec.clone().into();
327                var.set_value(new_se2);
328            }
329            VariableEnum::SE3(var) => {
330                let new_se3: se3::SE3 = vec.clone().into();
331                var.set_value(new_se3);
332            }
333            VariableEnum::SO2(var) => {
334                let new_so2: so2::SO2 = vec.clone().into();
335                var.set_value(new_so2);
336            }
337            VariableEnum::SO3(var) => {
338                let new_so3: so3::SO3 = vec.clone().into();
339                var.set_value(new_so3);
340            }
341        }
342    }
343}
344
/// The optimization problem definition for factor graph optimization.
///
/// Manages residual blocks (factors/constraints), variables, and the sparse Jacobian structure.
/// Supports mixed manifold types (SE2, SE3, SO2, SO3, Rn) in a single problem and provides
/// efficient parallel residual/Jacobian computation.
///
/// # Architecture
///
/// The Problem acts as a container and coordinator:
/// - Stores all residual blocks (factors with optional loss functions)
/// - Tracks the global structure (which variables connect to which factors)
/// - Builds and maintains the sparse Jacobian pattern
/// - Provides parallel residual/Jacobian evaluation using rayon
/// - Manages variable constraints (fixed indices, bounds)
///
/// # Workflow
///
/// 1. **Construction**: Create a new Problem with `Problem::new()`
/// 2. **Add Factors**: Use `add_residual_block()` to add constraints
/// 3. **Initialize Variables**: Use `initialize_variables()` with initial values
/// 4. **Build Sparsity**: Use `build_symbolic_structure()` once before optimization
/// 5. **Linearize**: Call `compute_residual_and_jacobian_sparse()` each iteration
/// 6. **Extract Covariance**: Use `compute_and_set_covariances()` after convergence
///
/// # Example
///
/// ```
/// use apex_solver::core::problem::Problem;
/// use apex_solver::factors::BetweenFactor;
/// use apex_solver::manifold::ManifoldType;
/// use apex_solver::manifold::se2::SE2;
/// use nalgebra::dvector;
/// use std::collections::HashMap;
///
/// let mut problem = Problem::new();
///
/// // Add a between factor
/// let factor = Box::new(BetweenFactor::new(SE2::from_xy_angle(1.0, 0.0, 0.1)));
/// problem.add_residual_block(&["x0", "x1"], factor, None);
///
/// // Initialize variables
/// let mut initial = HashMap::new();
/// initial.insert("x0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
/// initial.insert("x1".to_string(), (ManifoldType::SE2, dvector![1.0, 0.0, 0.1]));
///
/// let variables = problem.initialize_variables(&initial);
/// assert_eq!(variables.len(), 2);
/// ```
pub struct Problem {
    /// Total dimension of the stacked residual vector (sum of all residual block dimensions)
    pub total_residual_dimension: usize,

    /// Counter for assigning unique IDs to residual blocks.
    /// Monotonically increasing; IDs are never reused, even after a block is removed.
    residual_id_count: usize,

    /// Map from residual block ID to ResidualBlock instance
    residual_blocks: HashMap<usize, ResidualBlock>,

    /// Variables with fixed indices (e.g., fix first pose's x,y coordinates)
    /// Maps variable name -> set of indices to fix
    pub fixed_variable_indexes: HashMap<String, HashSet<usize>>,

    /// Variable bounds (box constraints on individual DOF)
    /// Maps variable name -> (index -> (lower_bound, upper_bound))
    pub variable_bounds: HashMap<String, HashMap<usize, (f64, f64)>>,
}
impl Default for Problem {
    /// Equivalent to [`Problem::new`]: an empty problem with no residual blocks.
    fn default() -> Self {
        Self::new()
    }
}
416
417impl Problem {
418    /// Create a new empty optimization problem.
419    ///
420    /// # Returns
421    ///
422    /// A new `Problem` with no residual blocks or variables
423    ///
424    /// # Example
425    ///
426    /// ```
427    /// use apex_solver::core::problem::Problem;
428    ///
429    /// let problem = Problem::new();
430    /// assert_eq!(problem.num_residual_blocks(), 0);
431    /// assert_eq!(problem.total_residual_dimension, 0);
432    /// ```
433    pub fn new() -> Self {
434        Self {
435            total_residual_dimension: 0,
436            residual_id_count: 0,
437            residual_blocks: HashMap::new(),
438            fixed_variable_indexes: HashMap::new(),
439            variable_bounds: HashMap::new(),
440        }
441    }
442
443    /// Add a residual block (factor with optional loss function) to the problem.
444    ///
445    /// This is the primary method for building the factor graph. Each call adds one constraint
446    /// connecting one or more variables.
447    ///
448    /// # Arguments
449    ///
450    /// * `variable_key_size_list` - Names of the variables this factor connects (order matters)
451    /// * `factor` - The factor implementation that computes residuals and Jacobians
452    /// * `loss_func` - Optional robust loss function for outlier rejection
453    ///
454    /// # Returns
455    ///
456    /// The unique ID assigned to this residual block
457    ///
458    /// # Example
459    ///
460    /// ```
461    /// use apex_solver::core::problem::Problem;
462    /// use apex_solver::factors::{BetweenFactor, PriorFactor};
463    /// use apex_solver::core::loss_functions::HuberLoss;
464    /// use nalgebra::dvector;
465    /// use apex_solver::manifold::se2::SE2;
466    /// # use apex_solver::error::ApexSolverResult;
467    /// # fn example() -> ApexSolverResult<()> {
468    ///
469    /// let mut problem = Problem::new();
470    ///
471    /// // Add prior factor (unary constraint)
472    /// let prior = Box::new(PriorFactor { data: dvector![0.0, 0.0, 0.0] });
473    /// let id1 = problem.add_residual_block(&["x0"], prior, None);
474    ///
475    /// // Add between factor with robust loss (binary constraint)
476    /// let between = Box::new(BetweenFactor::new(SE2::from_xy_angle(1.0, 0.0, 0.1)));
477    /// let loss: Option<Box<dyn apex_solver::core::loss_functions::LossFunction + Send>> =
478    ///     Some(Box::new(HuberLoss::new(1.0)?));
479    /// let id2 = problem.add_residual_block(&["x0", "x1"], between, loss);
480    ///
481    /// assert_eq!(id1, 0);
482    /// assert_eq!(id2, 1);
483    /// assert_eq!(problem.num_residual_blocks(), 2);
484    /// # Ok(())
485    /// # }
486    /// # example().unwrap();
487    /// ```
488    pub fn add_residual_block(
489        &mut self,
490        variable_key_size_list: &[&str],
491        factor: Box<dyn Factor + Send>,
492        loss_func: Option<Box<dyn LossFunction + Send>>,
493    ) -> usize {
494        let new_residual_dimension = factor.get_dimension();
495        self.residual_blocks.insert(
496            self.residual_id_count,
497            ResidualBlock::new(
498                self.residual_id_count,
499                self.total_residual_dimension,
500                variable_key_size_list,
501                factor,
502                loss_func,
503            ),
504        );
505        let block_id = self.residual_id_count;
506        self.residual_id_count += 1;
507
508        self.total_residual_dimension += new_residual_dimension;
509
510        block_id
511    }
512
513    pub fn remove_residual_block(&mut self, block_id: usize) -> Option<ResidualBlock> {
514        if let Some(residual_block) = self.residual_blocks.remove(&block_id) {
515            self.total_residual_dimension -= residual_block.factor.get_dimension();
516            Some(residual_block)
517        } else {
518            None
519        }
520    }
521
522    pub fn fix_variable(&mut self, var_to_fix: &str, idx: usize) {
523        if let Some(var_mut) = self.fixed_variable_indexes.get_mut(var_to_fix) {
524            var_mut.insert(idx);
525        } else {
526            self.fixed_variable_indexes
527                .insert(var_to_fix.to_owned(), HashSet::from([idx]));
528        }
529    }
530
    /// Remove all fixed-index constraints previously registered for `var_to_unfix`.
    pub fn unfix_variable(&mut self, var_to_unfix: &str) {
        self.fixed_variable_indexes.remove(var_to_unfix);
    }
534
535    pub fn set_variable_bounds(
536        &mut self,
537        var_to_bound: &str,
538        idx: usize,
539        lower_bound: f64,
540        upper_bound: f64,
541    ) {
542        if lower_bound > upper_bound {
543            warn!("lower bound is larger than upper bound");
544        } else if let Some(var_mut) = self.variable_bounds.get_mut(var_to_bound) {
545            var_mut.insert(idx, (lower_bound, upper_bound));
546        } else {
547            self.variable_bounds.insert(
548                var_to_bound.to_owned(),
549                HashMap::from([(idx, (lower_bound, upper_bound))]),
550            );
551        }
552    }
553
    /// Remove all box constraints previously registered for `var_to_unbound`.
    pub fn remove_variable_bounds(&mut self, var_to_unbound: &str) {
        self.variable_bounds.remove(var_to_unbound);
    }
557
558    /// Initialize variables from initial values with manifold types.
559    ///
560    /// Converts raw initial values into typed `Variable<M>` instances wrapped in `VariableEnum`.
561    /// This method also applies any fixed indices or bounds that were set via `fix_variable()`
562    /// or `set_variable_bounds()`.
563    ///
564    /// # Arguments
565    ///
566    /// * `initial_values` - Map from variable name to (manifold type, initial value vector)
567    ///
568    /// # Returns
569    ///
570    /// Map from variable name to `VariableEnum` (typed variables ready for optimization)
571    ///
572    /// # Manifold Formats
573    ///
574    /// - **SE2**: `[x, y, theta]` (3 elements)
575    /// - **SE3**: `[tx, ty, tz, qw, qx, qy, qz]` (7 elements)
576    /// - **SO2**: `[theta]` (1 element)
577    /// - **SO3**: `[qw, qx, qy, qz]` (4 elements)
578    /// - **Rn**: `[x1, x2, ..., xn]` (n elements)
579    ///
580    /// # Example
581    ///
582    /// ```
583    /// use apex_solver::core::problem::Problem;
584    /// use apex_solver::manifold::ManifoldType;
585    /// use nalgebra::dvector;
586    /// use std::collections::HashMap;
587    ///
588    /// let problem = Problem::new();
589    ///
590    /// let mut initial = HashMap::new();
591    /// initial.insert("pose0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
592    /// initial.insert("pose1".to_string(), (ManifoldType::SE2, dvector![1.0, 0.0, 0.1]));
593    /// initial.insert("landmark".to_string(), (ManifoldType::RN, dvector![5.0, 3.0]));
594    ///
595    /// let variables = problem.initialize_variables(&initial);
596    /// assert_eq!(variables.len(), 3);
597    /// ```
598    pub fn initialize_variables(
599        &self,
600        initial_values: &HashMap<String, (ManifoldType, DVector<f64>)>,
601    ) -> HashMap<String, VariableEnum> {
602        let variables: HashMap<String, VariableEnum> = initial_values
603            .iter()
604            .map(|(k, v)| {
605                let variable_enum = match v.0 {
606                    ManifoldType::SO2 => {
607                        assert_eq!(
608                            v.1.len(),
609                            1,
610                            "Variable '{}': SO2 expects 1 element, got {}",
611                            k,
612                            v.1.len()
613                        );
614                        let mut var = Variable::new(so2::SO2::from(v.1.clone()));
615                        if let Some(indexes) = self.fixed_variable_indexes.get(k) {
616                            var.fixed_indices = indexes.clone();
617                        }
618                        if let Some(bounds) = self.variable_bounds.get(k) {
619                            var.bounds = bounds.clone();
620                        }
621                        VariableEnum::SO2(var)
622                    }
623                    ManifoldType::SO3 => {
624                        assert_eq!(
625                            v.1.len(),
626                            4,
627                            "Variable '{}': SO3 expects 4 elements, got {}",
628                            k,
629                            v.1.len()
630                        );
631                        let mut var = Variable::new(so3::SO3::from(v.1.clone()));
632                        if let Some(indexes) = self.fixed_variable_indexes.get(k) {
633                            var.fixed_indices = indexes.clone();
634                        }
635                        if let Some(bounds) = self.variable_bounds.get(k) {
636                            var.bounds = bounds.clone();
637                        }
638                        VariableEnum::SO3(var)
639                    }
640                    ManifoldType::SE2 => {
641                        assert_eq!(
642                            v.1.len(),
643                            3,
644                            "Variable '{}': SE2 expects 3 elements, got {}",
645                            k,
646                            v.1.len()
647                        );
648                        let mut var = Variable::new(se2::SE2::from(v.1.clone()));
649                        if let Some(indexes) = self.fixed_variable_indexes.get(k) {
650                            var.fixed_indices = indexes.clone();
651                        }
652                        if let Some(bounds) = self.variable_bounds.get(k) {
653                            var.bounds = bounds.clone();
654                        }
655                        VariableEnum::SE2(var)
656                    }
657                    ManifoldType::SE3 => {
658                        assert_eq!(
659                            v.1.len(),
660                            7,
661                            "Variable '{}': SE3 expects 7 elements, got {}",
662                            k,
663                            v.1.len()
664                        );
665                        let mut var = Variable::new(se3::SE3::from(v.1.clone()));
666                        if let Some(indexes) = self.fixed_variable_indexes.get(k) {
667                            var.fixed_indices = indexes.clone();
668                        }
669                        if let Some(bounds) = self.variable_bounds.get(k) {
670                            var.bounds = bounds.clone();
671                        }
672                        VariableEnum::SE3(var)
673                    }
674                    ManifoldType::RN => {
675                        let mut var = Variable::new(rn::Rn::from(v.1.clone()));
676                        if let Some(indexes) = self.fixed_variable_indexes.get(k) {
677                            var.fixed_indices = indexes.clone();
678                        }
679                        if let Some(bounds) = self.variable_bounds.get(k) {
680                            var.bounds = bounds.clone();
681                        }
682                        VariableEnum::Rn(var)
683                    }
684                };
685
686                (k.to_owned(), variable_enum)
687            })
688            .collect();
689        variables
690    }
691
    /// Get the number of residual blocks (factors) currently in the problem.
    pub fn num_residual_blocks(&self) -> usize {
        self.residual_blocks.len()
    }
696
697    /// Build symbolic structure for sparse Jacobian computation
698    ///
699    /// This method constructs the sparsity pattern of the Jacobian matrix before numerical
700    /// computation. It determines which entries in the Jacobian will be non-zero based on
701    /// the structure of the optimization problem (which residual blocks connect which variables).
702    ///
703    /// # Purpose
704    /// - Pre-allocates memory for sparse matrix operations
705    /// - Enables efficient sparse linear algebra (avoiding dense operations)
706    /// - Computed once at the beginning, used throughout optimization
707    ///
708    /// # Arguments
709    /// * `variables` - Map of variable names to their values and properties (SE2, SE3, etc.)
710    /// * `variable_index_sparce_matrix` - Map from variable name to starting column index in Jacobian
711    /// * `total_dof` - Total degrees of freedom (number of columns in Jacobian)
712    ///
713    /// # Returns
714    /// A `SymbolicStructure` containing:
715    /// - `pattern`: The symbolic sparse column matrix structure (row/col indices of non-zeros)
716    /// - `order`: An ordering/permutation for efficient numerical computation
717    ///
718    /// # Algorithm
719    /// For each residual block:
720    /// 1. Identify which variables it depends on
721    /// 2. For each (residual_dimension × variable_dof) block, mark entries as non-zero
722    /// 3. Convert to optimized sparse matrix representation
723    ///
724    /// # Example Structure
725    /// For a simple problem with 3 SE2 poses (9 DOF total):
726    /// - Between(x0, x1): Creates 3×6 block at rows 0-2, cols 0-5
727    /// - Between(x1, x2): Creates 3×6 block at rows 3-5, cols 3-8
728    /// - Prior(x0): Creates 3×3 block at rows 6-8, cols 0-2
729    ///
730    /// Result: 9×9 sparse Jacobian with 45 non-zero entries
731    pub fn build_symbolic_structure(
732        &self,
733        variables: &HashMap<String, VariableEnum>,
734        variable_index_sparce_matrix: &HashMap<String, usize>,
735        total_dof: usize,
736    ) -> ApexSolverResult<SymbolicStructure> {
737        // Vector to accumulate all (row, col) pairs that will be non-zero in the Jacobian
738        // Each Pair represents one entry in the sparse matrix
739        let mut indices = Vec::<Pair<usize, usize>>::new();
740
741        // Iterate through all residual blocks (factors/constraints) in the problem
742        // Each residual block contributes a block of entries to the Jacobian
743        self.residual_blocks.iter().for_each(|(_, residual_block)| {
744            // Create local indexing for this residual block's variables
745            // Maps each variable to its local starting index and size within this factor
746            // Example: For Between(x0, x1) with SE2: [(0, 3), (3, 3)]
747            //   - x0 starts at local index 0, has 3 DOF
748            //   - x1 starts at local index 3, has 3 DOF
749            let mut variable_local_idx_size_list = Vec::<(usize, usize)>::new();
750            let mut count_variable_local_idx: usize = 0;
751
752            // Build the local index mapping for this residual block
753            for var_key in &residual_block.variable_key_list {
754                if let Some(variable) = variables.get(var_key) {
755                    // Store (local_start_index, dof_size) for this variable
756                    variable_local_idx_size_list
757                        .push((count_variable_local_idx, variable.get_size()));
758                    count_variable_local_idx += variable.get_size();
759                }
760            }
761
762            // For each variable in this residual block, generate Jacobian entries
763            for (i, var_key) in residual_block.variable_key_list.iter().enumerate() {
764                if let Some(variable_global_idx) = variable_index_sparce_matrix.get(var_key) {
765                    // Get the DOF size for this variable
766                    let (_, var_size) = variable_local_idx_size_list[i];
767
768                    // Generate all (row, col) pairs for the Jacobian block:
769                    // ∂(residual) / ∂(variable)
770                    //
771                    // For a residual block with dimension R and variable with DOF V:
772                    // Creates R × V entries in the Jacobian
773
774                    // Iterate over each residual dimension (rows)
775                    for row_idx in 0..residual_block.factor.get_dimension() {
776                        // Iterate over each variable DOF (columns)
777                        for col_idx in 0..var_size {
778                            // Compute global row index:
779                            // Start from this residual block's first row, add offset
780                            let global_row_idx = residual_block.residual_row_start_idx + row_idx;
781
782                            // Compute global column index:
783                            // Start from this variable's first column, add offset
784                            let global_col_idx = variable_global_idx + col_idx;
785
786                            // Record this (row, col) pair as a non-zero entry
787                            indices.push(Pair::new(global_row_idx, global_col_idx));
788                        }
789                    }
790                }
791            }
792        });
793
794        // Convert the list of (row, col) pairs into an optimized symbolic sparse matrix
795        // This performs:
796        // 1. Duplicate elimination (same entry might be referenced multiple times)
797        // 2. Sorting for column-wise storage format
798        // 3. Computing a fill-reducing ordering for numerical stability
799        // 4. Allocating the symbolic structure (no values yet, just pattern)
800        let (pattern, order) = SymbolicSparseColMat::try_new_from_indices(
801            self.total_residual_dimension, // Number of rows (total residual dimension)
802            total_dof,                     // Number of columns (total DOF)
803            &indices,                      // List of non-zero entry locations
804        )
805        .map_err(|e| {
806            CoreError::SymbolicStructure(
807                "Failed to build symbolic sparse matrix structure".to_string(),
808            )
809            .log_with_source(e)
810        })?;
811
812        // Return the symbolic structure that will be filled with numerical values later
813        Ok(SymbolicStructure { pattern, order })
814    }
815
816    /// Compute only the residual vector for the current variable values.
817    ///
818    /// This is an optimized version that skips Jacobian computation when only the cost
819    /// function value is needed (e.g., during initialization or step evaluation).
820    ///
821    /// # Arguments
822    ///
823    /// * `variables` - Current variable values (from `initialize_variables()` or updated)
824    ///
825    /// # Returns
826    ///
827    /// Residual vector as N×1 column matrix (N = total residual dimension)
828    ///
829    /// # Performance
830    ///
831    /// Approximately **2x faster** than `compute_residual_and_jacobian_sparse()` since it:
832    /// - Skips Jacobian computation for each residual block
833    /// - Avoids Jacobian matrix assembly and storage
834    /// - Only parallelizes residual evaluation
835    ///
836    /// # When to Use
837    ///
838    /// - **Initial cost computation**: When setting up optimization state
839    /// - **Step evaluation**: When computing new cost after applying parameter updates
840    /// - **Cost-only queries**: When you don't need gradients
841    ///
842    /// Use `compute_residual_and_jacobian_sparse()` when you need both residual and Jacobian
843    /// (e.g., in the main optimization iteration loop for linearization).
844    ///
845    /// # Example
846    ///
847    /// ```no_run
848    /// # use apex_solver::core::problem::Problem;
849    /// # use std::collections::HashMap;
850    /// # fn example() -> Result<(), Box<dyn std::error::Error>> {
851    /// # let problem = Problem::new();
852    /// # let variables = HashMap::new();
853    /// // Initial cost evaluation (no Jacobian needed)
854    /// let residual = problem.compute_residual_sparse(&variables)?;
855    /// let initial_cost = residual.norm_l2() * residual.norm_l2();
856    /// # Ok(())
857    /// # }
858    /// ```
859    pub fn compute_residual_sparse(
860        &self,
861        variables: &HashMap<String, VariableEnum>,
862    ) -> ApexSolverResult<Mat<f64>> {
863        let total_residual = Arc::new(Mutex::new(Col::<f64>::zeros(self.total_residual_dimension)));
864
865        // Compute residuals in parallel (no Jacobian needed)
866        let result: Result<Vec<()>, ApexSolverError> = self
867            .residual_blocks
868            .par_iter()
869            .map(|(_, residual_block)| {
870                self.compute_residual_block(residual_block, variables, &total_residual)
871            })
872            .collect();
873
874        result?;
875
876        let total_residual = Arc::try_unwrap(total_residual)
877            .map_err(|_| {
878                CoreError::ParallelComputation(
879                    "Failed to unwrap Arc for total residual".to_string(),
880                )
881                .log()
882            })?
883            .into_inner()
884            .map_err(|e| {
885                CoreError::ParallelComputation(
886                    "Failed to extract mutex inner value for total residual".to_string(),
887                )
888                .log_with_source(e)
889            })?;
890
891        // Convert faer Col to Mat (column vector as n×1 matrix)
892        let residual_faer = total_residual.as_ref().as_mat().to_owned();
893        Ok(residual_faer)
894    }
895
896    /// Compute residual vector and sparse Jacobian matrix for the current variable values.
897    ///
898    /// This is the core linearization method called during each optimization iteration. It:
899    /// 1. Evaluates all residual blocks in parallel using rayon
900    /// 2. Assembles the full residual vector
901    /// 3. Constructs the sparse Jacobian matrix using the precomputed symbolic structure
902    ///
903    /// # Arguments
904    ///
905    /// * `variables` - Current variable values (from `initialize_variables()` or updated)
906    /// * `variable_index_sparce_matrix` - Map from variable name to starting column in Jacobian
907    /// * `symbolic_structure` - Precomputed sparsity pattern (from `build_symbolic_structure()`)
908    ///
909    /// # Returns
910    ///
911    /// Tuple `(residual, jacobian)` where:
912    /// - `residual`: N×1 column matrix (total residual dimension)
913    /// - `jacobian`: N×M sparse matrix (N = residual dim, M = total DOF)
914    ///
915    /// # Performance
916    ///
917    /// This method is highly optimized:
918    /// - **Parallel evaluation**: Each residual block is evaluated independently using rayon
919    /// - **Sparse storage**: Only non-zero Jacobian entries are stored and computed
920    /// - **Memory efficient**: Preallocated sparse structure avoids dynamic allocations
921    ///
922    /// Typically accounts for 40-60% of total optimization time (including sparse matrix ops).
923    ///
924    /// # When to Use
925    ///
926    /// Use this method in the main optimization loop when you need both residual and Jacobian
927    /// for linearization. For cost-only evaluation, use `compute_residual_sparse()` instead.
928    ///
929    /// # Example
930    ///
931    /// ```
932    /// # use apex_solver::core::problem::Problem;
933    /// # use std::collections::HashMap;
934    /// // Inside optimizer loop, compute both residual and Jacobian for linearization
935    /// // let (residual, jacobian) = problem.compute_residual_and_jacobian_sparse(
936    /// //     &variables,
937    /// //     &variable_index_map,
938    /// //     &symbolic_structure,
939    /// // )?;
940    /// //
941    /// // Use for linear system: J^T J dx = -J^T r
942    /// ```
943    pub fn compute_residual_and_jacobian_sparse(
944        &self,
945        variables: &HashMap<String, VariableEnum>,
946        variable_index_sparce_matrix: &HashMap<String, usize>,
947        symbolic_structure: &SymbolicStructure,
948    ) -> ApexSolverResult<(Mat<f64>, SparseColMat<usize, f64>)> {
949        let total_residual = Arc::new(Mutex::new(Col::<f64>::zeros(self.total_residual_dimension)));
950
951        // OPTIMIZATION: Pre-allocate exact size to avoid double allocation and flattening
952        // This eliminates the Vec<Vec<f64>> → Vec<f64> conversion overhead
953        let total_nnz = symbolic_structure.pattern.compute_nnz();
954
955        // Collect block results with pre-computed sizes
956        let jacobian_blocks: Result<Vec<(usize, Vec<f64>)>, ApexSolverError> = self
957            .residual_blocks
958            .par_iter()
959            .map(|(_, residual_block)| {
960                let values = self.compute_residual_and_jacobian_block(
961                    residual_block,
962                    variables,
963                    variable_index_sparce_matrix,
964                    &total_residual,
965                )?;
966                let size = values.len();
967                Ok((size, values))
968            })
969            .collect();
970
971        let jacobian_blocks = jacobian_blocks?;
972
973        // Pre-allocate final vec with exact size
974        let mut jacobian_values = Vec::with_capacity(total_nnz);
975        for (_size, mut block_values) in jacobian_blocks {
976            jacobian_values.append(&mut block_values);
977        }
978
979        let total_residual = Arc::try_unwrap(total_residual)
980            .map_err(|_| {
981                CoreError::ParallelComputation(
982                    "Failed to unwrap Arc for total residual".to_string(),
983                )
984                .log()
985            })?
986            .into_inner()
987            .map_err(|e| {
988                CoreError::ParallelComputation(
989                    "Failed to extract mutex inner value for total residual".to_string(),
990                )
991                .log_with_source(e)
992            })?;
993
994        // Convert faer Col to Mat (column vector as n×1 matrix)
995        let residual_faer = total_residual.as_ref().as_mat().to_owned();
996        let jacobian_sparse = SparseColMat::new_from_argsort(
997            symbolic_structure.pattern.clone(),
998            &symbolic_structure.order,
999            jacobian_values.as_slice(),
1000        )
1001        .map_err(|e| {
1002            CoreError::SymbolicStructure(
1003                "Failed to create sparse Jacobian from argsort".to_string(),
1004            )
1005            .log_with_source(e)
1006        })?;
1007
1008        Ok((residual_faer, jacobian_sparse))
1009    }
1010
1011    /// Compute only the residual for a single residual block (no Jacobian).
1012    ///
1013    /// Helper method for `compute_residual_sparse()`.
1014    fn compute_residual_block(
1015        &self,
1016        residual_block: &ResidualBlock,
1017        variables: &HashMap<String, VariableEnum>,
1018        total_residual: &Arc<Mutex<Col<f64>>>,
1019    ) -> ApexSolverResult<()> {
1020        let mut param_vectors: Vec<DVector<f64>> = Vec::new();
1021
1022        for var_key in &residual_block.variable_key_list {
1023            if let Some(variable) = variables.get(var_key) {
1024                param_vectors.push(variable.to_vector());
1025            }
1026        }
1027
1028        // Compute only residual (linearize still computes Jacobian internally,
1029        // but we don't extract/store it)
1030        let (mut res, _) = residual_block.factor.linearize(&param_vectors, false);
1031
1032        // Apply loss function if present (critical for robust optimization)
1033        if let Some(loss_func) = &residual_block.loss_func {
1034            let squared_norm = res.dot(&res);
1035            let corrector = Corrector::new(loss_func.as_ref(), squared_norm);
1036            corrector.correct_residuals(&mut res);
1037        }
1038
1039        let mut total_residual = total_residual.lock().map_err(|e| {
1040            CoreError::ParallelComputation("Failed to acquire lock on total residual".to_string())
1041                .log_with_source(e)
1042        })?;
1043
1044        // Copy residual values from nalgebra DVector to faer Col
1045        let start_idx = residual_block.residual_row_start_idx;
1046        let dim = residual_block.factor.get_dimension();
1047        let mut total_residual_mut = total_residual.as_mut();
1048        for i in 0..dim {
1049            total_residual_mut[start_idx + i] = res[i];
1050        }
1051
1052        Ok(())
1053    }
1054
1055    /// Compute residual and Jacobian for a single residual block.
1056    ///
1057    /// Helper method for `compute_residual_and_jacobian_sparse()`.
1058    fn compute_residual_and_jacobian_block(
1059        &self,
1060        residual_block: &ResidualBlock,
1061        variables: &HashMap<String, VariableEnum>,
1062        variable_index_sparce_matrix: &HashMap<String, usize>,
1063        total_residual: &Arc<Mutex<Col<f64>>>,
1064    ) -> ApexSolverResult<Vec<f64>> {
1065        let mut param_vectors: Vec<DVector<f64>> = Vec::new();
1066        let mut var_sizes: Vec<usize> = Vec::new();
1067        let mut variable_local_idx_size_list = Vec::<(usize, usize)>::new();
1068        let mut count_variable_local_idx: usize = 0;
1069
1070        for var_key in &residual_block.variable_key_list {
1071            if let Some(variable) = variables.get(var_key) {
1072                param_vectors.push(variable.to_vector());
1073                let var_size = variable.get_size();
1074                var_sizes.push(var_size);
1075                variable_local_idx_size_list.push((count_variable_local_idx, var_size));
1076                count_variable_local_idx += var_size;
1077            }
1078        }
1079
1080        let (mut res, jac_opt) = residual_block.factor.linearize(&param_vectors, true);
1081        let mut jac = jac_opt.ok_or_else(|| {
1082            CoreError::FactorLinearization(
1083                "Factor returned None for Jacobian when compute_jacobian=true".to_string(),
1084            )
1085            .log()
1086        })?;
1087
1088        // Apply loss function if present (critical for robust optimization)
1089        if let Some(loss_func) = &residual_block.loss_func {
1090            let squared_norm = res.dot(&res);
1091            let corrector = Corrector::new(loss_func.as_ref(), squared_norm);
1092            corrector.correct_jacobian(&res, &mut jac);
1093            corrector.correct_residuals(&mut res);
1094        }
1095
1096        // Update total residual
1097        {
1098            let mut total_residual = total_residual.lock().map_err(|e| {
1099                CoreError::ParallelComputation(
1100                    "Failed to acquire lock on total residual".to_string(),
1101                )
1102                .log_with_source(e)
1103            })?;
1104
1105            // Copy residual values from nalgebra DVector to faer Col
1106            let start_idx = residual_block.residual_row_start_idx;
1107            let dim = residual_block.factor.get_dimension();
1108            let mut total_residual_mut = total_residual.as_mut();
1109            for i in 0..dim {
1110                total_residual_mut[start_idx + i] = res[i];
1111            }
1112        }
1113
1114        // Extract Jacobian values in the correct order
1115        let mut local_jacobian_values = Vec::new();
1116        for (i, var_key) in residual_block.variable_key_list.iter().enumerate() {
1117            if variable_index_sparce_matrix.contains_key(var_key) {
1118                let (variable_local_idx, var_size) = variable_local_idx_size_list[i];
1119                let variable_jac = jac.view((0, variable_local_idx), (jac.shape().0, var_size));
1120
1121                for row_idx in 0..jac.shape().0 {
1122                    for col_idx in 0..var_size {
1123                        local_jacobian_values.push(variable_jac[(row_idx, col_idx)]);
1124                    }
1125                }
1126            } else {
1127                return Err(CoreError::Variable(format!(
1128                    "Missing key {} in variable-to-column-index mapping",
1129                    var_key
1130                ))
1131                .log()
1132                .into());
1133            }
1134        }
1135
1136        Ok(local_jacobian_values)
1137    }
1138
1139    /// Log residual vector to a text file
1140    pub fn log_residual_to_file(
1141        &self,
1142        residual: &nalgebra::DVector<f64>,
1143        filename: &str,
1144    ) -> Result<(), Error> {
1145        let mut file = File::create(filename)?;
1146        writeln!(file, "# Residual vector - {} elements", residual.len())?;
1147        for (i, &value) in residual.iter().enumerate() {
1148            writeln!(file, "{}: {:.12}", i, value)?;
1149        }
1150        Ok(())
1151    }
1152
1153    /// Log sparse Jacobian matrix to a text file
1154    pub fn log_sparse_jacobian_to_file(
1155        &self,
1156        jacobian: &SparseColMat<usize, f64>,
1157        filename: &str,
1158    ) -> Result<(), Error> {
1159        let mut file = File::create(filename)?;
1160        writeln!(
1161            file,
1162            "# Sparse Jacobian matrix - {} x {} ({} non-zeros)",
1163            jacobian.nrows(),
1164            jacobian.ncols(),
1165            jacobian.compute_nnz()
1166        )?;
1167        writeln!(file, "# Matrix saved as dimensions and non-zero count only")?;
1168        writeln!(file, "# For detailed access, convert to dense matrix first")?;
1169        Ok(())
1170    }
1171
1172    /// Log variables to a text file
1173    pub fn log_variables_to_file(
1174        &self,
1175        variables: &HashMap<String, VariableEnum>,
1176        filename: &str,
1177    ) -> Result<(), Error> {
1178        let mut file = File::create(filename)?;
1179        writeln!(file, "# Variables - {} total", variables.len())?;
1180        writeln!(file, "# Format: variable_name: [values...]")?;
1181
1182        let mut sorted_vars: Vec<_> = variables.keys().collect();
1183        sorted_vars.sort();
1184
1185        for var_name in sorted_vars {
1186            let var_vector = variables[var_name].to_vector();
1187            write!(file, "{}: [", var_name)?;
1188            for (i, &value) in var_vector.iter().enumerate() {
1189                write!(file, "{:.12}", value)?;
1190                if i < var_vector.len() - 1 {
1191                    write!(file, ", ")?;
1192                }
1193            }
1194            writeln!(file, "]")?;
1195        }
1196        Ok(())
1197    }
1198
1199    /// Compute per-variable covariances and set them in Variable objects
1200    ///
1201    /// This method computes the full covariance matrix by inverting the Hessian
1202    /// from the linear solver, then extracts per-variable covariance blocks and
1203    /// stores them in the corresponding Variable objects.
1204    ///
1205    /// # Arguments
1206    /// * `linear_solver` - Mutable reference to the linear solver containing the cached Hessian
1207    /// * `variables` - Mutable map of variables where covariances will be stored
1208    /// * `variable_index_map` - Map from variable names to their starting column indices
1209    ///
1210    /// # Returns
1211    /// `Some(HashMap)` containing per-variable covariance matrices if successful, `None` otherwise
1212    ///
1213    pub fn compute_and_set_covariances(
1214        &self,
1215        linear_solver: &mut Box<dyn SparseLinearSolver>,
1216        variables: &mut HashMap<String, VariableEnum>,
1217        variable_index_map: &HashMap<String, usize>,
1218    ) -> Option<HashMap<String, Mat<f64>>> {
1219        // Compute the full covariance matrix (H^{-1}) using the linear solver
1220        linear_solver.compute_covariance_matrix()?;
1221        let full_cov = linear_solver.get_covariance_matrix()?.clone();
1222
1223        // Extract per-variable covariance blocks from the full matrix
1224        let per_var_covariances =
1225            extract_variable_covariances(&full_cov, variables, variable_index_map);
1226
1227        // Set covariances in Variable objects for easy access
1228        for (var_name, cov) in &per_var_covariances {
1229            if let Some(var) = variables.get_mut(var_name) {
1230                var.set_covariance(cov.clone());
1231            }
1232        }
1233
1234        Some(per_var_covariances)
1235    }
1236}
1237
1238#[cfg(test)]
1239mod tests {
1240    use super::*;
1241    use crate::core::loss_functions::HuberLoss;
1242    use crate::factors::{BetweenFactor, PriorFactor};
1243    use apex_manifolds::{ManifoldType, se2::SE2, se3::SE3};
1244    use nalgebra::{Quaternion, Vector3, dvector};
1245    use std::collections::HashMap;
1246
1247    type TestResult = Result<(), Box<dyn std::error::Error>>;
1248    type TestProblemResult = Result<
1249        (
1250            Problem,
1251            HashMap<String, (ManifoldType, nalgebra::DVector<f64>)>,
1252        ),
1253        Box<dyn std::error::Error>,
1254    >;
1255
1256    /// Create a test SE2 dataset with 10 vertices in a loop
1257    fn create_se2_test_problem() -> TestProblemResult {
1258        let mut problem = Problem::new();
1259        let mut initial_values = HashMap::new();
1260
1261        // Create 10 SE2 poses in a rough circle pattern
1262        let poses = vec![
1263            (0.0, 0.0, 0.0),    // x0: origin
1264            (1.0, 0.0, 0.1),    // x1: move right
1265            (1.5, 1.0, 0.5),    // x2: move up-right
1266            (1.0, 2.0, 1.0),    // x3: move up
1267            (0.0, 2.5, 1.5),    // x4: move up-left
1268            (-1.0, 2.0, 2.0),   // x5: move left
1269            (-1.5, 1.0, 2.5),   // x6: move down-left
1270            (-1.0, 0.0, 3.0),   // x7: move down
1271            (-0.5, -0.5, -2.8), // x8: move down-right
1272            (0.5, -0.5, -2.3),  // x9: back towards origin
1273        ];
1274
1275        // Add vertices using [x, y, theta] ordering
1276        for (i, (x, y, theta)) in poses.iter().enumerate() {
1277            let var_name = format!("x{}", i);
1278            let se2_data = dvector![*x, *y, *theta];
1279            initial_values.insert(var_name, (ManifoldType::SE2, se2_data));
1280        }
1281
1282        // Add chain of between factors
1283        for i in 0..9 {
1284            let from_pose = poses[i];
1285            let to_pose = poses[i + 1];
1286
1287            // Compute relative transformation
1288            let dx = to_pose.0 - from_pose.0;
1289            let dy = to_pose.1 - from_pose.1;
1290            let dtheta = to_pose.2 - from_pose.2;
1291
1292            let between_factor = BetweenFactor::new(SE2::from_xy_angle(dx, dy, dtheta));
1293            problem.add_residual_block(
1294                &[&format!("x{}", i), &format!("x{}", i + 1)],
1295                Box::new(between_factor),
1296                Some(Box::new(HuberLoss::new(1.0)?)),
1297            );
1298        }
1299
1300        // Add loop closure from x9 back to x0
1301        let dx = poses[0].0 - poses[9].0;
1302        let dy = poses[0].1 - poses[9].1;
1303        let dtheta = poses[0].2 - poses[9].2;
1304
1305        let loop_closure = BetweenFactor::new(SE2::from_xy_angle(dx, dy, dtheta));
1306        problem.add_residual_block(
1307            &["x9", "x0"],
1308            Box::new(loop_closure),
1309            Some(Box::new(HuberLoss::new(1.0)?)),
1310        );
1311
1312        // Add prior factor for x0
1313        let prior_factor = PriorFactor {
1314            data: dvector![0.0, 0.0, 0.0],
1315        };
1316        problem.add_residual_block(&["x0"], Box::new(prior_factor), None);
1317
1318        Ok((problem, initial_values))
1319    }
1320
1321    /// Create a test SE3 dataset with 8 vertices in a 3D pattern
1322    fn create_se3_test_problem() -> TestProblemResult {
1323        let mut problem = Problem::new();
1324        let mut initial_values = HashMap::new();
1325
1326        // Create 8 SE3 poses in a rough 3D cube pattern
1327        let poses = [
1328            // Bottom face of cube
1329            (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0),   // x0: origin
1330            (1.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.995), // x1: +X
1331            (1.0, 1.0, 0.0, 0.0, 0.0, 0.2, 0.98),  // x2: +X+Y
1332            (0.0, 1.0, 0.0, 0.0, 0.0, 0.3, 0.955), // x3: +Y
1333            // Top face of cube
1334            (0.0, 0.0, 1.0, 0.1, 0.0, 0.0, 0.995), // x4: +Z
1335            (1.0, 0.0, 1.0, 0.1, 0.0, 0.1, 0.99),  // x5: +X+Z
1336            (1.0, 1.0, 1.0, 0.1, 0.0, 0.2, 0.975), // x6: +X+Y+Z
1337            (0.0, 1.0, 1.0, 0.1, 0.0, 0.3, 0.95),  // x7: +Y+Z
1338        ];
1339
1340        // Add vertices using [tx, ty, tz, qw, qx, qy, qz] ordering
1341        for (i, (tx, ty, tz, qx, qy, qz, qw)) in poses.iter().enumerate() {
1342            let var_name = format!("x{}", i);
1343            let se3_data = dvector![*tx, *ty, *tz, *qw, *qx, *qy, *qz];
1344            initial_values.insert(var_name, (ManifoldType::SE3, se3_data));
1345        }
1346
1347        // Add between factors connecting the cube edges
1348        let edges = vec![
1349            (0, 1),
1350            (1, 2),
1351            (2, 3),
1352            (3, 0), // Bottom face
1353            (4, 5),
1354            (5, 6),
1355            (6, 7),
1356            (7, 4), // Top face
1357            (0, 4),
1358            (1, 5),
1359            (2, 6),
1360            (3, 7), // Vertical edges
1361        ];
1362
1363        for (from_idx, to_idx) in edges {
1364            let from_pose = poses[from_idx];
1365            let to_pose = poses[to_idx];
1366
1367            // Create a simple relative transformation (simplified for testing)
1368            let relative_se3 = SE3::from_translation_quaternion(
1369                Vector3::new(
1370                    to_pose.0 - from_pose.0, // dx
1371                    to_pose.1 - from_pose.1, // dy
1372                    to_pose.2 - from_pose.2, // dz
1373                ),
1374                Quaternion::new(1.0, 0.0, 0.0, 0.0), // identity quaternion
1375            );
1376
1377            let between_factor = BetweenFactor::new(relative_se3);
1378            problem.add_residual_block(
1379                &[&format!("x{}", from_idx), &format!("x{}", to_idx)],
1380                Box::new(between_factor),
1381                Some(Box::new(HuberLoss::new(1.0)?)),
1382            );
1383        }
1384
1385        // Add prior factor for x0
1386        let prior_factor = PriorFactor {
1387            data: dvector![0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
1388        };
1389        problem.add_residual_block(&["x0"], Box::new(prior_factor), None);
1390
1391        Ok((problem, initial_values))
1392    }
1393
1394    #[test]
1395    fn test_problem_construction_se2() -> TestResult {
1396        let (problem, initial_values) = create_se2_test_problem()?;
1397
1398        // Test basic problem properties
1399        assert_eq!(problem.num_residual_blocks(), 11); // 9 between + 1 loop closure + 1 prior
1400        assert_eq!(problem.total_residual_dimension, 33); // 11 * 3
1401        assert_eq!(initial_values.len(), 10);
1402
1403        Ok(())
1404    }
1405
1406    #[test]
1407    fn test_problem_construction_se3() -> TestResult {
1408        let (problem, initial_values) = create_se3_test_problem()?;
1409
1410        // Test basic problem properties
1411        assert_eq!(problem.num_residual_blocks(), 13); // 12 between + 1 prior
1412        assert_eq!(problem.total_residual_dimension, 79); // 12 * 6 + 1 * 7 (SE3 between factors are 6-dim, prior factor is 7-dim)
1413        assert_eq!(initial_values.len(), 8);
1414
1415        Ok(())
1416    }
1417
1418    #[test]
1419    fn test_variable_initialization_se2() -> TestResult {
1420        let (problem, initial_values) = create_se2_test_problem()?;
1421
1422        // Test variable initialization
1423        let variables = problem.initialize_variables(&initial_values);
1424        assert_eq!(variables.len(), 10);
1425
1426        // Test variable sizes
1427        for (name, var) in &variables {
1428            assert_eq!(
1429                var.get_size(),
1430                3,
1431                "SE2 variable {} should have size 3",
1432                name
1433            );
1434        }
1435
1436        // Test conversion to DVector
1437        for (name, var) in &variables {
1438            let vec = var.to_vector();
1439            assert_eq!(
1440                vec.len(),
1441                3,
1442                "SE2 variable {} vector should have length 3",
1443                name
1444            );
1445        }
1446
1447        Ok(())
1448    }
1449
1450    #[test]
1451    fn test_variable_initialization_se3() -> TestResult {
1452        let (problem, initial_values) = create_se3_test_problem()?;
1453
1454        // Test variable initialization
1455        let variables = problem.initialize_variables(&initial_values);
1456        assert_eq!(variables.len(), 8);
1457
1458        // Test variable sizes
1459        for (name, var) in &variables {
1460            assert_eq!(
1461                var.get_size(),
1462                6,
1463                "SE3 variable {} should have size 6 (DOF)",
1464                name
1465            );
1466        }
1467
1468        // Test conversion to DVector
1469        for (name, var) in &variables {
1470            let vec = var.to_vector();
1471            assert_eq!(
1472                vec.len(),
1473                7,
1474                "SE3 variable {} vector should have length 7",
1475                name
1476            );
1477        }
1478
1479        Ok(())
1480    }
1481
1482    #[test]
1483    fn test_column_mapping_se2() -> TestResult {
1484        let (problem, initial_values) = create_se2_test_problem()?;
1485        let variables = problem.initialize_variables(&initial_values);
1486
1487        // Create column mapping for variables
1488        let mut variable_index_sparce_matrix = HashMap::new();
1489        let mut col_offset = 0;
1490        let mut sorted_vars: Vec<_> = variables.keys().collect();
1491        sorted_vars.sort(); // Ensure consistent ordering
1492
1493        for var_name in sorted_vars {
1494            variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
1495            col_offset += variables[var_name].get_size();
1496        }
1497
1498        // Test total degrees of freedom
1499        let total_dof: usize = variables.values().map(|v| v.get_size()).sum();
1500        assert_eq!(total_dof, 30); // 10 variables * 3 DOF each
1501        assert_eq!(col_offset, 30);
1502
1503        // Test each variable has correct column mapping
1504        for (var_name, &col_idx) in &variable_index_sparce_matrix {
1505            assert!(
1506                col_idx < total_dof,
1507                "Column index {} for {} should be < {}",
1508                col_idx,
1509                var_name,
1510                total_dof
1511            );
1512        }
1513
1514        Ok(())
1515    }
1516
1517    #[test]
1518    fn test_symbolic_structure_se2() -> TestResult {
1519        let (problem, initial_values) = create_se2_test_problem()?;
1520        let variables = problem.initialize_variables(&initial_values);
1521
1522        // Create column mapping
1523        let mut variable_index_sparce_matrix = HashMap::new();
1524        let mut col_offset = 0;
1525        let mut sorted_vars: Vec<_> = variables.keys().collect();
1526        sorted_vars.sort();
1527
1528        for var_name in sorted_vars {
1529            variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
1530            col_offset += variables[var_name].get_size();
1531        }
1532
1533        // Build symbolic structure
1534        let symbolic_structure = problem.build_symbolic_structure(
1535            &variables,
1536            &variable_index_sparce_matrix,
1537            col_offset,
1538        )?;
1539
1540        // Test symbolic structure dimensions
1541        assert_eq!(
1542            symbolic_structure.pattern.nrows(),
1543            problem.total_residual_dimension
1544        );
1545        assert_eq!(symbolic_structure.pattern.ncols(), 30); // total DOF
1546
1547        Ok(())
1548    }
1549
1550    #[test]
1551    fn test_residual_jacobian_computation_se2() -> TestResult {
1552        let (problem, initial_values) = create_se2_test_problem()?;
1553        let variables = problem.initialize_variables(&initial_values);
1554
1555        // Create column mapping
1556        let mut variable_index_sparce_matrix = HashMap::new();
1557        let mut col_offset = 0;
1558        let mut sorted_vars: Vec<_> = variables.keys().collect();
1559        sorted_vars.sort();
1560
1561        for var_name in sorted_vars {
1562            variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
1563            col_offset += variables[var_name].get_size();
1564        }
1565
1566        // Test sparse computation
1567        let symbolic_structure = problem.build_symbolic_structure(
1568            &variables,
1569            &variable_index_sparce_matrix,
1570            col_offset,
1571        )?;
1572        let (residual_sparse, jacobian_sparse) = problem.compute_residual_and_jacobian_sparse(
1573            &variables,
1574            &variable_index_sparce_matrix,
1575            &symbolic_structure,
1576        )?;
1577
1578        // Test sparse dimensions
1579        assert_eq!(residual_sparse.nrows(), problem.total_residual_dimension);
1580        assert_eq!(jacobian_sparse.nrows(), problem.total_residual_dimension);
1581        assert_eq!(jacobian_sparse.ncols(), 30);
1582
1583        Ok(())
1584    }
1585
1586    #[test]
1587    fn test_residual_jacobian_computation_se3() -> TestResult {
1588        let (problem, initial_values) = create_se3_test_problem()?;
1589        let variables = problem.initialize_variables(&initial_values);
1590
1591        // Create column mapping
1592        let mut variable_index_sparce_matrix = HashMap::new();
1593        let mut col_offset = 0;
1594        let mut sorted_vars: Vec<_> = variables.keys().collect();
1595        sorted_vars.sort();
1596
1597        for var_name in sorted_vars {
1598            variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
1599            col_offset += variables[var_name].get_size();
1600        }
1601
1602        // Test sparse computation
1603        let symbolic_structure = problem.build_symbolic_structure(
1604            &variables,
1605            &variable_index_sparce_matrix,
1606            col_offset,
1607        )?;
1608        let (residual_sparse, jacobian_sparse) = problem.compute_residual_and_jacobian_sparse(
1609            &variables,
1610            &variable_index_sparce_matrix,
1611            &symbolic_structure,
1612        )?;
1613
1614        // Test sparse dimensions match
1615        assert_eq!(residual_sparse.nrows(), problem.total_residual_dimension);
1616        assert_eq!(jacobian_sparse.nrows(), problem.total_residual_dimension);
1617        assert_eq!(jacobian_sparse.ncols(), 48); // 8 variables * 6 DOF each
1618
1619        Ok(())
1620    }
1621
1622    #[test]
1623    fn test_residual_block_operations() -> TestResult {
1624        let mut problem = Problem::new();
1625
1626        // Test adding residual blocks
1627        let block_id1 = problem.add_residual_block(
1628            &["x0", "x1"],
1629            Box::new(BetweenFactor::new(SE2::from_xy_angle(1.0, 0.0, 0.1))),
1630            Some(Box::new(HuberLoss::new(1.0)?)),
1631        );
1632
1633        let block_id2 = problem.add_residual_block(
1634            &["x0"],
1635            Box::new(PriorFactor {
1636                data: dvector![0.0, 0.0, 0.0],
1637            }),
1638            None,
1639        );
1640
1641        assert_eq!(problem.num_residual_blocks(), 2);
1642        assert_eq!(problem.total_residual_dimension, 6); // 3 + 3
1643        assert_eq!(block_id1, 0);
1644        assert_eq!(block_id2, 1);
1645
1646        // Test removing residual blocks
1647        let removed_block = problem.remove_residual_block(block_id1);
1648        assert!(removed_block.is_some());
1649        assert_eq!(problem.num_residual_blocks(), 1);
1650        assert_eq!(problem.total_residual_dimension, 3); // Only prior factor remains
1651
1652        // Test removing non-existent block
1653        let non_existent = problem.remove_residual_block(999);
1654        assert!(non_existent.is_none());
1655
1656        Ok(())
1657    }
1658
1659    #[test]
1660    fn test_variable_constraints() -> TestResult {
1661        let mut problem = Problem::new();
1662
1663        // Test fixing variables
1664        problem.fix_variable("x0", 0);
1665        problem.fix_variable("x0", 1);
1666        problem.fix_variable("x1", 2);
1667
1668        assert!(problem.fixed_variable_indexes.contains_key("x0"));
1669        assert!(problem.fixed_variable_indexes.contains_key("x1"));
1670        assert_eq!(problem.fixed_variable_indexes["x0"].len(), 2);
1671        assert_eq!(problem.fixed_variable_indexes["x1"].len(), 1);
1672
1673        // Test unfixing variables
1674        problem.unfix_variable("x0");
1675        assert!(!problem.fixed_variable_indexes.contains_key("x0"));
1676        assert!(problem.fixed_variable_indexes.contains_key("x1"));
1677
1678        // Test variable bounds
1679        problem.set_variable_bounds("x2", 0, -1.0, 1.0);
1680        problem.set_variable_bounds("x2", 1, -2.0, 2.0);
1681        problem.set_variable_bounds("x3", 0, 0.0, 5.0);
1682
1683        assert!(problem.variable_bounds.contains_key("x2"));
1684        assert!(problem.variable_bounds.contains_key("x3"));
1685        assert_eq!(problem.variable_bounds["x2"].len(), 2);
1686        assert_eq!(problem.variable_bounds["x3"].len(), 1);
1687
1688        // Test removing bounds
1689        problem.remove_variable_bounds("x2");
1690        assert!(!problem.variable_bounds.contains_key("x2"));
1691        assert!(problem.variable_bounds.contains_key("x3"));
1692
1693        Ok(())
1694    }
1695}