// apex_solver/core/problem.rs
1//! Optimization problem definition and sparse Jacobian computation.
2//!
3//! The `Problem` struct is the central component that defines a factor graph optimization problem.
4//! It manages residual blocks (constraints), variables, and the construction of sparse Jacobian
5//! matrices for efficient nonlinear least squares optimization.
6//!
7//! # Factor Graph Representation
8//!
9//! The optimization problem is represented as a bipartite factor graph:
10//!
11//! ```text
//! Variables:   x0 --- x1 --- x2 --- x3
//!              |      |      |      |
//! Factors:     f0     f1     f2     f3    (constraints/measurements)
15//! ```
16//!
17//! Each factor connects one or more variables and contributes a residual (error) term to the
18//! overall cost function:
19//!
20//! ```text
21//! minimize Σ_i ρ(||r_i(x)||²)
22//! ```
23//!
24//! where `r_i(x)` is the residual for factor i, and ρ is an optional robust loss function.
25//!
26//! # Key Responsibilities
27//!
28//! 1. **Residual Block Management**: Add/remove factors and track their structure
29//! 2. **Variable Management**: Initialize variables with manifold types and constraints
30//! 3. **Sparsity Pattern**: Build symbolic structure for efficient sparse linear algebra
31//! 4. **Linearization**: Compute residuals and Jacobians in parallel
32//! 5. **Covariance**: Extract per-variable uncertainty estimates after optimization
33//!
34//! # Sparse Jacobian Structure
35//!
36//! The Jacobian matrix `J = ∂r/∂x` is sparse because each factor only depends on a small
37//! subset of variables. For example, a between factor connecting x0 and x1 contributes
38//! a 3×6 block (SE2) or 6×12 block (SE3) to the Jacobian, leaving the rest as zeros.
39//!
40//! The Problem pre-computes the sparsity pattern once, then efficiently fills in the
41//! numerical values during each iteration.
42//!
43//! # Mixed Manifold Support
44//!
45//! The Problem supports mixed manifold types in a single optimization problem via
46//! [`VariableEnum`]. This allows:
47//! - SE2 and SE3 poses in the same graph
48//! - SO3 rotations with R³ landmarks
49//! - Any combination of supported manifolds
50//!
51//! # Example: Building a Problem
52//!
53//! ```
54//! use apex_solver::core::problem::Problem;
55//! use apex_solver::factors::{BetweenFactorSE2, PriorFactor};
56//! use apex_solver::core::loss_functions::HuberLoss;
57//! use apex_solver::manifold::ManifoldType;
58//! use nalgebra::{DVector, dvector};
59//! use std::collections::HashMap;
60//! # use apex_solver::error::ApexSolverResult;
61//! # fn example() -> ApexSolverResult<()> {
62//!
63//! let mut problem = Problem::new();
64//!
65//! // Add prior factor to anchor the first pose
66//! let prior = Box::new(PriorFactor {
67//! data: dvector![0.0, 0.0, 0.0],
68//! });
69//! problem.add_residual_block(&["x0"], prior, None);
70//!
71//! // Add between factor with robust loss
72//! let between = Box::new(BetweenFactorSE2::new(1.0, 0.0, 0.1));
73//! let loss: Option<Box<dyn apex_solver::core::loss_functions::LossFunction + Send>> =
74//! Some(Box::new(HuberLoss::new(1.0)?));
75//! problem.add_residual_block(&["x0", "x1"], between, loss);
76//!
77//! // Initialize variables
78//! let mut initial_values = HashMap::new();
79//! initial_values.insert("x0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
80//! initial_values.insert("x1".to_string(), (ManifoldType::SE2, dvector![0.9, 0.1, 0.12]));
81//!
82//! let variables = problem.initialize_variables(&initial_values);
83//! # Ok(())
84//! # }
85//! # example().unwrap();
86//! ```
87
88use std::{
89 collections::{HashMap, HashSet},
90 fs::File,
91 io::{Error, Write},
92 sync::{Arc, Mutex},
93};
94
95use faer::{
96 Col, Mat, MatRef,
97 sparse::{Argsort, Pair, SparseColMat, SymbolicSparseColMat},
98};
99use nalgebra::DVector;
100use rayon::prelude::*;
101use tracing::warn;
102
103use crate::{
104 core::{
105 CoreError, corrector::Corrector, loss_functions::LossFunction,
106 residual_block::ResidualBlock, variable::Variable,
107 },
108 error::{ApexSolverError, ApexSolverResult},
109 factors::Factor,
110 linalg::{SparseLinearSolver, extract_variable_covariances},
111 manifold::{ManifoldType, rn, se2, se3, so2, so3},
112};
113
114/// Symbolic structure for sparse matrix operations.
115///
116/// Contains the sparsity pattern (which entries are non-zero) and an ordering
117/// for efficient numerical computation. This is computed once at the beginning
118/// and reused throughout optimization.
119///
120/// # Fields
121///
122/// - `pattern`: The symbolic sparse column matrix structure (row/col indices of non-zeros)
123/// - `order`: A fill-reducing ordering/permutation for numerical stability
pub struct SymbolicStructure {
    /// Sparsity pattern: the (row, col) locations of structurally non-zero
    /// Jacobian entries, stored in column-major symbolic form.
    pub pattern: SymbolicSparseColMat<usize>,
    /// Argsort/permutation produced alongside `pattern`, used when filling
    /// numerical values into the symbolic structure.
    pub order: Argsort<usize>,
}
128
/// Enum to handle mixed manifold variable types in a single problem.
#[derive(Clone)]
pub enum VariableEnum {
    /// Euclidean vector variable R^n (dynamic tangent dimension).
    Rn(Variable<rn::Rn>),
    /// 2D rigid-body pose (3 tangent DOF).
    SE2(Variable<se2::SE2>),
    /// 3D rigid-body pose (6 tangent DOF).
    SE3(Variable<se3::SE3>),
    /// 2D rotation (1 tangent DOF).
    SO2(Variable<so2::SO2>),
    /// 3D rotation (3 tangent DOF).
    SO3(Variable<so3::SO3>),
}
138
139impl VariableEnum {
140 /// Get the tangent space size for this variable
141 pub fn get_size(&self) -> usize {
142 match self {
143 VariableEnum::Rn(var) => var.get_size(),
144 VariableEnum::SE2(var) => var.get_size(),
145 VariableEnum::SE3(var) => var.get_size(),
146 VariableEnum::SO2(var) => var.get_size(),
147 VariableEnum::SO3(var) => var.get_size(),
148 }
149 }
150
151 /// Convert to DVector for use with Factor trait
152 pub fn to_vector(&self) -> DVector<f64> {
153 match self {
154 VariableEnum::Rn(var) => var.value.clone().into(),
155 VariableEnum::SE2(var) => var.value.clone().into(),
156 VariableEnum::SE3(var) => var.value.clone().into(),
157 VariableEnum::SO2(var) => var.value.clone().into(),
158 VariableEnum::SO3(var) => var.value.clone().into(),
159 }
160 }
161
162 /// Apply a tangent space step to update this variable.
163 ///
164 /// This method applies a manifold plus operation: x_new = x ⊞ δx
165 /// where δx is a tangent vector. It supports all manifold types.
166 ///
167 /// # Arguments
168 /// * `step_slice` - View into the full step vector for this variable's DOF
169 ///
170 /// # Implementation Notes
171 /// Uses explicit clone instead of unsafe memory copy (`IntoNalgebra`) for small vectors.
172 /// This is safe and performant for typical manifold dimensions (1-6 DOF).
173 ///
174 pub fn apply_tangent_step(&mut self, step_slice: MatRef<f64>) {
175 match self {
176 VariableEnum::SE3(var) => {
177 // SE3 has 6 DOF in tangent space
178 let mut step_data: Vec<f64> = (0..6).map(|i| step_slice[(i, 0)]).collect();
179
180 // Enforce fixed indices: zero out step components for fixed DOF
181 for &fixed_idx in &var.fixed_indices {
182 if fixed_idx < 6 {
183 step_data[fixed_idx] = 0.0;
184 }
185 }
186
187 let step_dvector = DVector::from_vec(step_data);
188 let tangent = se3::SE3Tangent::from(step_dvector);
189 let new_value = var.plus(&tangent);
190 var.set_value(new_value);
191 }
192 VariableEnum::SE2(var) => {
193 // SE2 has 3 DOF in tangent space
194 let mut step_data: Vec<f64> = (0..3).map(|i| step_slice[(i, 0)]).collect();
195
196 // Enforce fixed indices: zero out step components for fixed DOF
197 for &fixed_idx in &var.fixed_indices {
198 if fixed_idx < 3 {
199 step_data[fixed_idx] = 0.0;
200 }
201 }
202
203 let step_dvector = DVector::from_vec(step_data);
204 let tangent = se2::SE2Tangent::from(step_dvector);
205 let new_value = var.plus(&tangent);
206 var.set_value(new_value);
207 }
208 VariableEnum::SO3(var) => {
209 // SO3 has 3 DOF in tangent space
210 let mut step_data: Vec<f64> = (0..3).map(|i| step_slice[(i, 0)]).collect();
211
212 // Enforce fixed indices: zero out step components for fixed DOF
213 for &fixed_idx in &var.fixed_indices {
214 if fixed_idx < 3 {
215 step_data[fixed_idx] = 0.0;
216 }
217 }
218
219 let step_dvector = DVector::from_vec(step_data);
220 let tangent = so3::SO3Tangent::from(step_dvector);
221 let new_value = var.plus(&tangent);
222 var.set_value(new_value);
223 }
224 VariableEnum::SO2(var) => {
225 // SO2 has 1 DOF in tangent space
226 let mut step_data = step_slice[(0, 0)];
227
228 // Enforce fixed indices: zero out step if index 0 is fixed
229 if var.fixed_indices.contains(&0) {
230 step_data = 0.0;
231 }
232
233 let step_dvector = DVector::from_vec(vec![step_data]);
234 let tangent = so2::SO2Tangent::from(step_dvector);
235 let new_value = var.plus(&tangent);
236 var.set_value(new_value);
237 }
238 VariableEnum::Rn(var) => {
239 // Rn has dynamic size
240 let size = var.get_size();
241 let mut step_data: Vec<f64> = (0..size).map(|i| step_slice[(i, 0)]).collect();
242
243 // Enforce fixed indices: zero out step components for fixed DOF
244 for &fixed_idx in &var.fixed_indices {
245 if fixed_idx < size {
246 step_data[fixed_idx] = 0.0;
247 }
248 }
249
250 let step_dvector = DVector::from_vec(step_data);
251 let tangent = rn::RnTangent::new(step_dvector);
252 let new_value = var.plus(&tangent);
253 var.set_value(new_value);
254 }
255 }
256 }
257
258 /// Get the covariance matrix for this variable (if computed).
259 ///
260 /// Returns `None` if covariance has not been computed.
261 ///
262 /// # Returns
263 /// Reference to the covariance matrix in tangent space
264 pub fn get_covariance(&self) -> Option<&Mat<f64>> {
265 match self {
266 VariableEnum::Rn(var) => var.get_covariance(),
267 VariableEnum::SE2(var) => var.get_covariance(),
268 VariableEnum::SE3(var) => var.get_covariance(),
269 VariableEnum::SO2(var) => var.get_covariance(),
270 VariableEnum::SO3(var) => var.get_covariance(),
271 }
272 }
273
274 /// Set the covariance matrix for this variable.
275 ///
276 /// The covariance matrix should be square with dimension equal to
277 /// the tangent space dimension of this variable.
278 ///
279 /// # Arguments
280 /// * `cov` - Covariance matrix in tangent space
281 pub fn set_covariance(&mut self, cov: Mat<f64>) {
282 match self {
283 VariableEnum::Rn(var) => var.set_covariance(cov),
284 VariableEnum::SE2(var) => var.set_covariance(cov),
285 VariableEnum::SE3(var) => var.set_covariance(cov),
286 VariableEnum::SO2(var) => var.set_covariance(cov),
287 VariableEnum::SO3(var) => var.set_covariance(cov),
288 }
289 }
290
291 /// Clear the covariance matrix for this variable.
292 pub fn clear_covariance(&mut self) {
293 match self {
294 VariableEnum::Rn(var) => var.clear_covariance(),
295 VariableEnum::SE2(var) => var.clear_covariance(),
296 VariableEnum::SE3(var) => var.clear_covariance(),
297 VariableEnum::SO2(var) => var.clear_covariance(),
298 VariableEnum::SO3(var) => var.clear_covariance(),
299 }
300 }
301
302 /// Get the bounds for this variable.
303 ///
304 /// Returns a reference to the bounds map where keys are indices and values are (lower, upper) pairs.
305 pub fn get_bounds(&self) -> &HashMap<usize, (f64, f64)> {
306 match self {
307 VariableEnum::Rn(var) => &var.bounds,
308 VariableEnum::SE2(var) => &var.bounds,
309 VariableEnum::SE3(var) => &var.bounds,
310 VariableEnum::SO2(var) => &var.bounds,
311 VariableEnum::SO3(var) => &var.bounds,
312 }
313 }
314
315 /// Get the fixed indices for this variable.
316 ///
317 /// Returns a reference to the set of indices that should remain fixed during optimization.
318 pub fn get_fixed_indices(&self) -> &HashSet<usize> {
319 match self {
320 VariableEnum::Rn(var) => &var.fixed_indices,
321 VariableEnum::SE2(var) => &var.fixed_indices,
322 VariableEnum::SE3(var) => &var.fixed_indices,
323 VariableEnum::SO2(var) => &var.fixed_indices,
324 VariableEnum::SO3(var) => &var.fixed_indices,
325 }
326 }
327
328 /// Set the value of this variable from a vector representation.
329 ///
330 /// This is used to update the variable after applying constraints (bounds and fixed indices).
331 pub fn set_from_vector(&mut self, vec: &DVector<f64>) {
332 match self {
333 VariableEnum::Rn(var) => {
334 var.set_value(rn::Rn::new(vec.clone()));
335 }
336 VariableEnum::SE2(var) => {
337 let new_se2: se2::SE2 = vec.clone().into();
338 var.set_value(new_se2);
339 }
340 VariableEnum::SE3(var) => {
341 let new_se3: se3::SE3 = vec.clone().into();
342 var.set_value(new_se3);
343 }
344 VariableEnum::SO2(var) => {
345 let new_so2: so2::SO2 = vec.clone().into();
346 var.set_value(new_so2);
347 }
348 VariableEnum::SO3(var) => {
349 let new_so3: so3::SO3 = vec.clone().into();
350 var.set_value(new_so3);
351 }
352 }
353 }
354}
355
356/// The optimization problem definition for factor graph optimization.
357///
358/// Manages residual blocks (factors/constraints), variables, and the sparse Jacobian structure.
359/// Supports mixed manifold types (SE2, SE3, SO2, SO3, Rn) in a single problem and provides
360/// efficient parallel residual/Jacobian computation.
361///
362/// # Architecture
363///
364/// The Problem acts as a container and coordinator:
365/// - Stores all residual blocks (factors with optional loss functions)
366/// - Tracks the global structure (which variables connect to which factors)
367/// - Builds and maintains the sparse Jacobian pattern
368/// - Provides parallel residual/Jacobian evaluation using rayon
369/// - Manages variable constraints (fixed indices, bounds)
370///
371/// # Workflow
372///
373/// 1. **Construction**: Create a new Problem with `Problem::new()`
374/// 2. **Add Factors**: Use `add_residual_block()` to add constraints
375/// 3. **Initialize Variables**: Use `initialize_variables()` with initial values
376/// 4. **Build Sparsity**: Use `build_symbolic_structure()` once before optimization
377/// 5. **Linearize**: Call `compute_residual_and_jacobian_sparse()` each iteration
378/// 6. **Extract Covariance**: Use `compute_and_set_covariances()` after convergence
379///
380/// # Example
381///
382/// ```
383/// use apex_solver::core::problem::Problem;
384/// use apex_solver::factors::BetweenFactorSE2;
385/// use apex_solver::manifold::ManifoldType;
386/// use nalgebra::dvector;
387/// use std::collections::HashMap;
388///
389/// let mut problem = Problem::new();
390///
391/// // Add a between factor
392/// let factor = Box::new(BetweenFactorSE2::new(1.0, 0.0, 0.1));
393/// problem.add_residual_block(&["x0", "x1"], factor, None);
394///
395/// // Initialize variables
396/// let mut initial = HashMap::new();
397/// initial.insert("x0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
398/// initial.insert("x1".to_string(), (ManifoldType::SE2, dvector![1.0, 0.0, 0.1]));
399///
400/// let variables = problem.initialize_variables(&initial);
401/// assert_eq!(variables.len(), 2);
402/// ```
pub struct Problem {
    /// Total dimension of the stacked residual vector (sum of all residual block dimensions)
    pub total_residual_dimension: usize,

    /// Counter for assigning unique IDs to residual blocks.
    /// Monotonically increasing; IDs of removed blocks are never reused.
    residual_id_count: usize,

    /// Map from residual block ID to ResidualBlock instance
    residual_blocks: HashMap<usize, ResidualBlock>,

    /// Variables with fixed indices (e.g., fix first pose's x,y coordinates)
    /// Maps variable name -> set of tangent-space indices to fix.
    /// Applied to variables when `initialize_variables()` is called.
    pub fixed_variable_indexes: HashMap<String, HashSet<usize>>,

    /// Variable bounds (box constraints on individual DOF)
    /// Maps variable name -> (index -> (lower_bound, upper_bound)).
    /// Applied to variables when `initialize_variables()` is called.
    pub variable_bounds: HashMap<String, HashMap<usize, (f64, f64)>>,
}
impl Default for Problem {
    /// Equivalent to [`Problem::new`]: an empty problem with no residual
    /// blocks, constraints, or bounds.
    fn default() -> Self {
        Self::new()
    }
}
426
427impl Problem {
428 /// Create a new empty optimization problem.
429 ///
430 /// # Returns
431 ///
432 /// A new `Problem` with no residual blocks or variables
433 ///
434 /// # Example
435 ///
436 /// ```
437 /// use apex_solver::core::problem::Problem;
438 ///
439 /// let problem = Problem::new();
440 /// assert_eq!(problem.num_residual_blocks(), 0);
441 /// assert_eq!(problem.total_residual_dimension, 0);
442 /// ```
443 pub fn new() -> Self {
444 Self {
445 total_residual_dimension: 0,
446 residual_id_count: 0,
447 residual_blocks: HashMap::new(),
448 fixed_variable_indexes: HashMap::new(),
449 variable_bounds: HashMap::new(),
450 }
451 }
452
453 /// Add a residual block (factor with optional loss function) to the problem.
454 ///
455 /// This is the primary method for building the factor graph. Each call adds one constraint
456 /// connecting one or more variables.
457 ///
458 /// # Arguments
459 ///
460 /// * `variable_key_size_list` - Names of the variables this factor connects (order matters)
461 /// * `factor` - The factor implementation that computes residuals and Jacobians
462 /// * `loss_func` - Optional robust loss function for outlier rejection
463 ///
464 /// # Returns
465 ///
466 /// The unique ID assigned to this residual block
467 ///
468 /// # Example
469 ///
470 /// ```
471 /// use apex_solver::core::problem::Problem;
472 /// use apex_solver::factors::{BetweenFactorSE2, PriorFactor};
473 /// use apex_solver::core::loss_functions::HuberLoss;
474 /// use nalgebra::dvector;
475 /// # use apex_solver::error::ApexSolverResult;
476 /// # fn example() -> ApexSolverResult<()> {
477 ///
478 /// let mut problem = Problem::new();
479 ///
480 /// // Add prior factor (unary constraint)
481 /// let prior = Box::new(PriorFactor { data: dvector![0.0, 0.0, 0.0] });
482 /// let id1 = problem.add_residual_block(&["x0"], prior, None);
483 ///
484 /// // Add between factor with robust loss (binary constraint)
485 /// let between = Box::new(BetweenFactorSE2::new(1.0, 0.0, 0.1));
486 /// let loss: Option<Box<dyn apex_solver::core::loss_functions::LossFunction + Send>> =
487 /// Some(Box::new(HuberLoss::new(1.0)?));
488 /// let id2 = problem.add_residual_block(&["x0", "x1"], between, loss);
489 ///
490 /// assert_eq!(id1, 0);
491 /// assert_eq!(id2, 1);
492 /// assert_eq!(problem.num_residual_blocks(), 2);
493 /// # Ok(())
494 /// # }
495 /// # example().unwrap();
496 /// ```
497 pub fn add_residual_block(
498 &mut self,
499 variable_key_size_list: &[&str],
500 factor: Box<dyn Factor + Send>,
501 loss_func: Option<Box<dyn LossFunction + Send>>,
502 ) -> usize {
503 let new_residual_dimension = factor.get_dimension();
504 self.residual_blocks.insert(
505 self.residual_id_count,
506 ResidualBlock::new(
507 self.residual_id_count,
508 self.total_residual_dimension,
509 variable_key_size_list,
510 factor,
511 loss_func,
512 ),
513 );
514 let block_id = self.residual_id_count;
515 self.residual_id_count += 1;
516
517 self.total_residual_dimension += new_residual_dimension;
518
519 block_id
520 }
521
522 pub fn remove_residual_block(&mut self, block_id: usize) -> Option<ResidualBlock> {
523 if let Some(residual_block) = self.residual_blocks.remove(&block_id) {
524 self.total_residual_dimension -= residual_block.factor.get_dimension();
525 Some(residual_block)
526 } else {
527 None
528 }
529 }
530
531 pub fn fix_variable(&mut self, var_to_fix: &str, idx: usize) {
532 if let Some(var_mut) = self.fixed_variable_indexes.get_mut(var_to_fix) {
533 var_mut.insert(idx);
534 } else {
535 self.fixed_variable_indexes
536 .insert(var_to_fix.to_owned(), HashSet::from([idx]));
537 }
538 }
539
    /// Remove all fixed-index constraints registered for `var_to_unfix`.
    /// No-op if the variable has no fixed indices.
    pub fn unfix_variable(&mut self, var_to_unfix: &str) {
        self.fixed_variable_indexes.remove(var_to_unfix);
    }
543
544 pub fn set_variable_bounds(
545 &mut self,
546 var_to_bound: &str,
547 idx: usize,
548 lower_bound: f64,
549 upper_bound: f64,
550 ) {
551 if lower_bound > upper_bound {
552 warn!("lower bound is larger than upper bound");
553 } else if let Some(var_mut) = self.variable_bounds.get_mut(var_to_bound) {
554 var_mut.insert(idx, (lower_bound, upper_bound));
555 } else {
556 self.variable_bounds.insert(
557 var_to_bound.to_owned(),
558 HashMap::from([(idx, (lower_bound, upper_bound))]),
559 );
560 }
561 }
562
    /// Remove all box constraints registered for `var_to_unbound`.
    /// No-op if the variable has no bounds.
    pub fn remove_variable_bounds(&mut self, var_to_unbound: &str) {
        self.variable_bounds.remove(var_to_unbound);
    }
566
567 /// Initialize variables from initial values with manifold types.
568 ///
569 /// Converts raw initial values into typed `Variable<M>` instances wrapped in `VariableEnum`.
570 /// This method also applies any fixed indices or bounds that were set via `fix_variable()`
571 /// or `set_variable_bounds()`.
572 ///
573 /// # Arguments
574 ///
575 /// * `initial_values` - Map from variable name to (manifold type, initial value vector)
576 ///
577 /// # Returns
578 ///
579 /// Map from variable name to `VariableEnum` (typed variables ready for optimization)
580 ///
581 /// # Manifold Formats
582 ///
583 /// - **SE2**: `[x, y, theta]` (3 elements)
584 /// - **SE3**: `[tx, ty, tz, qw, qx, qy, qz]` (7 elements)
585 /// - **SO2**: `[theta]` (1 element)
586 /// - **SO3**: `[qw, qx, qy, qz]` (4 elements)
587 /// - **Rn**: `[x1, x2, ..., xn]` (n elements)
588 ///
589 /// # Example
590 ///
591 /// ```
592 /// use apex_solver::core::problem::Problem;
593 /// use apex_solver::manifold::ManifoldType;
594 /// use nalgebra::dvector;
595 /// use std::collections::HashMap;
596 ///
597 /// let problem = Problem::new();
598 ///
599 /// let mut initial = HashMap::new();
600 /// initial.insert("pose0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
601 /// initial.insert("pose1".to_string(), (ManifoldType::SE2, dvector![1.0, 0.0, 0.1]));
602 /// initial.insert("landmark".to_string(), (ManifoldType::RN, dvector![5.0, 3.0]));
603 ///
604 /// let variables = problem.initialize_variables(&initial);
605 /// assert_eq!(variables.len(), 3);
606 /// ```
607 pub fn initialize_variables(
608 &self,
609 initial_values: &HashMap<String, (ManifoldType, DVector<f64>)>,
610 ) -> HashMap<String, VariableEnum> {
611 let variables: HashMap<String, VariableEnum> = initial_values
612 .iter()
613 .map(|(k, v)| {
614 let variable_enum = match v.0 {
615 ManifoldType::SO2 => {
616 let mut var = Variable::new(so2::SO2::from(v.1.clone()));
617 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
618 var.fixed_indices = indexes.clone();
619 }
620 if let Some(bounds) = self.variable_bounds.get(k) {
621 var.bounds = bounds.clone();
622 }
623 VariableEnum::SO2(var)
624 }
625 ManifoldType::SO3 => {
626 let mut var = Variable::new(so3::SO3::from(v.1.clone()));
627 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
628 var.fixed_indices = indexes.clone();
629 }
630 if let Some(bounds) = self.variable_bounds.get(k) {
631 var.bounds = bounds.clone();
632 }
633 VariableEnum::SO3(var)
634 }
635 ManifoldType::SE2 => {
636 let mut var = Variable::new(se2::SE2::from(v.1.clone()));
637 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
638 var.fixed_indices = indexes.clone();
639 }
640 if let Some(bounds) = self.variable_bounds.get(k) {
641 var.bounds = bounds.clone();
642 }
643 VariableEnum::SE2(var)
644 }
645 ManifoldType::SE3 => {
646 let mut var = Variable::new(se3::SE3::from(v.1.clone()));
647 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
648 var.fixed_indices = indexes.clone();
649 }
650 if let Some(bounds) = self.variable_bounds.get(k) {
651 var.bounds = bounds.clone();
652 }
653 VariableEnum::SE3(var)
654 }
655 ManifoldType::RN => {
656 let mut var = Variable::new(rn::Rn::from(v.1.clone()));
657 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
658 var.fixed_indices = indexes.clone();
659 }
660 if let Some(bounds) = self.variable_bounds.get(k) {
661 var.bounds = bounds.clone();
662 }
663 VariableEnum::Rn(var)
664 }
665 };
666
667 (k.to_owned(), variable_enum)
668 })
669 .collect();
670 variables
671 }
672
    /// Get the number of residual blocks (factors) currently in the problem.
    pub fn num_residual_blocks(&self) -> usize {
        self.residual_blocks.len()
    }
677
678 /// Build symbolic structure for sparse Jacobian computation
679 ///
680 /// This method constructs the sparsity pattern of the Jacobian matrix before numerical
681 /// computation. It determines which entries in the Jacobian will be non-zero based on
682 /// the structure of the optimization problem (which residual blocks connect which variables).
683 ///
684 /// # Purpose
685 /// - Pre-allocates memory for sparse matrix operations
686 /// - Enables efficient sparse linear algebra (avoiding dense operations)
687 /// - Computed once at the beginning, used throughout optimization
688 ///
689 /// # Arguments
690 /// * `variables` - Map of variable names to their values and properties (SE2, SE3, etc.)
691 /// * `variable_index_sparce_matrix` - Map from variable name to starting column index in Jacobian
692 /// * `total_dof` - Total degrees of freedom (number of columns in Jacobian)
693 ///
694 /// # Returns
695 /// A `SymbolicStructure` containing:
696 /// - `pattern`: The symbolic sparse column matrix structure (row/col indices of non-zeros)
697 /// - `order`: An ordering/permutation for efficient numerical computation
698 ///
699 /// # Algorithm
700 /// For each residual block:
701 /// 1. Identify which variables it depends on
702 /// 2. For each (residual_dimension × variable_dof) block, mark entries as non-zero
703 /// 3. Convert to optimized sparse matrix representation
704 ///
705 /// # Example Structure
706 /// For a simple problem with 3 SE2 poses (9 DOF total):
707 /// - Between(x0, x1): Creates 3×6 block at rows 0-2, cols 0-5
708 /// - Between(x1, x2): Creates 3×6 block at rows 3-5, cols 3-8
709 /// - Prior(x0): Creates 3×3 block at rows 6-8, cols 0-2
710 ///
711 /// Result: 9×9 sparse Jacobian with 45 non-zero entries
712 pub fn build_symbolic_structure(
713 &self,
714 variables: &HashMap<String, VariableEnum>,
715 variable_index_sparce_matrix: &HashMap<String, usize>,
716 total_dof: usize,
717 ) -> ApexSolverResult<SymbolicStructure> {
718 // Vector to accumulate all (row, col) pairs that will be non-zero in the Jacobian
719 // Each Pair represents one entry in the sparse matrix
720 let mut indices = Vec::<Pair<usize, usize>>::new();
721
722 // Iterate through all residual blocks (factors/constraints) in the problem
723 // Each residual block contributes a block of entries to the Jacobian
724 self.residual_blocks.iter().for_each(|(_, residual_block)| {
725 // Create local indexing for this residual block's variables
726 // Maps each variable to its local starting index and size within this factor
727 // Example: For Between(x0, x1) with SE2: [(0, 3), (3, 3)]
728 // - x0 starts at local index 0, has 3 DOF
729 // - x1 starts at local index 3, has 3 DOF
730 let mut variable_local_idx_size_list = Vec::<(usize, usize)>::new();
731 let mut count_variable_local_idx: usize = 0;
732
733 // Build the local index mapping for this residual block
734 for var_key in &residual_block.variable_key_list {
735 if let Some(variable) = variables.get(var_key) {
736 // Store (local_start_index, dof_size) for this variable
737 variable_local_idx_size_list
738 .push((count_variable_local_idx, variable.get_size()));
739 count_variable_local_idx += variable.get_size();
740 }
741 }
742
743 // For each variable in this residual block, generate Jacobian entries
744 for (i, var_key) in residual_block.variable_key_list.iter().enumerate() {
745 if let Some(variable_global_idx) = variable_index_sparce_matrix.get(var_key) {
746 // Get the DOF size for this variable
747 let (_, var_size) = variable_local_idx_size_list[i];
748
749 // Generate all (row, col) pairs for the Jacobian block:
750 // ∂(residual) / ∂(variable)
751 //
752 // For a residual block with dimension R and variable with DOF V:
753 // Creates R × V entries in the Jacobian
754
755 // Iterate over each residual dimension (rows)
756 for row_idx in 0..residual_block.factor.get_dimension() {
757 // Iterate over each variable DOF (columns)
758 for col_idx in 0..var_size {
759 // Compute global row index:
760 // Start from this residual block's first row, add offset
761 let global_row_idx = residual_block.residual_row_start_idx + row_idx;
762
763 // Compute global column index:
764 // Start from this variable's first column, add offset
765 let global_col_idx = variable_global_idx + col_idx;
766
767 // Record this (row, col) pair as a non-zero entry
768 indices.push(Pair::new(global_row_idx, global_col_idx));
769 }
770 }
771 }
772 }
773 });
774
775 // Convert the list of (row, col) pairs into an optimized symbolic sparse matrix
776 // This performs:
777 // 1. Duplicate elimination (same entry might be referenced multiple times)
778 // 2. Sorting for column-wise storage format
779 // 3. Computing a fill-reducing ordering for numerical stability
780 // 4. Allocating the symbolic structure (no values yet, just pattern)
781 let (pattern, order) = SymbolicSparseColMat::try_new_from_indices(
782 self.total_residual_dimension, // Number of rows (total residual dimension)
783 total_dof, // Number of columns (total DOF)
784 &indices, // List of non-zero entry locations
785 )
786 .map_err(|e| {
787 CoreError::SymbolicStructure(
788 "Failed to build symbolic sparse matrix structure".to_string(),
789 )
790 .log_with_source(e)
791 })?;
792
793 // Return the symbolic structure that will be filled with numerical values later
794 Ok(SymbolicStructure { pattern, order })
795 }
796
797 /// Compute only the residual vector for the current variable values.
798 ///
799 /// This is an optimized version that skips Jacobian computation when only the cost
800 /// function value is needed (e.g., during initialization or step evaluation).
801 ///
802 /// # Arguments
803 ///
804 /// * `variables` - Current variable values (from `initialize_variables()` or updated)
805 ///
806 /// # Returns
807 ///
808 /// Residual vector as N×1 column matrix (N = total residual dimension)
809 ///
810 /// # Performance
811 ///
812 /// Approximately **2x faster** than `compute_residual_and_jacobian_sparse()` since it:
813 /// - Skips Jacobian computation for each residual block
814 /// - Avoids Jacobian matrix assembly and storage
815 /// - Only parallelizes residual evaluation
816 ///
817 /// # When to Use
818 ///
819 /// - **Initial cost computation**: When setting up optimization state
820 /// - **Step evaluation**: When computing new cost after applying parameter updates
821 /// - **Cost-only queries**: When you don't need gradients
822 ///
823 /// Use `compute_residual_and_jacobian_sparse()` when you need both residual and Jacobian
824 /// (e.g., in the main optimization iteration loop for linearization).
825 ///
826 /// # Example
827 ///
828 /// ```rust,ignore
829 /// // Initial cost evaluation (no Jacobian needed)
830 /// let residual = problem.compute_residual_sparse(&variables)?;
831 /// let initial_cost = residual.norm_l2() * residual.norm_l2();
832 /// ```
833 pub fn compute_residual_sparse(
834 &self,
835 variables: &HashMap<String, VariableEnum>,
836 ) -> ApexSolverResult<Mat<f64>> {
837 let total_residual = Arc::new(Mutex::new(Col::<f64>::zeros(self.total_residual_dimension)));
838
839 // Compute residuals in parallel (no Jacobian needed)
840 let result: Result<Vec<()>, ApexSolverError> = self
841 .residual_blocks
842 .par_iter()
843 .map(|(_, residual_block)| {
844 self.compute_residual_block(residual_block, variables, &total_residual)
845 })
846 .collect();
847
848 result?;
849
850 let total_residual = Arc::try_unwrap(total_residual)
851 .map_err(|_| {
852 CoreError::ParallelComputation(
853 "Failed to unwrap Arc for total residual".to_string(),
854 )
855 .log()
856 })?
857 .into_inner()
858 .map_err(|e| {
859 CoreError::ParallelComputation(
860 "Failed to extract mutex inner value for total residual".to_string(),
861 )
862 .log_with_source(e)
863 })?;
864
865 // Convert faer Col to Mat (column vector as n×1 matrix)
866 let residual_faer = total_residual.as_ref().as_mat().to_owned();
867 Ok(residual_faer)
868 }
869
870 /// Compute residual vector and sparse Jacobian matrix for the current variable values.
871 ///
872 /// This is the core linearization method called during each optimization iteration. It:
873 /// 1. Evaluates all residual blocks in parallel using rayon
874 /// 2. Assembles the full residual vector
875 /// 3. Constructs the sparse Jacobian matrix using the precomputed symbolic structure
876 ///
877 /// # Arguments
878 ///
879 /// * `variables` - Current variable values (from `initialize_variables()` or updated)
880 /// * `variable_index_sparce_matrix` - Map from variable name to starting column in Jacobian
881 /// * `symbolic_structure` - Precomputed sparsity pattern (from `build_symbolic_structure()`)
882 ///
883 /// # Returns
884 ///
885 /// Tuple `(residual, jacobian)` where:
886 /// - `residual`: N×1 column matrix (total residual dimension)
887 /// - `jacobian`: N×M sparse matrix (N = residual dim, M = total DOF)
888 ///
889 /// # Performance
890 ///
891 /// This method is highly optimized:
892 /// - **Parallel evaluation**: Each residual block is evaluated independently using rayon
893 /// - **Sparse storage**: Only non-zero Jacobian entries are stored and computed
894 /// - **Memory efficient**: Preallocated sparse structure avoids dynamic allocations
895 ///
896 /// Typically accounts for 40-60% of total optimization time (including sparse matrix ops).
897 ///
898 /// # When to Use
899 ///
900 /// Use this method in the main optimization loop when you need both residual and Jacobian
901 /// for linearization. For cost-only evaluation, use `compute_residual_sparse()` instead.
902 ///
903 /// # Example
904 ///
905 /// ```rust,ignore
906 /// // Inside optimizer loop:
907 /// let (residual, jacobian) = problem.compute_residual_and_jacobian_sparse(
908 /// &variables,
909 /// &variable_index_map,
910 /// &symbolic_structure,
911 /// )?;
912 ///
913 /// // Use for linear system: J^T J dx = -J^T r
914 /// ```
915 pub fn compute_residual_and_jacobian_sparse(
916 &self,
917 variables: &HashMap<String, VariableEnum>,
918 variable_index_sparce_matrix: &HashMap<String, usize>,
919 symbolic_structure: &SymbolicStructure,
920 ) -> ApexSolverResult<(Mat<f64>, SparseColMat<usize, f64>)> {
921 let total_residual = Arc::new(Mutex::new(Col::<f64>::zeros(self.total_residual_dimension)));
922
923 // OPTIMIZATION: Pre-allocate exact size to avoid double allocation and flattening
924 // This eliminates the Vec<Vec<f64>> → Vec<f64> conversion overhead
925 let total_nnz = symbolic_structure.pattern.compute_nnz();
926
927 // Collect block results with pre-computed sizes
928 let jacobian_blocks: Result<Vec<(usize, Vec<f64>)>, ApexSolverError> = self
929 .residual_blocks
930 .par_iter()
931 .map(|(_, residual_block)| {
932 let values = self.compute_residual_and_jacobian_block(
933 residual_block,
934 variables,
935 variable_index_sparce_matrix,
936 &total_residual,
937 )?;
938 let size = values.len();
939 Ok((size, values))
940 })
941 .collect();
942
943 let jacobian_blocks = jacobian_blocks?;
944
945 // Pre-allocate final vec with exact size
946 let mut jacobian_values = Vec::with_capacity(total_nnz);
947 for (_size, mut block_values) in jacobian_blocks {
948 jacobian_values.append(&mut block_values);
949 }
950
951 let total_residual = Arc::try_unwrap(total_residual)
952 .map_err(|_| {
953 CoreError::ParallelComputation(
954 "Failed to unwrap Arc for total residual".to_string(),
955 )
956 .log()
957 })?
958 .into_inner()
959 .map_err(|e| {
960 CoreError::ParallelComputation(
961 "Failed to extract mutex inner value for total residual".to_string(),
962 )
963 .log_with_source(e)
964 })?;
965
966 // Convert faer Col to Mat (column vector as n×1 matrix)
967 let residual_faer = total_residual.as_ref().as_mat().to_owned();
968 let jacobian_sparse = SparseColMat::new_from_argsort(
969 symbolic_structure.pattern.clone(),
970 &symbolic_structure.order,
971 jacobian_values.as_slice(),
972 )
973 .map_err(|e| {
974 CoreError::SymbolicStructure(
975 "Failed to create sparse Jacobian from argsort".to_string(),
976 )
977 .log_with_source(e)
978 })?;
979
980 Ok((residual_faer, jacobian_sparse))
981 }
982
983 /// Compute only the residual for a single residual block (no Jacobian).
984 ///
985 /// Helper method for `compute_residual_sparse()`.
986 fn compute_residual_block(
987 &self,
988 residual_block: &ResidualBlock,
989 variables: &HashMap<String, VariableEnum>,
990 total_residual: &Arc<Mutex<Col<f64>>>,
991 ) -> ApexSolverResult<()> {
992 let mut param_vectors: Vec<DVector<f64>> = Vec::new();
993
994 for var_key in &residual_block.variable_key_list {
995 if let Some(variable) = variables.get(var_key) {
996 param_vectors.push(variable.to_vector());
997 }
998 }
999
1000 // Compute only residual (linearize still computes Jacobian internally,
1001 // but we don't extract/store it)
1002 let (mut res, _) = residual_block.factor.linearize(¶m_vectors, false);
1003
1004 // Apply loss function if present (critical for robust optimization)
1005 if let Some(loss_func) = &residual_block.loss_func {
1006 let squared_norm = res.dot(&res);
1007 let corrector = Corrector::new(loss_func.as_ref(), squared_norm);
1008 corrector.correct_residuals(&mut res);
1009 }
1010
1011 let mut total_residual = total_residual.lock().map_err(|e| {
1012 CoreError::ParallelComputation("Failed to acquire lock on total residual".to_string())
1013 .log_with_source(e)
1014 })?;
1015
1016 // Copy residual values from nalgebra DVector to faer Col
1017 let start_idx = residual_block.residual_row_start_idx;
1018 let dim = residual_block.factor.get_dimension();
1019 let mut total_residual_mut = total_residual.as_mut();
1020 for i in 0..dim {
1021 total_residual_mut[start_idx + i] = res[i];
1022 }
1023
1024 Ok(())
1025 }
1026
1027 /// Compute residual and Jacobian for a single residual block.
1028 ///
1029 /// Helper method for `compute_residual_and_jacobian_sparse()`.
1030 fn compute_residual_and_jacobian_block(
1031 &self,
1032 residual_block: &ResidualBlock,
1033 variables: &HashMap<String, VariableEnum>,
1034 variable_index_sparce_matrix: &HashMap<String, usize>,
1035 total_residual: &Arc<Mutex<Col<f64>>>,
1036 ) -> ApexSolverResult<Vec<f64>> {
1037 let mut param_vectors: Vec<DVector<f64>> = Vec::new();
1038 let mut var_sizes: Vec<usize> = Vec::new();
1039 let mut variable_local_idx_size_list = Vec::<(usize, usize)>::new();
1040 let mut count_variable_local_idx: usize = 0;
1041
1042 for var_key in &residual_block.variable_key_list {
1043 if let Some(variable) = variables.get(var_key) {
1044 param_vectors.push(variable.to_vector());
1045 let var_size = variable.get_size();
1046 var_sizes.push(var_size);
1047 variable_local_idx_size_list.push((count_variable_local_idx, var_size));
1048 count_variable_local_idx += var_size;
1049 }
1050 }
1051
1052 let (mut res, jac_opt) = residual_block.factor.linearize(¶m_vectors, true);
1053 let mut jac = jac_opt.ok_or_else(|| {
1054 CoreError::FactorLinearization(
1055 "Factor returned None for Jacobian when compute_jacobian=true".to_string(),
1056 )
1057 .log()
1058 })?;
1059
1060 // Apply loss function if present (critical for robust optimization)
1061 if let Some(loss_func) = &residual_block.loss_func {
1062 let squared_norm = res.dot(&res);
1063 let corrector = Corrector::new(loss_func.as_ref(), squared_norm);
1064 corrector.correct_jacobian(&res, &mut jac);
1065 corrector.correct_residuals(&mut res);
1066 }
1067
1068 // Update total residual
1069 {
1070 let mut total_residual = total_residual.lock().map_err(|e| {
1071 CoreError::ParallelComputation(
1072 "Failed to acquire lock on total residual".to_string(),
1073 )
1074 .log_with_source(e)
1075 })?;
1076
1077 // Copy residual values from nalgebra DVector to faer Col
1078 let start_idx = residual_block.residual_row_start_idx;
1079 let dim = residual_block.factor.get_dimension();
1080 let mut total_residual_mut = total_residual.as_mut();
1081 for i in 0..dim {
1082 total_residual_mut[start_idx + i] = res[i];
1083 }
1084 }
1085
1086 // Extract Jacobian values in the correct order
1087 let mut local_jacobian_values = Vec::new();
1088 for (i, var_key) in residual_block.variable_key_list.iter().enumerate() {
1089 if variable_index_sparce_matrix.contains_key(var_key) {
1090 let (variable_local_idx, var_size) = variable_local_idx_size_list[i];
1091 let variable_jac = jac.view((0, variable_local_idx), (jac.shape().0, var_size));
1092
1093 for row_idx in 0..jac.shape().0 {
1094 for col_idx in 0..var_size {
1095 local_jacobian_values.push(variable_jac[(row_idx, col_idx)]);
1096 }
1097 }
1098 } else {
1099 return Err(CoreError::Variable(format!(
1100 "Missing key {} in variable-to-column-index mapping",
1101 var_key
1102 ))
1103 .log()
1104 .into());
1105 }
1106 }
1107
1108 Ok(local_jacobian_values)
1109 }
1110
1111 /// Log residual vector to a text file
1112 pub fn log_residual_to_file(
1113 &self,
1114 residual: &nalgebra::DVector<f64>,
1115 filename: &str,
1116 ) -> Result<(), Error> {
1117 let mut file = File::create(filename)?;
1118 writeln!(file, "# Residual vector - {} elements", residual.len())?;
1119 for (i, &value) in residual.iter().enumerate() {
1120 writeln!(file, "{}: {:.12}", i, value)?;
1121 }
1122 Ok(())
1123 }
1124
1125 /// Log sparse Jacobian matrix to a text file
1126 pub fn log_sparse_jacobian_to_file(
1127 &self,
1128 jacobian: &SparseColMat<usize, f64>,
1129 filename: &str,
1130 ) -> Result<(), Error> {
1131 let mut file = File::create(filename)?;
1132 writeln!(
1133 file,
1134 "# Sparse Jacobian matrix - {} x {} ({} non-zeros)",
1135 jacobian.nrows(),
1136 jacobian.ncols(),
1137 jacobian.compute_nnz()
1138 )?;
1139 writeln!(file, "# Matrix saved as dimensions and non-zero count only")?;
1140 writeln!(file, "# For detailed access, convert to dense matrix first")?;
1141 Ok(())
1142 }
1143
1144 /// Log variables to a text file
1145 pub fn log_variables_to_file(
1146 &self,
1147 variables: &HashMap<String, VariableEnum>,
1148 filename: &str,
1149 ) -> Result<(), Error> {
1150 let mut file = File::create(filename)?;
1151 writeln!(file, "# Variables - {} total", variables.len())?;
1152 writeln!(file, "# Format: variable_name: [values...]")?;
1153
1154 let mut sorted_vars: Vec<_> = variables.keys().collect();
1155 sorted_vars.sort();
1156
1157 for var_name in sorted_vars {
1158 let var_vector = variables[var_name].to_vector();
1159 write!(file, "{}: [", var_name)?;
1160 for (i, &value) in var_vector.iter().enumerate() {
1161 write!(file, "{:.12}", value)?;
1162 if i < var_vector.len() - 1 {
1163 write!(file, ", ")?;
1164 }
1165 }
1166 writeln!(file, "]")?;
1167 }
1168 Ok(())
1169 }
1170
1171 /// Compute per-variable covariances and set them in Variable objects
1172 ///
1173 /// This method computes the full covariance matrix by inverting the Hessian
1174 /// from the linear solver, then extracts per-variable covariance blocks and
1175 /// stores them in the corresponding Variable objects.
1176 ///
1177 /// # Arguments
1178 /// * `linear_solver` - Mutable reference to the linear solver containing the cached Hessian
1179 /// * `variables` - Mutable map of variables where covariances will be stored
1180 /// * `variable_index_map` - Map from variable names to their starting column indices
1181 ///
1182 /// # Returns
1183 /// `Some(HashMap)` containing per-variable covariance matrices if successful, `None` otherwise
1184 ///
1185 pub fn compute_and_set_covariances(
1186 &self,
1187 linear_solver: &mut Box<dyn SparseLinearSolver>,
1188 variables: &mut HashMap<String, VariableEnum>,
1189 variable_index_map: &HashMap<String, usize>,
1190 ) -> Option<HashMap<String, Mat<f64>>> {
1191 // Compute the full covariance matrix (H^{-1}) using the linear solver
1192 linear_solver.compute_covariance_matrix()?;
1193 let full_cov = linear_solver.get_covariance_matrix()?.clone();
1194
1195 // Extract per-variable covariance blocks from the full matrix
1196 let per_var_covariances =
1197 extract_variable_covariances(&full_cov, variables, variable_index_map);
1198
1199 // Set covariances in Variable objects for easy access
1200 for (var_name, cov) in &per_var_covariances {
1201 if let Some(var) = variables.get_mut(var_name) {
1202 var.set_covariance(cov.clone());
1203 }
1204 }
1205
1206 Some(per_var_covariances)
1207 }
1208}
1209
#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::loss_functions::HuberLoss;
    use crate::factors::{BetweenFactorSE2, BetweenFactorSE3, PriorFactor};
    use crate::manifold::{ManifoldType, se3::SE3};
    use nalgebra::{Quaternion, Vector3, dvector};
    use std::collections::HashMap;

    // Shorthand aliases so the test signatures below stay readable.
    type TestResult = Result<(), Box<dyn std::error::Error>>;
    type TestProblemResult = Result<
        (
            Problem,
            HashMap<String, (ManifoldType, nalgebra::DVector<f64>)>,
        ),
        Box<dyn std::error::Error>,
    >;

    /// Create a test SE2 dataset with 10 vertices in a loop
    ///
    /// Builds a pose-graph fixture: 9 chain between-factors, 1 loop closure
    /// (x9 -> x0), and 1 prior on x0, all with Huber loss except the prior.
    fn create_se2_test_problem() -> TestProblemResult {
        let mut problem = Problem::new();
        let mut initial_values = HashMap::new();

        // Create 10 SE2 poses in a rough circle pattern
        let poses = vec![
            (0.0, 0.0, 0.0),    // x0: origin
            (1.0, 0.0, 0.1),    // x1: move right
            (1.5, 1.0, 0.5),    // x2: move up-right
            (1.0, 2.0, 1.0),    // x3: move up
            (0.0, 2.5, 1.5),    // x4: move up-left
            (-1.0, 2.0, 2.0),   // x5: move left
            (-1.5, 1.0, 2.5),   // x6: move down-left
            (-1.0, 0.0, 3.0),   // x7: move down
            (-0.5, -0.5, -2.8), // x8: move down-right
            (0.5, -0.5, -2.3),  // x9: back towards origin
        ];

        // Add vertices using [x, y, theta] ordering
        for (i, (x, y, theta)) in poses.iter().enumerate() {
            let var_name = format!("x{}", i);
            let se2_data = dvector![*x, *y, *theta];
            initial_values.insert(var_name, (ManifoldType::SE2, se2_data));
        }

        // Add chain of between factors
        for i in 0..9 {
            let from_pose = poses[i];
            let to_pose = poses[i + 1];

            // Compute relative transformation (coordinate-wise difference;
            // sufficient for a test fixture)
            let dx = to_pose.0 - from_pose.0;
            let dy = to_pose.1 - from_pose.1;
            let dtheta = to_pose.2 - from_pose.2;

            let between_factor = BetweenFactorSE2::new(dx, dy, dtheta);
            problem.add_residual_block(
                &[&format!("x{}", i), &format!("x{}", i + 1)],
                Box::new(between_factor),
                Some(Box::new(HuberLoss::new(1.0)?)),
            );
        }

        // Add loop closure from x9 back to x0
        let dx = poses[0].0 - poses[9].0;
        let dy = poses[0].1 - poses[9].1;
        let dtheta = poses[0].2 - poses[9].2;

        let loop_closure = BetweenFactorSE2::new(dx, dy, dtheta);
        problem.add_residual_block(
            &["x9", "x0"],
            Box::new(loop_closure),
            Some(Box::new(HuberLoss::new(1.0)?)),
        );

        // Add prior factor for x0
        let prior_factor = PriorFactor {
            data: dvector![0.0, 0.0, 0.0],
        };
        problem.add_residual_block(&["x0"], Box::new(prior_factor), None);

        Ok((problem, initial_values))
    }

    /// Create a test SE3 dataset with 8 vertices in a 3D pattern
    ///
    /// Builds a cube-shaped pose graph: 12 edge between-factors plus one
    /// prior on x0.
    fn create_se3_test_problem() -> TestProblemResult {
        let mut problem = Problem::new();
        let mut initial_values = HashMap::new();

        // Create 8 SE3 poses in a rough 3D cube pattern
        let poses = [
            // Bottom face of cube
            (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0),   // x0: origin
            (1.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.995), // x1: +X
            (1.0, 1.0, 0.0, 0.0, 0.0, 0.2, 0.98),  // x2: +X+Y
            (0.0, 1.0, 0.0, 0.0, 0.0, 0.3, 0.955), // x3: +Y
            // Top face of cube
            (0.0, 0.0, 1.0, 0.1, 0.0, 0.0, 0.995), // x4: +Z
            (1.0, 0.0, 1.0, 0.1, 0.0, 0.1, 0.99),  // x5: +X+Z
            (1.0, 1.0, 1.0, 0.1, 0.0, 0.2, 0.975), // x6: +X+Y+Z
            (0.0, 1.0, 1.0, 0.1, 0.0, 0.3, 0.95),  // x7: +Y+Z
        ];

        // Add vertices using [tx, ty, tz, qw, qx, qy, qz] ordering
        // (note: the tuple stores qw last, but dvector! places it before qx)
        for (i, (tx, ty, tz, qx, qy, qz, qw)) in poses.iter().enumerate() {
            let var_name = format!("x{}", i);
            let se3_data = dvector![*tx, *ty, *tz, *qw, *qx, *qy, *qz];
            initial_values.insert(var_name, (ManifoldType::SE3, se3_data));
        }

        // Add between factors connecting the cube edges
        let edges = vec![
            (0, 1),
            (1, 2),
            (2, 3),
            (3, 0), // Bottom face
            (4, 5),
            (5, 6),
            (6, 7),
            (7, 4), // Top face
            (0, 4),
            (1, 5),
            (2, 6),
            (3, 7), // Vertical edges
        ];

        for (from_idx, to_idx) in edges {
            let from_pose = poses[from_idx];
            let to_pose = poses[to_idx];

            // Create a simple relative transformation (simplified for testing)
            let relative_se3 = SE3::from_translation_quaternion(
                Vector3::new(
                    to_pose.0 - from_pose.0, // dx
                    to_pose.1 - from_pose.1, // dy
                    to_pose.2 - from_pose.2, // dz
                ),
                Quaternion::new(1.0, 0.0, 0.0, 0.0), // identity quaternion
            );

            let between_factor = BetweenFactorSE3::new(relative_se3);
            problem.add_residual_block(
                &[&format!("x{}", from_idx), &format!("x{}", to_idx)],
                Box::new(between_factor),
                Some(Box::new(HuberLoss::new(1.0)?)),
            );
        }

        // Add prior factor for x0
        let prior_factor = PriorFactor {
            data: dvector![0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
        };
        problem.add_residual_block(&["x0"], Box::new(prior_factor), None);

        Ok((problem, initial_values))
    }

    #[test]
    fn test_problem_construction_se2() -> TestResult {
        let (problem, initial_values) = create_se2_test_problem()?;

        // Test basic problem properties
        assert_eq!(problem.num_residual_blocks(), 11); // 9 between + 1 loop closure + 1 prior
        assert_eq!(problem.total_residual_dimension, 33); // 11 * 3
        assert_eq!(initial_values.len(), 10);

        Ok(())
    }

    #[test]
    fn test_problem_construction_se3() -> TestResult {
        let (problem, initial_values) = create_se3_test_problem()?;

        // Test basic problem properties
        assert_eq!(problem.num_residual_blocks(), 13); // 12 between + 1 prior
        assert_eq!(problem.total_residual_dimension, 79); // 12 * 6 + 1 * 7 (SE3 between factors are 6-dim, prior factor is 7-dim)
        assert_eq!(initial_values.len(), 8);

        Ok(())
    }

    #[test]
    fn test_variable_initialization_se2() -> TestResult {
        let (problem, initial_values) = create_se2_test_problem()?;

        // Test variable initialization
        let variables = problem.initialize_variables(&initial_values);
        assert_eq!(variables.len(), 10);

        // Test variable sizes (SE2 tangent space is 3-dimensional)
        for (name, var) in &variables {
            assert_eq!(
                var.get_size(),
                3,
                "SE2 variable {} should have size 3",
                name
            );
        }

        // Test conversion to DVector
        for (name, var) in &variables {
            let vec = var.to_vector();
            assert_eq!(
                vec.len(),
                3,
                "SE2 variable {} vector should have length 3",
                name
            );
        }

        Ok(())
    }

    #[test]
    fn test_variable_initialization_se3() -> TestResult {
        let (problem, initial_values) = create_se3_test_problem()?;

        // Test variable initialization
        let variables = problem.initialize_variables(&initial_values);
        assert_eq!(variables.len(), 8);

        // Test variable sizes (SE3 has 6 DOF in the tangent space)
        for (name, var) in &variables {
            assert_eq!(
                var.get_size(),
                6,
                "SE3 variable {} should have size 6 (DOF)",
                name
            );
        }

        // Test conversion to DVector (7 ambient parameters: translation + quaternion)
        for (name, var) in &variables {
            let vec = var.to_vector();
            assert_eq!(
                vec.len(),
                7,
                "SE3 variable {} vector should have length 7",
                name
            );
        }

        Ok(())
    }

    #[test]
    fn test_column_mapping_se2() -> TestResult {
        let (problem, initial_values) = create_se2_test_problem()?;
        let variables = problem.initialize_variables(&initial_values);

        // Create column mapping for variables
        let mut variable_index_sparce_matrix = HashMap::new();
        let mut col_offset = 0;
        let mut sorted_vars: Vec<_> = variables.keys().collect();
        sorted_vars.sort(); // Ensure consistent ordering

        for var_name in sorted_vars {
            variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
            col_offset += variables[var_name].get_size();
        }

        // Test total degrees of freedom
        let total_dof: usize = variables.values().map(|v| v.get_size()).sum();
        assert_eq!(total_dof, 30); // 10 variables * 3 DOF each
        assert_eq!(col_offset, 30);

        // Test each variable has correct column mapping
        for (var_name, &col_idx) in &variable_index_sparce_matrix {
            assert!(
                col_idx < total_dof,
                "Column index {} for {} should be < {}",
                col_idx,
                var_name,
                total_dof
            );
        }

        Ok(())
    }

    #[test]
    fn test_symbolic_structure_se2() -> TestResult {
        let (problem, initial_values) = create_se2_test_problem()?;
        let variables = problem.initialize_variables(&initial_values);

        // Create column mapping
        let mut variable_index_sparce_matrix = HashMap::new();
        let mut col_offset = 0;
        let mut sorted_vars: Vec<_> = variables.keys().collect();
        sorted_vars.sort();

        for var_name in sorted_vars {
            variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
            col_offset += variables[var_name].get_size();
        }

        // Build symbolic structure
        let symbolic_structure = problem.build_symbolic_structure(
            &variables,
            &variable_index_sparce_matrix,
            col_offset,
        )?;

        // Test symbolic structure dimensions (rows = residuals, cols = DOF)
        assert_eq!(
            symbolic_structure.pattern.nrows(),
            problem.total_residual_dimension
        );
        assert_eq!(symbolic_structure.pattern.ncols(), 30); // total DOF

        Ok(())
    }

    #[test]
    fn test_residual_jacobian_computation_se2() -> TestResult {
        let (problem, initial_values) = create_se2_test_problem()?;
        let variables = problem.initialize_variables(&initial_values);

        // Create column mapping
        let mut variable_index_sparce_matrix = HashMap::new();
        let mut col_offset = 0;
        let mut sorted_vars: Vec<_> = variables.keys().collect();
        sorted_vars.sort();

        for var_name in sorted_vars {
            variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
            col_offset += variables[var_name].get_size();
        }

        // Test sparse computation
        let symbolic_structure = problem.build_symbolic_structure(
            &variables,
            &variable_index_sparce_matrix,
            col_offset,
        )?;
        let (residual_sparse, jacobian_sparse) = problem.compute_residual_and_jacobian_sparse(
            &variables,
            &variable_index_sparce_matrix,
            &symbolic_structure,
        )?;

        // Test sparse dimensions
        assert_eq!(residual_sparse.nrows(), problem.total_residual_dimension);
        assert_eq!(jacobian_sparse.nrows(), problem.total_residual_dimension);
        assert_eq!(jacobian_sparse.ncols(), 30);

        Ok(())
    }

    #[test]
    fn test_residual_jacobian_computation_se3() -> TestResult {
        let (problem, initial_values) = create_se3_test_problem()?;
        let variables = problem.initialize_variables(&initial_values);

        // Create column mapping
        let mut variable_index_sparce_matrix = HashMap::new();
        let mut col_offset = 0;
        let mut sorted_vars: Vec<_> = variables.keys().collect();
        sorted_vars.sort();

        for var_name in sorted_vars {
            variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
            col_offset += variables[var_name].get_size();
        }

        // Test sparse computation
        let symbolic_structure = problem.build_symbolic_structure(
            &variables,
            &variable_index_sparce_matrix,
            col_offset,
        )?;
        let (residual_sparse, jacobian_sparse) = problem.compute_residual_and_jacobian_sparse(
            &variables,
            &variable_index_sparce_matrix,
            &symbolic_structure,
        )?;

        // Test sparse dimensions match
        assert_eq!(residual_sparse.nrows(), problem.total_residual_dimension);
        assert_eq!(jacobian_sparse.nrows(), problem.total_residual_dimension);
        assert_eq!(jacobian_sparse.ncols(), 48); // 8 variables * 6 DOF each

        Ok(())
    }

    #[test]
    fn test_residual_block_operations() -> TestResult {
        let mut problem = Problem::new();

        // Test adding residual blocks (ids are assigned sequentially from 0)
        let block_id1 = problem.add_residual_block(
            &["x0", "x1"],
            Box::new(BetweenFactorSE2::new(1.0, 0.0, 0.1)),
            Some(Box::new(HuberLoss::new(1.0)?)),
        );

        let block_id2 = problem.add_residual_block(
            &["x0"],
            Box::new(PriorFactor {
                data: dvector![0.0, 0.0, 0.0],
            }),
            None,
        );

        assert_eq!(problem.num_residual_blocks(), 2);
        assert_eq!(problem.total_residual_dimension, 6); // 3 + 3
        assert_eq!(block_id1, 0);
        assert_eq!(block_id2, 1);

        // Test removing residual blocks (dimension bookkeeping must shrink too)
        let removed_block = problem.remove_residual_block(block_id1);
        assert!(removed_block.is_some());
        assert_eq!(problem.num_residual_blocks(), 1);
        assert_eq!(problem.total_residual_dimension, 3); // Only prior factor remains

        // Test removing non-existent block
        let non_existent = problem.remove_residual_block(999);
        assert!(non_existent.is_none());

        Ok(())
    }

    #[test]
    fn test_variable_constraints() -> TestResult {
        let mut problem = Problem::new();

        // Test fixing variables (per-index fixing accumulates per variable)
        problem.fix_variable("x0", 0);
        problem.fix_variable("x0", 1);
        problem.fix_variable("x1", 2);

        assert!(problem.fixed_variable_indexes.contains_key("x0"));
        assert!(problem.fixed_variable_indexes.contains_key("x1"));
        assert_eq!(problem.fixed_variable_indexes["x0"].len(), 2);
        assert_eq!(problem.fixed_variable_indexes["x1"].len(), 1);

        // Test unfixing variables (removes the whole entry for that variable)
        problem.unfix_variable("x0");
        assert!(!problem.fixed_variable_indexes.contains_key("x0"));
        assert!(problem.fixed_variable_indexes.contains_key("x1"));

        // Test variable bounds (per-index lower/upper pairs)
        problem.set_variable_bounds("x2", 0, -1.0, 1.0);
        problem.set_variable_bounds("x2", 1, -2.0, 2.0);
        problem.set_variable_bounds("x3", 0, 0.0, 5.0);

        assert!(problem.variable_bounds.contains_key("x2"));
        assert!(problem.variable_bounds.contains_key("x3"));
        assert_eq!(problem.variable_bounds["x2"].len(), 2);
        assert_eq!(problem.variable_bounds["x3"].len(), 1);

        // Test removing bounds
        problem.remove_variable_bounds("x2");
        assert!(!problem.variable_bounds.contains_key("x2"));
        assert!(problem.variable_bounds.contains_key("x3"));

        Ok(())
    }
}