// apex_solver/core/problem.rs
1//! Optimization problem definition and sparse Jacobian computation.
2//!
3//! The `Problem` struct is the central component that defines a factor graph optimization problem.
4//! It manages residual blocks (constraints), variables, and the construction of sparse Jacobian
5//! matrices for efficient nonlinear least squares optimization.
6//!
7//! # Factor Graph Representation
8//!
9//! The optimization problem is represented as a bipartite factor graph:
10//!
11//! ```text
12//! Variables: x0 --- x1 --- x2 --- x3
13//! | | | |
14//! Factors: f0 f1 f2 f3 (constraints/measurements)
15//! ```
16//!
17//! Each factor connects one or more variables and contributes a residual (error) term to the
18//! overall cost function:
19//!
20//! ```text
21//! minimize Σ_i ρ(||r_i(x)||²)
22//! ```
23//!
24//! where `r_i(x)` is the residual for factor i, and ρ is an optional robust loss function.
25//!
26//! # Key Responsibilities
27//!
28//! 1. **Residual Block Management**: Add/remove factors and track their structure
29//! 2. **Variable Management**: Initialize variables with manifold types and constraints
30//! 3. **Sparsity Pattern**: Build symbolic structure for efficient sparse linear algebra
31//! 4. **Linearization**: Compute residuals and Jacobians in parallel
32//! 5. **Covariance**: Extract per-variable uncertainty estimates after optimization
33//!
34//! # Sparse Jacobian Structure
35//!
36//! The Jacobian matrix `J = ∂r/∂x` is sparse because each factor only depends on a small
37//! subset of variables. For example, a between factor connecting x0 and x1 contributes
38//! a 3×6 block (SE2) or 6×12 block (SE3) to the Jacobian, leaving the rest as zeros.
39//!
40//! The Problem pre-computes the sparsity pattern once, then efficiently fills in the
41//! numerical values during each iteration.
42//!
43//! # Mixed Manifold Support
44//!
45//! The Problem supports mixed manifold types in a single optimization problem via
46//! [`VariableEnum`]. This allows:
47//! - SE2 and SE3 poses in the same graph
48//! - SO3 rotations with R³ landmarks
49//! - Any combination of supported manifolds
50//!
51//! # Example: Building a Problem
52//!
53//! ```
54//! use apex_solver::core::problem::Problem;
55//! use apex_solver::core::factors::{BetweenFactorSE2, PriorFactor};
56//! use apex_solver::core::loss_functions::HuberLoss;
57//! use apex_solver::manifold::ManifoldType;
58//! use nalgebra::{DVector, dvector};
59//! use std::collections::HashMap;
60//!
61//! let mut problem = Problem::new();
62//!
63//! // Add prior factor to anchor the first pose
64//! let prior = Box::new(PriorFactor {
65//! data: dvector![0.0, 0.0, 0.0],
66//! });
67//! problem.add_residual_block(&["x0"], prior, None);
68//!
69//! // Add between factor with robust loss
70//! let between = Box::new(BetweenFactorSE2::new(1.0, 0.0, 0.1));
71//! let loss: Option<Box<dyn apex_solver::core::loss_functions::Loss + Send>> =
72//! Some(Box::new(HuberLoss::new(1.0).unwrap()));
73//! problem.add_residual_block(&["x0", "x1"], between, loss);
74//!
75//! // Initialize variables
76//! let mut initial_values = HashMap::new();
77//! initial_values.insert("x0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
78//! initial_values.insert("x1".to_string(), (ManifoldType::SE2, dvector![0.9, 0.1, 0.12]));
79//!
80//! let variables = problem.initialize_variables(&initial_values);
81//!
82//! println!("Problem has {} residual blocks", problem.num_residual_blocks());
83//! println!("Total residual dimension: {}", problem.total_residual_dimension);
84//! ```
85
86use std::{collections, fs, io::Write, sync};
87
88use faer;
89use faer::sparse;
90use nalgebra;
91use rayon::prelude::*;
92
93use crate::{
94 core::{corrector, factors, loss_functions, residual_block, variable},
95 error, linalg, manifold,
96 manifold::{rn::Rn, se2::SE2, se3::SE3, so2::SO2, so3::SO3},
97};
98
99/// Symbolic structure for sparse matrix operations.
100///
101/// Contains the sparsity pattern (which entries are non-zero) and an ordering
102/// for efficient numerical computation. This is computed once at the beginning
103/// and reused throughout optimization.
104///
105/// # Fields
106///
107/// - `pattern`: The symbolic sparse column matrix structure (row/col indices of non-zeros)
108/// - `order`: A fill-reducing ordering/permutation for numerical stability
pub struct SymbolicStructure {
    /// Symbolic sparse column-matrix pattern: the (row, col) locations of all
    /// structurally non-zero Jacobian entries (no numerical values yet).
    pub pattern: sparse::SymbolicSparseColMat<usize>,
    /// Argsort ordering returned by faer alongside the pattern; used to
    /// scatter numerical values into the pattern during assembly
    /// (see `Problem::build_symbolic_structure`).
    pub order: sparse::Argsort<usize>,
}
113
/// Enum to handle mixed manifold variable types.
///
/// Wraps a typed [`variable::Variable`] so variables living on different
/// manifolds (poses, rotations, Euclidean vectors) can be stored in a single
/// map and optimized together in one problem.
#[derive(Clone, Debug)]
pub enum VariableEnum {
    /// Euclidean vector variable; tangent DOF equals the vector length.
    Rn(variable::Variable<Rn>),
    /// 2D rigid-body pose; 3 tangent DOF.
    SE2(variable::Variable<SE2>),
    /// 3D rigid-body pose; 6 tangent DOF.
    SE3(variable::Variable<SE3>),
    /// 2D rotation; 1 tangent DOF.
    SO2(variable::Variable<SO2>),
    /// 3D rotation; 3 tangent DOF.
    SO3(variable::Variable<SO3>),
}
123
124impl VariableEnum {
125 /// Get the tangent space size for this variable
126 pub fn get_size(&self) -> usize {
127 match self {
128 VariableEnum::Rn(var) => var.get_size(),
129 VariableEnum::SE2(var) => var.get_size(),
130 VariableEnum::SE3(var) => var.get_size(),
131 VariableEnum::SO2(var) => var.get_size(),
132 VariableEnum::SO3(var) => var.get_size(),
133 }
134 }
135
136 /// Convert to DVector for use with Factor trait
137 pub fn to_vector(&self) -> nalgebra::DVector<f64> {
138 match self {
139 VariableEnum::Rn(var) => var.value.clone().into(),
140 VariableEnum::SE2(var) => var.value.clone().into(),
141 VariableEnum::SE3(var) => var.value.clone().into(),
142 VariableEnum::SO2(var) => var.value.clone().into(),
143 VariableEnum::SO3(var) => var.value.clone().into(),
144 }
145 }
146
147 /// Apply a tangent space step to update this variable.
148 ///
149 /// This method applies a manifold plus operation: x_new = x ⊞ δx
150 /// where δx is a tangent vector. It supports all manifold types.
151 ///
152 /// # Arguments
153 /// * `step_slice` - View into the full step vector for this variable's DOF
154 ///
155 /// # Implementation Notes
156 /// Uses explicit clone instead of unsafe memory copy (`IntoNalgebra`) for small vectors.
157 /// This is safe and performant for typical manifold dimensions (1-6 DOF).
158 ///
159 pub fn apply_tangent_step(&mut self, step_slice: faer::MatRef<f64>) {
160 match self {
161 VariableEnum::SE3(var) => {
162 // SE3 has 6 DOF in tangent space
163 let mut step_data: Vec<f64> = (0..6).map(|i| step_slice[(i, 0)]).collect();
164
165 // Enforce fixed indices: zero out step components for fixed DOF
166 for &fixed_idx in &var.fixed_indices {
167 if fixed_idx < 6 {
168 step_data[fixed_idx] = 0.0;
169 }
170 }
171
172 let step_dvector = nalgebra::DVector::from_vec(step_data);
173 let tangent = manifold::se3::SE3Tangent::from(step_dvector);
174 let new_value = var.plus(&tangent);
175 var.set_value(new_value);
176 }
177 VariableEnum::SE2(var) => {
178 // SE2 has 3 DOF in tangent space
179 let mut step_data: Vec<f64> = (0..3).map(|i| step_slice[(i, 0)]).collect();
180
181 // Enforce fixed indices: zero out step components for fixed DOF
182 for &fixed_idx in &var.fixed_indices {
183 if fixed_idx < 3 {
184 step_data[fixed_idx] = 0.0;
185 }
186 }
187
188 let step_dvector = nalgebra::DVector::from_vec(step_data);
189 let tangent = manifold::se2::SE2Tangent::from(step_dvector);
190 let new_value = var.plus(&tangent);
191 var.set_value(new_value);
192 }
193 VariableEnum::SO3(var) => {
194 // SO3 has 3 DOF in tangent space
195 let mut step_data: Vec<f64> = (0..3).map(|i| step_slice[(i, 0)]).collect();
196
197 // Enforce fixed indices: zero out step components for fixed DOF
198 for &fixed_idx in &var.fixed_indices {
199 if fixed_idx < 3 {
200 step_data[fixed_idx] = 0.0;
201 }
202 }
203
204 let step_dvector = nalgebra::DVector::from_vec(step_data);
205 let tangent = manifold::so3::SO3Tangent::from(step_dvector);
206 let new_value = var.plus(&tangent);
207 var.set_value(new_value);
208 }
209 VariableEnum::SO2(var) => {
210 // SO2 has 1 DOF in tangent space
211 let mut step_data = step_slice[(0, 0)];
212
213 // Enforce fixed indices: zero out step if index 0 is fixed
214 if var.fixed_indices.contains(&0) {
215 step_data = 0.0;
216 }
217
218 let step_dvector = nalgebra::DVector::from_vec(vec![step_data]);
219 let tangent = manifold::so2::SO2Tangent::from(step_dvector);
220 let new_value = var.plus(&tangent);
221 var.set_value(new_value);
222 }
223 VariableEnum::Rn(var) => {
224 // Rn has dynamic size
225 let size = var.get_size();
226 let mut step_data: Vec<f64> = (0..size).map(|i| step_slice[(i, 0)]).collect();
227
228 // Enforce fixed indices: zero out step components for fixed DOF
229 for &fixed_idx in &var.fixed_indices {
230 if fixed_idx < size {
231 step_data[fixed_idx] = 0.0;
232 }
233 }
234
235 let step_dvector = nalgebra::DVector::from_vec(step_data);
236 let tangent = manifold::rn::RnTangent::new(step_dvector);
237 let new_value = var.plus(&tangent);
238 var.set_value(new_value);
239 }
240 }
241 }
242
243 /// Get the covariance matrix for this variable (if computed).
244 ///
245 /// Returns `None` if covariance has not been computed.
246 ///
247 /// # Returns
248 /// Reference to the covariance matrix in tangent space
249 pub fn get_covariance(&self) -> Option<&faer::Mat<f64>> {
250 match self {
251 VariableEnum::Rn(var) => var.get_covariance(),
252 VariableEnum::SE2(var) => var.get_covariance(),
253 VariableEnum::SE3(var) => var.get_covariance(),
254 VariableEnum::SO2(var) => var.get_covariance(),
255 VariableEnum::SO3(var) => var.get_covariance(),
256 }
257 }
258
259 /// Set the covariance matrix for this variable.
260 ///
261 /// The covariance matrix should be square with dimension equal to
262 /// the tangent space dimension of this variable.
263 ///
264 /// # Arguments
265 /// * `cov` - Covariance matrix in tangent space
266 pub fn set_covariance(&mut self, cov: faer::Mat<f64>) {
267 match self {
268 VariableEnum::Rn(var) => var.set_covariance(cov),
269 VariableEnum::SE2(var) => var.set_covariance(cov),
270 VariableEnum::SE3(var) => var.set_covariance(cov),
271 VariableEnum::SO2(var) => var.set_covariance(cov),
272 VariableEnum::SO3(var) => var.set_covariance(cov),
273 }
274 }
275
276 /// Clear the covariance matrix for this variable.
277 pub fn clear_covariance(&mut self) {
278 match self {
279 VariableEnum::Rn(var) => var.clear_covariance(),
280 VariableEnum::SE2(var) => var.clear_covariance(),
281 VariableEnum::SE3(var) => var.clear_covariance(),
282 VariableEnum::SO2(var) => var.clear_covariance(),
283 VariableEnum::SO3(var) => var.clear_covariance(),
284 }
285 }
286
287 /// Get the bounds for this variable.
288 ///
289 /// Returns a reference to the bounds map where keys are indices and values are (lower, upper) pairs.
290 pub fn get_bounds(&self) -> &collections::HashMap<usize, (f64, f64)> {
291 match self {
292 VariableEnum::Rn(var) => &var.bounds,
293 VariableEnum::SE2(var) => &var.bounds,
294 VariableEnum::SE3(var) => &var.bounds,
295 VariableEnum::SO2(var) => &var.bounds,
296 VariableEnum::SO3(var) => &var.bounds,
297 }
298 }
299
300 /// Get the fixed indices for this variable.
301 ///
302 /// Returns a reference to the set of indices that should remain fixed during optimization.
303 pub fn get_fixed_indices(&self) -> &collections::HashSet<usize> {
304 match self {
305 VariableEnum::Rn(var) => &var.fixed_indices,
306 VariableEnum::SE2(var) => &var.fixed_indices,
307 VariableEnum::SE3(var) => &var.fixed_indices,
308 VariableEnum::SO2(var) => &var.fixed_indices,
309 VariableEnum::SO3(var) => &var.fixed_indices,
310 }
311 }
312
313 /// Set the value of this variable from a vector representation.
314 ///
315 /// This is used to update the variable after applying constraints (bounds and fixed indices).
316 pub fn set_from_vector(&mut self, vec: &nalgebra::DVector<f64>) {
317 match self {
318 VariableEnum::Rn(var) => {
319 var.set_value(Rn::new(vec.clone()));
320 }
321 VariableEnum::SE2(var) => {
322 let new_se2: SE2 = vec.clone().into();
323 var.set_value(new_se2);
324 }
325 VariableEnum::SE3(var) => {
326 let new_se3: SE3 = vec.clone().into();
327 var.set_value(new_se3);
328 }
329 VariableEnum::SO2(var) => {
330 let new_so2: SO2 = vec.clone().into();
331 var.set_value(new_so2);
332 }
333 VariableEnum::SO3(var) => {
334 let new_so3: SO3 = vec.clone().into();
335 var.set_value(new_so3);
336 }
337 }
338 }
339}
340
341/// The optimization problem definition for factor graph optimization.
342///
343/// Manages residual blocks (factors/constraints), variables, and the sparse Jacobian structure.
344/// Supports mixed manifold types (SE2, SE3, SO2, SO3, Rn) in a single problem and provides
345/// efficient parallel residual/Jacobian computation.
346///
347/// # Architecture
348///
349/// The Problem acts as a container and coordinator:
350/// - Stores all residual blocks (factors with optional loss functions)
351/// - Tracks the global structure (which variables connect to which factors)
352/// - Builds and maintains the sparse Jacobian pattern
353/// - Provides parallel residual/Jacobian evaluation using rayon
354/// - Manages variable constraints (fixed indices, bounds)
355///
356/// # Workflow
357///
358/// 1. **Construction**: Create a new Problem with `Problem::new()`
359/// 2. **Add Factors**: Use `add_residual_block()` to add constraints
360/// 3. **Initialize Variables**: Use `initialize_variables()` with initial values
361/// 4. **Build Sparsity**: Use `build_symbolic_structure()` once before optimization
362/// 5. **Linearize**: Call `compute_residual_and_jacobian_sparse()` each iteration
363/// 6. **Extract Covariance**: Use `compute_and_set_covariances()` after convergence
364///
365/// # Example
366///
367/// ```
368/// use apex_solver::core::problem::Problem;
369/// use apex_solver::core::factors::BetweenFactorSE2;
370/// use apex_solver::manifold::ManifoldType;
371/// use nalgebra::dvector;
372/// use std::collections::HashMap;
373///
374/// let mut problem = Problem::new();
375///
376/// // Add a between factor
377/// let factor = Box::new(BetweenFactorSE2::new(1.0, 0.0, 0.1));
378/// problem.add_residual_block(&["x0", "x1"], factor, None);
379///
380/// // Initialize variables
381/// let mut initial = HashMap::new();
382/// initial.insert("x0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
383/// initial.insert("x1".to_string(), (ManifoldType::SE2, dvector![1.0, 0.0, 0.1]));
384///
385/// let variables = problem.initialize_variables(&initial);
386/// assert_eq!(variables.len(), 2);
387/// ```
pub struct Problem {
    /// Total dimension of the stacked residual vector (sum of all residual
    /// block dimensions). Updated incrementally by `add_residual_block` /
    /// `remove_residual_block`.
    pub total_residual_dimension: usize,

    /// Counter for assigning unique IDs to residual blocks.
    /// Monotonically increasing; IDs are never reused, even after removal.
    residual_id_count: usize,

    /// Map from residual block ID to ResidualBlock instance.
    residual_blocks: collections::HashMap<usize, residual_block::ResidualBlock>,

    /// Variables with fixed indices (e.g., fix first pose's x,y coordinates).
    /// Maps variable name -> set of tangent-space indices to fix.
    /// Applied to variables at `initialize_variables` time.
    pub fixed_variable_indexes: collections::HashMap<String, collections::HashSet<usize>>,

    /// Variable bounds (box constraints on individual DOF).
    /// Maps variable name -> (index -> (lower_bound, upper_bound)).
    /// Applied to variables at `initialize_variables` time.
    pub variable_bounds: collections::HashMap<String, collections::HashMap<usize, (f64, f64)>>,
}
406impl Default for Problem {
407 fn default() -> Self {
408 Self::new()
409 }
410}
411
412impl Problem {
413 /// Create a new empty optimization problem.
414 ///
415 /// # Returns
416 ///
417 /// A new `Problem` with no residual blocks or variables
418 ///
419 /// # Example
420 ///
421 /// ```
422 /// use apex_solver::core::problem::Problem;
423 ///
424 /// let problem = Problem::new();
425 /// assert_eq!(problem.num_residual_blocks(), 0);
426 /// assert_eq!(problem.total_residual_dimension, 0);
427 /// ```
428 pub fn new() -> Self {
429 Self {
430 total_residual_dimension: 0,
431 residual_id_count: 0,
432 residual_blocks: collections::HashMap::new(),
433 fixed_variable_indexes: collections::HashMap::new(),
434 variable_bounds: collections::HashMap::new(),
435 }
436 }
437
438 /// Add a residual block (factor with optional loss function) to the problem.
439 ///
440 /// This is the primary method for building the factor graph. Each call adds one constraint
441 /// connecting one or more variables.
442 ///
443 /// # Arguments
444 ///
445 /// * `variable_key_size_list` - Names of the variables this factor connects (order matters)
446 /// * `factor` - The factor implementation that computes residuals and Jacobians
447 /// * `loss_func` - Optional robust loss function for outlier rejection
448 ///
449 /// # Returns
450 ///
451 /// The unique ID assigned to this residual block
452 ///
453 /// # Example
454 ///
455 /// ```
456 /// use apex_solver::core::problem::Problem;
457 /// use apex_solver::core::factors::{BetweenFactorSE2, PriorFactor};
458 /// use apex_solver::core::loss_functions::HuberLoss;
459 /// use nalgebra::dvector;
460 ///
461 /// let mut problem = Problem::new();
462 ///
463 /// // Add prior factor (unary constraint)
464 /// let prior = Box::new(PriorFactor { data: dvector![0.0, 0.0, 0.0] });
465 /// let id1 = problem.add_residual_block(&["x0"], prior, None);
466 ///
467 /// // Add between factor with robust loss (binary constraint)
468 /// let between = Box::new(BetweenFactorSE2::new(1.0, 0.0, 0.1));
469 /// let loss: Option<Box<dyn apex_solver::core::loss_functions::Loss + Send>> =
470 /// Some(Box::new(HuberLoss::new(1.0).unwrap()));
471 /// let id2 = problem.add_residual_block(&["x0", "x1"], between, loss);
472 ///
473 /// assert_eq!(id1, 0);
474 /// assert_eq!(id2, 1);
475 /// assert_eq!(problem.num_residual_blocks(), 2);
476 /// ```
477 pub fn add_residual_block(
478 &mut self,
479 variable_key_size_list: &[&str],
480 factor: Box<dyn factors::Factor + Send>,
481 loss_func: Option<Box<dyn loss_functions::Loss + Send>>,
482 ) -> usize {
483 let new_residual_dimension = factor.get_dimension();
484 self.residual_blocks.insert(
485 self.residual_id_count,
486 residual_block::ResidualBlock::new(
487 self.residual_id_count,
488 self.total_residual_dimension,
489 variable_key_size_list,
490 factor,
491 loss_func,
492 ),
493 );
494 let block_id = self.residual_id_count;
495 self.residual_id_count += 1;
496
497 self.total_residual_dimension += new_residual_dimension;
498
499 block_id
500 }
501
502 pub fn remove_residual_block(
503 &mut self,
504 block_id: usize,
505 ) -> Option<residual_block::ResidualBlock> {
506 if let Some(residual_block) = self.residual_blocks.remove(&block_id) {
507 self.total_residual_dimension -= residual_block.factor.get_dimension();
508 Some(residual_block)
509 } else {
510 None
511 }
512 }
513
514 pub fn fix_variable(&mut self, var_to_fix: &str, idx: usize) {
515 if let Some(var_mut) = self.fixed_variable_indexes.get_mut(var_to_fix) {
516 var_mut.insert(idx);
517 } else {
518 self.fixed_variable_indexes
519 .insert(var_to_fix.to_owned(), collections::HashSet::from([idx]));
520 }
521 }
522
    /// Remove all fixed-index constraints registered for `var_to_unfix`.
    ///
    /// No-op if the variable has no fixed indices. Only affects variables
    /// created by a subsequent `initialize_variables` call.
    pub fn unfix_variable(&mut self, var_to_unfix: &str) {
        self.fixed_variable_indexes.remove(var_to_unfix);
    }
526
527 pub fn set_variable_bounds(
528 &mut self,
529 var_to_bound: &str,
530 idx: usize,
531 lower_bound: f64,
532 upper_bound: f64,
533 ) {
534 if lower_bound > upper_bound {
535 log::error!("lower bound is larger than upper bound");
536 } else if let Some(var_mut) = self.variable_bounds.get_mut(var_to_bound) {
537 var_mut.insert(idx, (lower_bound, upper_bound));
538 } else {
539 self.variable_bounds.insert(
540 var_to_bound.to_owned(),
541 collections::HashMap::from([(idx, (lower_bound, upper_bound))]),
542 );
543 }
544 }
545
    /// Remove all registered bounds for `var_to_unbound`.
    ///
    /// No-op if the variable has no bounds. Only affects variables created by
    /// a subsequent `initialize_variables` call.
    pub fn remove_variable_bounds(&mut self, var_to_unbound: &str) {
        self.variable_bounds.remove(var_to_unbound);
    }
549
550 /// Initialize variables from initial values with manifold types.
551 ///
552 /// Converts raw initial values into typed `Variable<M>` instances wrapped in `VariableEnum`.
553 /// This method also applies any fixed indices or bounds that were set via `fix_variable()`
554 /// or `set_variable_bounds()`.
555 ///
556 /// # Arguments
557 ///
558 /// * `initial_values` - Map from variable name to (manifold type, initial value vector)
559 ///
560 /// # Returns
561 ///
562 /// Map from variable name to `VariableEnum` (typed variables ready for optimization)
563 ///
564 /// # Manifold Formats
565 ///
566 /// - **SE2**: `[x, y, theta]` (3 elements)
567 /// - **SE3**: `[tx, ty, tz, qw, qx, qy, qz]` (7 elements)
568 /// - **SO2**: `[theta]` (1 element)
569 /// - **SO3**: `[qw, qx, qy, qz]` (4 elements)
570 /// - **Rn**: `[x1, x2, ..., xn]` (n elements)
571 ///
572 /// # Example
573 ///
574 /// ```
575 /// use apex_solver::core::problem::Problem;
576 /// use apex_solver::manifold::ManifoldType;
577 /// use nalgebra::dvector;
578 /// use std::collections::HashMap;
579 ///
580 /// let problem = Problem::new();
581 ///
582 /// let mut initial = HashMap::new();
583 /// initial.insert("pose0".to_string(), (ManifoldType::SE2, dvector![0.0, 0.0, 0.0]));
584 /// initial.insert("pose1".to_string(), (ManifoldType::SE2, dvector![1.0, 0.0, 0.1]));
585 /// initial.insert("landmark".to_string(), (ManifoldType::RN, dvector![5.0, 3.0]));
586 ///
587 /// let variables = problem.initialize_variables(&initial);
588 /// assert_eq!(variables.len(), 3);
589 /// ```
590 pub fn initialize_variables(
591 &self,
592 initial_values: &collections::HashMap<
593 String,
594 (manifold::ManifoldType, nalgebra::DVector<f64>),
595 >,
596 ) -> collections::HashMap<String, VariableEnum> {
597 let variables: collections::HashMap<String, VariableEnum> = initial_values
598 .iter()
599 .map(|(k, v)| {
600 let variable_enum = match v.0 {
601 manifold::ManifoldType::SO2 => {
602 let mut var = variable::Variable::new(SO2::from(v.1.clone()));
603 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
604 var.fixed_indices = indexes.clone();
605 }
606 if let Some(bounds) = self.variable_bounds.get(k) {
607 var.bounds = bounds.clone();
608 }
609 VariableEnum::SO2(var)
610 }
611 manifold::ManifoldType::SO3 => {
612 let mut var = variable::Variable::new(SO3::from(v.1.clone()));
613 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
614 var.fixed_indices = indexes.clone();
615 }
616 if let Some(bounds) = self.variable_bounds.get(k) {
617 var.bounds = bounds.clone();
618 }
619 VariableEnum::SO3(var)
620 }
621 manifold::ManifoldType::SE2 => {
622 let mut var = variable::Variable::new(SE2::from(v.1.clone()));
623 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
624 var.fixed_indices = indexes.clone();
625 }
626 if let Some(bounds) = self.variable_bounds.get(k) {
627 var.bounds = bounds.clone();
628 }
629 VariableEnum::SE2(var)
630 }
631 manifold::ManifoldType::SE3 => {
632 let mut var = variable::Variable::new(SE3::from(v.1.clone()));
633 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
634 var.fixed_indices = indexes.clone();
635 }
636 if let Some(bounds) = self.variable_bounds.get(k) {
637 var.bounds = bounds.clone();
638 }
639 VariableEnum::SE3(var)
640 }
641 manifold::ManifoldType::RN => {
642 let mut var = variable::Variable::new(Rn::from(v.1.clone()));
643 if let Some(indexes) = self.fixed_variable_indexes.get(k) {
644 var.fixed_indices = indexes.clone();
645 }
646 if let Some(bounds) = self.variable_bounds.get(k) {
647 var.bounds = bounds.clone();
648 }
649 VariableEnum::Rn(var)
650 }
651 };
652
653 (k.to_owned(), variable_enum)
654 })
655 .collect();
656 variables
657 }
658
    /// Get the number of residual blocks (factors) currently in the problem.
    pub fn num_residual_blocks(&self) -> usize {
        self.residual_blocks.len()
    }
663
664 /// Build symbolic structure for sparse Jacobian computation
665 ///
666 /// This method constructs the sparsity pattern of the Jacobian matrix before numerical
667 /// computation. It determines which entries in the Jacobian will be non-zero based on
668 /// the structure of the optimization problem (which residual blocks connect which variables).
669 ///
670 /// # Purpose
671 /// - Pre-allocates memory for sparse matrix operations
672 /// - Enables efficient sparse linear algebra (avoiding dense operations)
673 /// - Computed once at the beginning, used throughout optimization
674 ///
675 /// # Arguments
676 /// * `variables` - Map of variable names to their values and properties (SE2, SE3, etc.)
677 /// * `variable_index_sparce_matrix` - Map from variable name to starting column index in Jacobian
678 /// * `total_dof` - Total degrees of freedom (number of columns in Jacobian)
679 ///
680 /// # Returns
681 /// A `SymbolicStructure` containing:
682 /// - `pattern`: The symbolic sparse column matrix structure (row/col indices of non-zeros)
683 /// - `order`: An ordering/permutation for efficient numerical computation
684 ///
685 /// # Algorithm
686 /// For each residual block:
687 /// 1. Identify which variables it depends on
688 /// 2. For each (residual_dimension × variable_dof) block, mark entries as non-zero
689 /// 3. Convert to optimized sparse matrix representation
690 ///
691 /// # Example Structure
692 /// For a simple problem with 3 SE2 poses (9 DOF total):
693 /// - Between(x0, x1): Creates 3×6 block at rows 0-2, cols 0-5
694 /// - Between(x1, x2): Creates 3×6 block at rows 3-5, cols 3-8
695 /// - Prior(x0): Creates 3×3 block at rows 6-8, cols 0-2
696 ///
697 /// Result: 9×9 sparse Jacobian with 45 non-zero entries
698 pub fn build_symbolic_structure(
699 &self,
700 variables: &collections::HashMap<String, VariableEnum>,
701 variable_index_sparce_matrix: &collections::HashMap<String, usize>,
702 total_dof: usize,
703 ) -> error::ApexResult<SymbolicStructure> {
704 // Vector to accumulate all (row, col) pairs that will be non-zero in the Jacobian
705 // Each Pair represents one entry in the sparse matrix
706 let mut indices = Vec::<sparse::Pair<usize, usize>>::new();
707
708 // Iterate through all residual blocks (factors/constraints) in the problem
709 // Each residual block contributes a block of entries to the Jacobian
710 self.residual_blocks.iter().for_each(|(_, residual_block)| {
711 // Create local indexing for this residual block's variables
712 // Maps each variable to its local starting index and size within this factor
713 // Example: For Between(x0, x1) with SE2: [(0, 3), (3, 3)]
714 // - x0 starts at local index 0, has 3 DOF
715 // - x1 starts at local index 3, has 3 DOF
716 let mut variable_local_idx_size_list = Vec::<(usize, usize)>::new();
717 let mut count_variable_local_idx: usize = 0;
718
719 // Build the local index mapping for this residual block
720 for var_key in &residual_block.variable_key_list {
721 if let Some(variable) = variables.get(var_key) {
722 // Store (local_start_index, dof_size) for this variable
723 variable_local_idx_size_list
724 .push((count_variable_local_idx, variable.get_size()));
725 count_variable_local_idx += variable.get_size();
726 }
727 }
728
729 // For each variable in this residual block, generate Jacobian entries
730 for (i, var_key) in residual_block.variable_key_list.iter().enumerate() {
731 if let Some(variable_global_idx) = variable_index_sparce_matrix.get(var_key) {
732 // Get the DOF size for this variable
733 let (_, var_size) = variable_local_idx_size_list[i];
734
735 // Generate all (row, col) pairs for the Jacobian block:
736 // ∂(residual) / ∂(variable)
737 //
738 // For a residual block with dimension R and variable with DOF V:
739 // Creates R × V entries in the Jacobian
740
741 // Iterate over each residual dimension (rows)
742 for row_idx in 0..residual_block.factor.get_dimension() {
743 // Iterate over each variable DOF (columns)
744 for col_idx in 0..var_size {
745 // Compute global row index:
746 // Start from this residual block's first row, add offset
747 let global_row_idx = residual_block.residual_row_start_idx + row_idx;
748
749 // Compute global column index:
750 // Start from this variable's first column, add offset
751 let global_col_idx = variable_global_idx + col_idx;
752
753 // Record this (row, col) pair as a non-zero entry
754 indices.push(sparse::Pair::new(global_row_idx, global_col_idx));
755 }
756 }
757 }
758 }
759 });
760
761 // Convert the list of (row, col) pairs into an optimized symbolic sparse matrix
762 // This performs:
763 // 1. Duplicate elimination (same entry might be referenced multiple times)
764 // 2. Sorting for column-wise storage format
765 // 3. Computing a fill-reducing ordering for numerical stability
766 // 4. Allocating the symbolic structure (no values yet, just pattern)
767 let (pattern, order) = sparse::SymbolicSparseColMat::try_new_from_indices(
768 self.total_residual_dimension, // Number of rows (total residual dimension)
769 total_dof, // Number of columns (total DOF)
770 &indices, // List of non-zero entry locations
771 )
772 .map_err(|_| {
773 error::ApexError::MatrixOperation(
774 "Failed to build symbolic sparse matrix structure".to_string(),
775 )
776 })?;
777
778 // Return the symbolic structure that will be filled with numerical values later
779 Ok(SymbolicStructure { pattern, order })
780 }
781
782 /// Compute only the residual vector for the current variable values.
783 ///
784 /// This is an optimized version that skips Jacobian computation when only the cost
785 /// function value is needed (e.g., during initialization or step evaluation).
786 ///
787 /// # Arguments
788 ///
789 /// * `variables` - Current variable values (from `initialize_variables()` or updated)
790 ///
791 /// # Returns
792 ///
793 /// Residual vector as N×1 column matrix (N = total residual dimension)
794 ///
795 /// # Performance
796 ///
797 /// Approximately **2x faster** than `compute_residual_and_jacobian_sparse()` since it:
798 /// - Skips Jacobian computation for each residual block
799 /// - Avoids Jacobian matrix assembly and storage
800 /// - Only parallelizes residual evaluation
801 ///
802 /// # When to Use
803 ///
804 /// - **Initial cost computation**: When setting up optimization state
805 /// - **Step evaluation**: When computing new cost after applying parameter updates
806 /// - **Cost-only queries**: When you don't need gradients
807 ///
808 /// Use `compute_residual_and_jacobian_sparse()` when you need both residual and Jacobian
809 /// (e.g., in the main optimization iteration loop for linearization).
810 ///
811 /// # Example
812 ///
813 /// ```rust,ignore
814 /// // Initial cost evaluation (no Jacobian needed)
815 /// let residual = problem.compute_residual_sparse(&variables)?;
816 /// let initial_cost = residual.norm_l2() * residual.norm_l2();
817 /// ```
818 pub fn compute_residual_sparse(
819 &self,
820 variables: &collections::HashMap<String, VariableEnum>,
821 ) -> error::ApexResult<faer::Mat<f64>> {
822 let total_residual = sync::Arc::new(sync::Mutex::new(faer::Col::<f64>::zeros(
823 self.total_residual_dimension,
824 )));
825
826 // Compute residuals in parallel (no Jacobian needed)
827 let result: Result<Vec<()>, error::ApexError> = self
828 .residual_blocks
829 .par_iter()
830 .map(|(_, residual_block)| {
831 self.compute_residual_block(residual_block, variables, &total_residual)
832 })
833 .collect();
834
835 result?;
836
837 let total_residual = sync::Arc::try_unwrap(total_residual)
838 .map_err(|_| {
839 error::ApexError::ThreadError("Failed to unwrap Arc for total residual".to_string())
840 })?
841 .into_inner()
842 .map_err(|_| {
843 error::ApexError::ThreadError(
844 "Failed to extract mutex inner value for total residual".to_string(),
845 )
846 })?;
847
848 // Convert faer Col to Mat (column vector as n×1 matrix)
849 let residual_faer = total_residual.as_ref().as_mat().to_owned();
850 Ok(residual_faer)
851 }
852
853 /// Compute residual vector and sparse Jacobian matrix for the current variable values.
854 ///
855 /// This is the core linearization method called during each optimization iteration. It:
856 /// 1. Evaluates all residual blocks in parallel using rayon
857 /// 2. Assembles the full residual vector
858 /// 3. Constructs the sparse Jacobian matrix using the precomputed symbolic structure
859 ///
860 /// # Arguments
861 ///
862 /// * `variables` - Current variable values (from `initialize_variables()` or updated)
863 /// * `variable_index_sparce_matrix` - Map from variable name to starting column in Jacobian
864 /// * `symbolic_structure` - Precomputed sparsity pattern (from `build_symbolic_structure()`)
865 ///
866 /// # Returns
867 ///
868 /// Tuple `(residual, jacobian)` where:
869 /// - `residual`: N×1 column matrix (total residual dimension)
870 /// - `jacobian`: N×M sparse matrix (N = residual dim, M = total DOF)
871 ///
872 /// # Performance
873 ///
874 /// This method is highly optimized:
875 /// - **Parallel evaluation**: Each residual block is evaluated independently using rayon
876 /// - **Sparse storage**: Only non-zero Jacobian entries are stored and computed
877 /// - **Memory efficient**: Preallocated sparse structure avoids dynamic allocations
878 ///
879 /// Typically accounts for 40-60% of total optimization time (including sparse matrix ops).
880 ///
881 /// # When to Use
882 ///
883 /// Use this method in the main optimization loop when you need both residual and Jacobian
884 /// for linearization. For cost-only evaluation, use `compute_residual_sparse()` instead.
885 ///
886 /// # Example
887 ///
888 /// ```rust,ignore
889 /// // Inside optimizer loop:
890 /// let (residual, jacobian) = problem.compute_residual_and_jacobian_sparse(
891 /// &variables,
892 /// &variable_index_map,
893 /// &symbolic_structure,
894 /// )?;
895 ///
896 /// // Use for linear system: J^T J dx = -J^T r
897 /// ```
898 pub fn compute_residual_and_jacobian_sparse(
899 &self,
900 variables: &collections::HashMap<String, VariableEnum>,
901 variable_index_sparce_matrix: &collections::HashMap<String, usize>,
902 symbolic_structure: &SymbolicStructure,
903 ) -> error::ApexResult<(faer::Mat<f64>, sparse::SparseColMat<usize, f64>)> {
904 let total_residual = sync::Arc::new(sync::Mutex::new(faer::Col::<f64>::zeros(
905 self.total_residual_dimension,
906 )));
907
908 let jacobian_values: Result<Vec<Vec<f64>>, error::ApexError> = self
909 .residual_blocks
910 .par_iter()
911 .map(|(_, residual_block)| {
912 self.compute_residual_and_jacobian_block(
913 residual_block,
914 variables,
915 variable_index_sparce_matrix,
916 &total_residual,
917 )
918 })
919 .collect();
920
921 let jacobian_values: Vec<f64> = jacobian_values?.into_iter().flatten().collect();
922
923 let total_residual = sync::Arc::try_unwrap(total_residual)
924 .map_err(|_| {
925 error::ApexError::ThreadError("Failed to unwrap Arc for total residual".to_string())
926 })?
927 .into_inner()
928 .map_err(|_| {
929 error::ApexError::ThreadError(
930 "Failed to extract mutex inner value for total residual".to_string(),
931 )
932 })?;
933
934 // Convert faer Col to Mat (column vector as n×1 matrix)
935 let residual_faer = total_residual.as_ref().as_mat().to_owned();
936 let jacobian_sparse = sparse::SparseColMat::new_from_argsort(
937 symbolic_structure.pattern.clone(),
938 &symbolic_structure.order,
939 jacobian_values.as_slice(),
940 )
941 .map_err(|_| {
942 error::ApexError::MatrixOperation(
943 "Failed to create sparse Jacobian from argsort".to_string(),
944 )
945 })?;
946
947 Ok((residual_faer, jacobian_sparse))
948 }
949
950 /// Compute only the residual for a single residual block (no Jacobian).
951 ///
952 /// Helper method for `compute_residual_sparse()`.
953 fn compute_residual_block(
954 &self,
955 residual_block: &residual_block::ResidualBlock,
956 variables: &collections::HashMap<String, VariableEnum>,
957 total_residual: &sync::Arc<sync::Mutex<faer::Col<f64>>>,
958 ) -> error::ApexResult<()> {
959 let mut param_vectors: Vec<nalgebra::DVector<f64>> = Vec::new();
960
961 for var_key in &residual_block.variable_key_list {
962 if let Some(variable) = variables.get(var_key) {
963 param_vectors.push(variable.to_vector());
964 }
965 }
966
967 // Compute only residual (linearize still computes Jacobian internally,
968 // but we don't extract/store it)
969 let (mut res, _) = residual_block.factor.linearize(¶m_vectors, false);
970
971 // Apply loss function if present (critical for robust optimization)
972 if let Some(loss_func) = &residual_block.loss_func {
973 let squared_norm = res.dot(&res);
974 let corrector = corrector::Corrector::new(loss_func.as_ref(), squared_norm);
975 corrector.correct_residuals(&mut res);
976 }
977
978 let mut total_residual = total_residual.lock().map_err(|_| {
979 error::ApexError::ThreadError("Failed to acquire lock on total residual".to_string())
980 })?;
981
982 // Copy residual values from nalgebra DVector to faer Col
983 let start_idx = residual_block.residual_row_start_idx;
984 let dim = residual_block.factor.get_dimension();
985 let mut total_residual_mut = total_residual.as_mut();
986 for i in 0..dim {
987 total_residual_mut[start_idx + i] = res[i];
988 }
989
990 Ok(())
991 }
992
993 /// Compute residual and Jacobian for a single residual block.
994 ///
995 /// Helper method for `compute_residual_and_jacobian_sparse()`.
996 fn compute_residual_and_jacobian_block(
997 &self,
998 residual_block: &residual_block::ResidualBlock,
999 variables: &collections::HashMap<String, VariableEnum>,
1000 variable_index_sparce_matrix: &collections::HashMap<String, usize>,
1001 total_residual: &sync::Arc<sync::Mutex<faer::Col<f64>>>,
1002 ) -> error::ApexResult<Vec<f64>> {
1003 let mut param_vectors: Vec<nalgebra::DVector<f64>> = Vec::new();
1004 let mut var_sizes: Vec<usize> = Vec::new();
1005 let mut variable_local_idx_size_list = Vec::<(usize, usize)>::new();
1006 let mut count_variable_local_idx: usize = 0;
1007
1008 for var_key in &residual_block.variable_key_list {
1009 if let Some(variable) = variables.get(var_key) {
1010 param_vectors.push(variable.to_vector());
1011 let var_size = variable.get_size();
1012 var_sizes.push(var_size);
1013 variable_local_idx_size_list.push((count_variable_local_idx, var_size));
1014 count_variable_local_idx += var_size;
1015 }
1016 }
1017
1018 let (mut res, jac_opt) = residual_block.factor.linearize(¶m_vectors, true);
1019 let mut jac = jac_opt.expect("Jacobian should be computed when compute_jacobian=true");
1020
1021 // Apply loss function if present (critical for robust optimization)
1022 if let Some(loss_func) = &residual_block.loss_func {
1023 let squared_norm = res.dot(&res);
1024 let corrector = corrector::Corrector::new(loss_func.as_ref(), squared_norm);
1025 corrector.correct_jacobian(&res, &mut jac);
1026 corrector.correct_residuals(&mut res);
1027 }
1028
1029 // Update total residual
1030 {
1031 let mut total_residual = total_residual.lock().map_err(|_| {
1032 error::ApexError::ThreadError(
1033 "Failed to acquire lock on total residual".to_string(),
1034 )
1035 })?;
1036
1037 // Copy residual values from nalgebra DVector to faer Col
1038 let start_idx = residual_block.residual_row_start_idx;
1039 let dim = residual_block.factor.get_dimension();
1040 let mut total_residual_mut = total_residual.as_mut();
1041 for i in 0..dim {
1042 total_residual_mut[start_idx + i] = res[i];
1043 }
1044 }
1045
1046 // Extract Jacobian values in the correct order
1047 let mut local_jacobian_values = Vec::new();
1048 for (i, var_key) in residual_block.variable_key_list.iter().enumerate() {
1049 if variable_index_sparce_matrix.contains_key(var_key) {
1050 let (variable_local_idx, var_size) = variable_local_idx_size_list[i];
1051 let variable_jac = jac.view((0, variable_local_idx), (jac.shape().0, var_size));
1052
1053 for row_idx in 0..jac.shape().0 {
1054 for col_idx in 0..var_size {
1055 local_jacobian_values.push(variable_jac[(row_idx, col_idx)]);
1056 }
1057 }
1058 } else {
1059 return Err(error::ApexError::InvalidInput(format!(
1060 "Missing key {} in variable-to-column-index mapping",
1061 var_key
1062 )));
1063 }
1064 }
1065
1066 Ok(local_jacobian_values)
1067 }
1068
1069 /// Log residual vector to a text file
1070 pub fn log_residual_to_file(
1071 &self,
1072 residual: &nalgebra::DVector<f64>,
1073 filename: &str,
1074 ) -> Result<(), std::io::Error> {
1075 let mut file = fs::File::create(filename)?;
1076 writeln!(file, "# Residual vector - {} elements", residual.len())?;
1077 for (i, &value) in residual.iter().enumerate() {
1078 writeln!(file, "{}: {:.12}", i, value)?;
1079 }
1080 Ok(())
1081 }
1082
1083 /// Log sparse Jacobian matrix to a text file
1084 pub fn log_sparse_jacobian_to_file(
1085 &self,
1086 jacobian: &sparse::SparseColMat<usize, f64>,
1087 filename: &str,
1088 ) -> Result<(), std::io::Error> {
1089 let mut file = fs::File::create(filename)?;
1090 writeln!(
1091 file,
1092 "# Sparse Jacobian matrix - {} x {} ({} non-zeros)",
1093 jacobian.nrows(),
1094 jacobian.ncols(),
1095 jacobian.compute_nnz()
1096 )?;
1097 writeln!(file, "# Matrix saved as dimensions and non-zero count only")?;
1098 writeln!(file, "# For detailed access, convert to dense matrix first")?;
1099 Ok(())
1100 }
1101
1102 /// Log variables to a text file
1103 pub fn log_variables_to_file(
1104 &self,
1105 variables: &collections::HashMap<String, VariableEnum>,
1106 filename: &str,
1107 ) -> Result<(), std::io::Error> {
1108 let mut file = fs::File::create(filename)?;
1109 writeln!(file, "# Variables - {} total", variables.len())?;
1110 writeln!(file, "# Format: variable_name: [values...]")?;
1111
1112 let mut sorted_vars: Vec<_> = variables.keys().collect();
1113 sorted_vars.sort();
1114
1115 for var_name in sorted_vars {
1116 let var_vector = variables[var_name].to_vector();
1117 write!(file, "{}: [", var_name)?;
1118 for (i, &value) in var_vector.iter().enumerate() {
1119 write!(file, "{:.12}", value)?;
1120 if i < var_vector.len() - 1 {
1121 write!(file, ", ")?;
1122 }
1123 }
1124 writeln!(file, "]")?;
1125 }
1126 Ok(())
1127 }
1128
1129 /// Compute per-variable covariances and set them in Variable objects
1130 ///
1131 /// This method computes the full covariance matrix by inverting the Hessian
1132 /// from the linear solver, then extracts per-variable covariance blocks and
1133 /// stores them in the corresponding Variable objects.
1134 ///
1135 /// # Arguments
1136 /// * `linear_solver` - Mutable reference to the linear solver containing the cached Hessian
1137 /// * `variables` - Mutable map of variables where covariances will be stored
1138 /// * `variable_index_map` - Map from variable names to their starting column indices
1139 ///
1140 /// # Returns
1141 /// `Some(HashMap)` containing per-variable covariance matrices if successful, `None` otherwise
1142 ///
1143 pub fn compute_and_set_covariances(
1144 &self,
1145 linear_solver: &mut Box<dyn linalg::SparseLinearSolver>,
1146 variables: &mut collections::HashMap<String, VariableEnum>,
1147 variable_index_map: &collections::HashMap<String, usize>,
1148 ) -> Option<collections::HashMap<String, faer::Mat<f64>>> {
1149 // Compute the full covariance matrix (H^{-1}) using the linear solver
1150 linear_solver.compute_covariance_matrix()?;
1151 let full_cov = linear_solver.get_covariance_matrix()?.clone();
1152
1153 // Extract per-variable covariance blocks from the full matrix
1154 let per_var_covariances =
1155 linalg::extract_variable_covariances(&full_cov, variables, variable_index_map);
1156
1157 // Set covariances in Variable objects for easy access
1158 for (var_name, cov) in &per_var_covariances {
1159 if let Some(var) = variables.get_mut(var_name) {
1160 var.set_covariance(cov.clone());
1161 }
1162 }
1163
1164 Some(per_var_covariances)
1165 }
1166}
1167
1168#[cfg(test)]
1169mod tests {
1170 use super::*;
1171 use crate::core::factors::{BetweenFactorSE2, BetweenFactorSE3, PriorFactor};
1172 use crate::core::loss_functions::HuberLoss;
1173 use crate::manifold::{ManifoldType, se3::SE3};
1174 use nalgebra::{Quaternion, Vector3, dvector};
1175 use std::collections::HashMap;
1176
1177 /// Create a test SE2 dataset with 10 vertices in a loop
1178 fn create_se2_test_problem() -> (
1179 Problem,
1180 HashMap<String, (ManifoldType, nalgebra::DVector<f64>)>,
1181 ) {
1182 let mut problem = Problem::new();
1183 let mut initial_values = HashMap::new();
1184
1185 // Create 10 SE2 poses in a rough circle pattern
1186 let poses = vec![
1187 (0.0, 0.0, 0.0), // x0: origin
1188 (1.0, 0.0, 0.1), // x1: move right
1189 (1.5, 1.0, 0.5), // x2: move up-right
1190 (1.0, 2.0, 1.0), // x3: move up
1191 (0.0, 2.5, 1.5), // x4: move up-left
1192 (-1.0, 2.0, 2.0), // x5: move left
1193 (-1.5, 1.0, 2.5), // x6: move down-left
1194 (-1.0, 0.0, 3.0), // x7: move down
1195 (-0.5, -0.5, -2.8), // x8: move down-right
1196 (0.5, -0.5, -2.3), // x9: back towards origin
1197 ];
1198
1199 // Add vertices using [x, y, theta] ordering
1200 for (i, (x, y, theta)) in poses.iter().enumerate() {
1201 let var_name = format!("x{}", i);
1202 let se2_data = dvector![*x, *y, *theta];
1203 initial_values.insert(var_name, (manifold::ManifoldType::SE2, se2_data));
1204 }
1205
1206 // Add chain of between factors
1207 for i in 0..9 {
1208 let from_pose = poses[i];
1209 let to_pose = poses[i + 1];
1210
1211 // Compute relative transformation
1212 let dx = to_pose.0 - from_pose.0;
1213 let dy = to_pose.1 - from_pose.1;
1214 let dtheta = to_pose.2 - from_pose.2;
1215
1216 let between_factor = BetweenFactorSE2::new(dx, dy, dtheta);
1217 problem.add_residual_block(
1218 &[&format!("x{}", i), &format!("x{}", i + 1)],
1219 Box::new(between_factor),
1220 Some(Box::new(HuberLoss::new(1.0).unwrap())),
1221 );
1222 }
1223
1224 // Add loop closure from x9 back to x0
1225 let dx = poses[0].0 - poses[9].0;
1226 let dy = poses[0].1 - poses[9].1;
1227 let dtheta = poses[0].2 - poses[9].2;
1228
1229 let loop_closure = BetweenFactorSE2::new(dx, dy, dtheta);
1230 problem.add_residual_block(
1231 &["x9", "x0"],
1232 Box::new(loop_closure),
1233 Some(Box::new(HuberLoss::new(1.0).unwrap())),
1234 );
1235
1236 // Add prior factor for x0
1237 let prior_factor = PriorFactor {
1238 data: dvector![0.0, 0.0, 0.0],
1239 };
1240 problem.add_residual_block(&["x0"], Box::new(prior_factor), None);
1241
1242 (problem, initial_values)
1243 }
1244
    /// Create a test SE3 dataset with 8 vertices in a 3D pattern.
    ///
    /// Builds a cube of poses connected by 12 between factors (one per edge)
    /// plus a prior on x0, and returns the problem together with the initial
    /// values keyed by variable name.
    fn create_se3_test_problem() -> (
        Problem,
        HashMap<String, (manifold::ManifoldType, nalgebra::DVector<f64>)>,
    ) {
        let mut problem = Problem::new();
        let mut initial_values = HashMap::new();

        // Create 8 SE3 poses in a rough 3D cube pattern.
        // Tuple layout: (tx, ty, tz, qx, qy, qz, qw) — quaternion w is LAST here.
        let poses = vec![
            // Bottom face of cube
            (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0), // x0: origin
            (1.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.995), // x1: +X
            (1.0, 1.0, 0.0, 0.0, 0.0, 0.2, 0.98), // x2: +X+Y
            (0.0, 1.0, 0.0, 0.0, 0.0, 0.3, 0.955), // x3: +Y
            // Top face of cube
            (0.0, 0.0, 1.0, 0.1, 0.0, 0.0, 0.995), // x4: +Z
            (1.0, 0.0, 1.0, 0.1, 0.0, 0.1, 0.99), // x5: +X+Z
            (1.0, 1.0, 1.0, 0.1, 0.0, 0.2, 0.975), // x6: +X+Y+Z
            (0.0, 1.0, 1.0, 0.1, 0.0, 0.3, 0.95), // x7: +Y+Z
        ];

        // Add vertices using [tx, ty, tz, qw, qx, qy, qz] ordering
        // (note: qw moves to position 3 when building the data vector).
        for (i, (tx, ty, tz, qx, qy, qz, qw)) in poses.iter().enumerate() {
            let var_name = format!("x{}", i);
            let se3_data = dvector![*tx, *ty, *tz, *qw, *qx, *qy, *qz];
            initial_values.insert(var_name, (manifold::ManifoldType::SE3, se3_data));
        }

        // Add between factors connecting the cube edges
        let edges = vec![
            (0, 1),
            (1, 2),
            (2, 3),
            (3, 0), // Bottom face
            (4, 5),
            (5, 6),
            (6, 7),
            (7, 4), // Top face
            (0, 4),
            (1, 5),
            (2, 6),
            (3, 7), // Vertical edges
        ];

        for (from_idx, to_idx) in edges {
            let from_pose = poses[from_idx];
            let to_pose = poses[to_idx];

            // Create a simple relative transformation (simplified for testing):
            // translation is the world-frame delta, rotation is identity.
            let relative_se3 = SE3::from_translation_quaternion(
                Vector3::new(
                    to_pose.0 - from_pose.0, // dx
                    to_pose.1 - from_pose.1, // dy
                    to_pose.2 - from_pose.2, // dz
                ),
                Quaternion::new(1.0, 0.0, 0.0, 0.0), // identity quaternion
            );

            let between_factor = BetweenFactorSE3::new(relative_se3);
            problem.add_residual_block(
                &[&format!("x{}", from_idx), &format!("x{}", to_idx)],
                Box::new(between_factor),
                Some(Box::new(HuberLoss::new(1.0).unwrap())),
            );
        }

        // Add prior factor for x0 (identity pose, [t; qw qx qy qz] layout)
        let prior_factor = PriorFactor {
            data: dvector![0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
        };
        problem.add_residual_block(&["x0"], Box::new(prior_factor), None);

        (problem, initial_values)
    }
1320
    #[test]
    fn test_problem_construction_se2() {
        let (problem, initial_values) = create_se2_test_problem();

        // Test basic problem properties: the helper adds 9 chain factors,
        // 1 loop closure, and 1 prior, all 3-dimensional.
        assert_eq!(problem.num_residual_blocks(), 11); // 9 between + 1 loop closure + 1 prior
        assert_eq!(problem.total_residual_dimension, 33); // 11 * 3
        assert_eq!(initial_values.len(), 10);

        println!("SE2 Problem construction test passed");
        println!("Residual blocks: {}", problem.num_residual_blocks());
        println!("Total residual dim: {}", problem.total_residual_dimension);
        println!("Variables: {}", initial_values.len());
    }
1335
    #[test]
    fn test_problem_construction_se3() {
        let (problem, initial_values) = create_se3_test_problem();

        // Test basic problem properties: 12 cube-edge between factors + 1 prior.
        assert_eq!(problem.num_residual_blocks(), 13); // 12 between + 1 prior
        assert_eq!(problem.total_residual_dimension, 79); // 12 * 6 + 1 * 7 (SE3 between factors are 6-dim, prior factor is 7-dim)
        assert_eq!(initial_values.len(), 8);

        println!("SE3 Problem construction test passed");
        println!("Residual blocks: {}", problem.num_residual_blocks());
        println!("Total residual dim: {}", problem.total_residual_dimension);
        println!("Variables: {}", initial_values.len());
    }
1350
1351 #[test]
1352 fn test_variable_initialization_se2() {
1353 let (problem, initial_values) = create_se2_test_problem();
1354
1355 // Test variable initialization
1356 let variables = problem.initialize_variables(&initial_values);
1357 assert_eq!(variables.len(), 10);
1358
1359 // Test variable sizes
1360 for (name, var) in &variables {
1361 assert_eq!(
1362 var.get_size(),
1363 3,
1364 "SE2 variable {} should have size 3",
1365 name
1366 );
1367 }
1368
1369 // Test conversion to DVector
1370 for (name, var) in &variables {
1371 let vec = var.to_vector();
1372 assert_eq!(
1373 vec.len(),
1374 3,
1375 "SE2 variable {} vector should have length 3",
1376 name
1377 );
1378 }
1379
1380 println!("SE2 Variable initialization test passed");
1381 println!("Variables created: {}", variables.len());
1382 }
1383
1384 #[test]
1385 fn test_variable_initialization_se3() {
1386 let (problem, initial_values) = create_se3_test_problem();
1387
1388 // Test variable initialization
1389 let variables = problem.initialize_variables(&initial_values);
1390 assert_eq!(variables.len(), 8);
1391
1392 // Test variable sizes
1393 for (name, var) in &variables {
1394 assert_eq!(
1395 var.get_size(),
1396 6,
1397 "SE3 variable {} should have size 6 (DOF)",
1398 name
1399 );
1400 }
1401
1402 // Test conversion to DVector
1403 for (name, var) in &variables {
1404 let vec = var.to_vector();
1405 assert_eq!(
1406 vec.len(),
1407 7,
1408 "SE3 variable {} vector should have length 7",
1409 name
1410 );
1411 }
1412
1413 println!("SE3 Variable initialization test passed");
1414 println!("Variables created: {}", variables.len());
1415 }
1416
1417 #[test]
1418 fn test_column_mapping_se2() {
1419 let (problem, initial_values) = create_se2_test_problem();
1420 let variables = problem.initialize_variables(&initial_values);
1421
1422 // Create column mapping for variables
1423 let mut variable_index_sparce_matrix = HashMap::new();
1424 let mut col_offset = 0;
1425 let mut sorted_vars: Vec<_> = variables.keys().collect();
1426 sorted_vars.sort(); // Ensure consistent ordering
1427
1428 for var_name in sorted_vars {
1429 variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
1430 col_offset += variables[var_name].get_size();
1431 }
1432
1433 // Test total degrees of freedom
1434 let total_dof: usize = variables.values().map(|v| v.get_size()).sum();
1435 assert_eq!(total_dof, 30); // 10 variables * 3 DOF each
1436 assert_eq!(col_offset, 30);
1437
1438 // Test each variable has correct column mapping
1439 for (var_name, &col_idx) in &variable_index_sparce_matrix {
1440 assert!(
1441 col_idx < total_dof,
1442 "Column index {} for {} should be < {}",
1443 col_idx,
1444 var_name,
1445 total_dof
1446 );
1447 }
1448
1449 println!("SE2 Column mapping test passed");
1450 println!("Total DOF: {}", total_dof);
1451 println!("Variable mappings: {}", variable_index_sparce_matrix.len());
1452 }
1453
1454 #[test]
1455 fn test_symbolic_structure_se2() {
1456 let (problem, initial_values) = create_se2_test_problem();
1457 let variables = problem.initialize_variables(&initial_values);
1458
1459 // Create column mapping
1460 let mut variable_index_sparce_matrix = HashMap::new();
1461 let mut col_offset = 0;
1462 let mut sorted_vars: Vec<_> = variables.keys().collect();
1463 sorted_vars.sort();
1464
1465 for var_name in sorted_vars {
1466 variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
1467 col_offset += variables[var_name].get_size();
1468 }
1469
1470 // Build symbolic structure
1471 let symbolic_structure = problem
1472 .build_symbolic_structure(&variables, &variable_index_sparce_matrix, col_offset)
1473 .unwrap();
1474
1475 // Test symbolic structure dimensions
1476 assert_eq!(
1477 symbolic_structure.pattern.nrows(),
1478 problem.total_residual_dimension
1479 );
1480 assert_eq!(symbolic_structure.pattern.ncols(), 30); // total DOF
1481
1482 println!("SE2 Symbolic structure test passed");
1483 println!(
1484 "Symbolic matrix: {} x {}",
1485 symbolic_structure.pattern.nrows(),
1486 symbolic_structure.pattern.ncols()
1487 );
1488 }
1489
1490 #[test]
1491 fn test_residual_jacobian_computation_se2() {
1492 let (problem, initial_values) = create_se2_test_problem();
1493 let variables = problem.initialize_variables(&initial_values);
1494
1495 // Create column mapping
1496 let mut variable_index_sparce_matrix = HashMap::new();
1497 let mut col_offset = 0;
1498 let mut sorted_vars: Vec<_> = variables.keys().collect();
1499 sorted_vars.sort();
1500
1501 for var_name in sorted_vars {
1502 variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
1503 col_offset += variables[var_name].get_size();
1504 }
1505
1506 // Test sparse computation
1507 let symbolic_structure = problem
1508 .build_symbolic_structure(&variables, &variable_index_sparce_matrix, col_offset)
1509 .unwrap();
1510 let (residual_sparse, jacobian_sparse) = problem
1511 .compute_residual_and_jacobian_sparse(
1512 &variables,
1513 &variable_index_sparce_matrix,
1514 &symbolic_structure,
1515 )
1516 .unwrap();
1517
1518 // Test sparse dimensions
1519 assert_eq!(residual_sparse.nrows(), problem.total_residual_dimension);
1520 assert_eq!(jacobian_sparse.nrows(), problem.total_residual_dimension);
1521 assert_eq!(jacobian_sparse.ncols(), 30);
1522
1523 println!("SE2 Residual/Jacobian computation test passed");
1524 println!("Residual dimensions: {}", residual_sparse.nrows());
1525 println!(
1526 "Jacobian dimensions: {} x {}",
1527 jacobian_sparse.nrows(),
1528 jacobian_sparse.ncols()
1529 );
1530 }
1531
1532 #[test]
1533 fn test_residual_jacobian_computation_se3() {
1534 let (problem, initial_values) = create_se3_test_problem();
1535 let variables = problem.initialize_variables(&initial_values);
1536
1537 // Create column mapping
1538 let mut variable_index_sparce_matrix = HashMap::new();
1539 let mut col_offset = 0;
1540 let mut sorted_vars: Vec<_> = variables.keys().collect();
1541 sorted_vars.sort();
1542
1543 for var_name in sorted_vars {
1544 variable_index_sparce_matrix.insert(var_name.clone(), col_offset);
1545 col_offset += variables[var_name].get_size();
1546 }
1547
1548 // Test sparse computation
1549 let symbolic_structure = problem
1550 .build_symbolic_structure(&variables, &variable_index_sparce_matrix, col_offset)
1551 .unwrap();
1552 let (residual_sparse, jacobian_sparse) = problem
1553 .compute_residual_and_jacobian_sparse(
1554 &variables,
1555 &variable_index_sparce_matrix,
1556 &symbolic_structure,
1557 )
1558 .unwrap();
1559
1560 // Test sparse dimensions match
1561 assert_eq!(residual_sparse.nrows(), problem.total_residual_dimension);
1562 assert_eq!(jacobian_sparse.nrows(), problem.total_residual_dimension);
1563 assert_eq!(jacobian_sparse.ncols(), 48); // 8 variables * 6 DOF each
1564
1565 println!("SE3 Residual/Jacobian computation test passed");
1566 println!("Residual dimensions: {}", residual_sparse.nrows());
1567 println!(
1568 "Jacobian dimensions: {} x {}",
1569 jacobian_sparse.nrows(),
1570 jacobian_sparse.ncols()
1571 );
1572 }
1573
1574 #[test]
1575 fn test_residual_block_operations() {
1576 let mut problem = Problem::new();
1577
1578 // Test adding residual blocks
1579 let block_id1 = problem.add_residual_block(
1580 &["x0", "x1"],
1581 Box::new(BetweenFactorSE2::new(1.0, 0.0, 0.1)),
1582 Some(Box::new(HuberLoss::new(1.0).unwrap())),
1583 );
1584
1585 let block_id2 = problem.add_residual_block(
1586 &["x0"],
1587 Box::new(PriorFactor {
1588 data: dvector![0.0, 0.0, 0.0],
1589 }),
1590 None,
1591 );
1592
1593 assert_eq!(problem.num_residual_blocks(), 2);
1594 assert_eq!(problem.total_residual_dimension, 6); // 3 + 3
1595 assert_eq!(block_id1, 0);
1596 assert_eq!(block_id2, 1);
1597
1598 // Test removing residual blocks
1599 let removed_block = problem.remove_residual_block(block_id1);
1600 assert!(removed_block.is_some());
1601 assert_eq!(problem.num_residual_blocks(), 1);
1602 assert_eq!(problem.total_residual_dimension, 3); // Only prior factor remains
1603
1604 // Test removing non-existent block
1605 let non_existent = problem.remove_residual_block(999);
1606 assert!(non_existent.is_none());
1607
1608 println!("Residual block operations test passed");
1609 println!("Block operations working correctly");
1610 }
1611
    #[test]
    fn test_variable_constraints() {
        let mut problem = Problem::new();

        // Test fixing variables: each call records one fixed index per variable.
        problem.fix_variable("x0", 0);
        problem.fix_variable("x0", 1);
        problem.fix_variable("x1", 2);

        assert!(problem.fixed_variable_indexes.contains_key("x0"));
        assert!(problem.fixed_variable_indexes.contains_key("x1"));
        assert_eq!(problem.fixed_variable_indexes["x0"].len(), 2);
        assert_eq!(problem.fixed_variable_indexes["x1"].len(), 1);

        // Test unfixing variables: removes only the named variable's entry.
        problem.unfix_variable("x0");
        assert!(!problem.fixed_variable_indexes.contains_key("x0"));
        assert!(problem.fixed_variable_indexes.contains_key("x1"));

        // Test variable bounds: stored per variable, one entry per bounded index.
        problem.set_variable_bounds("x2", 0, -1.0, 1.0);
        problem.set_variable_bounds("x2", 1, -2.0, 2.0);
        problem.set_variable_bounds("x3", 0, 0.0, 5.0);

        assert!(problem.variable_bounds.contains_key("x2"));
        assert!(problem.variable_bounds.contains_key("x3"));
        assert_eq!(problem.variable_bounds["x2"].len(), 2);
        assert_eq!(problem.variable_bounds["x3"].len(), 1);

        // Test removing bounds: affects only the named variable.
        problem.remove_variable_bounds("x2");
        assert!(!problem.variable_bounds.contains_key("x2"));
        assert!(problem.variable_bounds.contains_key("x3"));

        println!("Variable constraints test passed");
        println!("Fix/unfix and bounds operations working correctly");
    }
1649}