1mod traits;
38pub use traits::{OptError, OptObserver, OptObserverVec, OptParams, OptResult, Optimizer};
39
40mod macros;
41
42mod gauss_newton;
43pub use gauss_newton::GaussNewton;
44
45mod levenberg_marquardt;
46pub use levenberg_marquardt::LevenMarquardt;
47
#[cfg(test)]
pub mod test {
    use faer::assert_matrix_eq;
    use nalgebra::{DefaultAllocator, DimNameAdd, DimNameSum, ToTypenum};

    use super::*;
    use crate::{
        containers::{FactorBuilder, Graph, Values},
        dtype,
        linalg::{AllocatorBuffer, Const, DualAllocator, DualVector, VectorX},
        residuals::{BetweenResidual, PriorResidual, Residual},
        symbols::X,
        variables::VariableDtype,
    };

    /// Shared harness: check that an optimizer recovers a variable pinned by a
    /// single prior factor.
    ///
    /// Starts `X(0)` at the identity, adds a [`PriorResidual`] anchored at a
    /// non-trivial point `p` (the exponential of a small deterministic tangent
    /// vector), runs the optimizer produced by `new`, and asserts the result
    /// matches `p` in the tangent space (`ominus`) to within `1e-6`.
    ///
    /// `new` builds the optimizer under test from the assembled graph, so the
    /// same scenario can exercise every [`Optimizer`] implementation.
    pub fn optimize_prior<
        O,
        const DIM: usize,
        // With the "serde" feature, variables must also be tagged for
        // typetag-based (de)serialization; the bound differs per feature.
        #[cfg(feature = "serde")] T: VariableDtype<Dim = nalgebra::Const<DIM>> + 'static + typetag::Tagged,
        #[cfg(not(feature = "serde"))] T: VariableDtype<Dim = nalgebra::Const<DIM>> + 'static,
    >(
        new: &dyn Fn(Graph) -> O,
    ) where
        PriorResidual<T>: Residual,
        O: Optimizer<Input = Values>,
    {
        // Ground-truth point: exp of the tangent vector [0.1, 0.2, ...].
        let t = VectorX::from_fn(T::DIM, |_, i| ((i + 1) as dtype) / 10.0);
        let p = T::exp(t.as_view());

        // Initial guess deliberately away from the truth (identity).
        let mut values = Values::new();
        values.insert_unchecked(X(0), T::identity());

        let mut graph = Graph::new();
        let res = PriorResidual::new(p.clone());
        let factor = FactorBuilder::new1_unchecked(res, X(0)).build();
        graph.add_factor(factor);

        let mut opt = new(graph);
        values = opt.optimize(values).expect("Optimization failed");

        // Solution must coincide with the prior's anchor point.
        let out: &T = values.get_unchecked(X(0)).expect("Missing X(0)");
        assert_matrix_eq!(
            out.ominus(&p),
            VectorX::zeros(T::DIM),
            comp = abs,
            tol = 1e-6
        );
    }

    /// Shared harness: check that an optimizer solves a two-variable chain —
    /// a prior on `X(0)` plus a between factor from `X(0)` to `X(1)`.
    ///
    /// Both variables start at the identity; after optimization `X(0)` must
    /// match `p1` and `X(1)` must match `p2` (tangent-space error below
    /// `1e-6`). The extra `DIM_DOUBLE` const and the `DimNameSum` bounds exist
    /// because the between factor differentiates over both variables at once,
    /// requiring a dual allocator of dimension `2 * DIM`.
    pub fn optimize_between<
        O,
        const DIM: usize,
        const DIM_DOUBLE: usize,
        // Same feature-dependent variable bound as `optimize_prior`.
        #[cfg(feature = "serde")] T: VariableDtype<Dim = nalgebra::Const<DIM>> + 'static + typetag::Tagged,
        #[cfg(not(feature = "serde"))] T: VariableDtype<Dim = nalgebra::Const<DIM>> + 'static,
    >(
        new: &dyn Fn(Graph) -> O,
    ) where
        PriorResidual<T>: Residual,
        BetweenResidual<T>: Residual,
        O: Optimizer<Input = Values>,
        Const<DIM>: ToTypenum,
        AllocatorBuffer<DimNameSum<Const<DIM>, Const<DIM>>>: Sync + Send,
        DefaultAllocator: DualAllocator<DimNameSum<Const<DIM>, Const<DIM>>>,
        DualVector<DimNameSum<Const<DIM>, Const<DIM>>>: Copy,
        Const<DIM>: DimNameAdd<Const<DIM>>,
    {
        // Two distinct ground-truth points from deterministic tangent vectors.
        let t = VectorX::from_fn(T::DIM, |_, i| ((i as dtype) - (T::DIM as dtype)) / 10.0);
        let p1 = T::exp(t.as_view());

        let t = VectorX::from_fn(T::DIM, |_, i| ((i + 1) as dtype) / 10.0);
        let p2 = T::exp(t.as_view());

        // Both variables start at identity, away from the truth.
        let mut values = Values::new();
        values.insert_unchecked(X(0), T::identity());
        values.insert_unchecked(X(1), T::identity());

        // Prior pins X(0) to p1...
        let mut graph = Graph::new();
        let res = PriorResidual::new(p1.clone());
        let factor = FactorBuilder::new1_unchecked(res, X(0)).build();
        graph.add_factor(factor);

        // ...and the between factor constrains X(1) relative to X(0) by the
        // true relative transform p2 ⊖ p1.
        let diff = p2.minus(&p1);
        let res = BetweenResidual::new(diff);
        let factor = FactorBuilder::new2_unchecked(res, X(0), X(1)).build();
        graph.add_factor(factor);

        let mut opt = new(graph);
        values = opt.optimize(values).expect("Optimization failed");

        let out1: &T = values.get_unchecked(X(0)).expect("Missing X(0)");
        assert_matrix_eq!(
            out1.ominus(&p1),
            VectorX::zeros(T::DIM),
            comp = abs,
            tol = 1e-6
        );

        let out2: &T = values.get_unchecked(X(1)).expect("Missing X(1)");
        assert_matrix_eq!(
            out2.ominus(&p2),
            VectorX::zeros(T::DIM),
            comp = abs,
            tol = 1e-6
        );
    }
}