//! sophus_autodiff/dual/vector.rs — dual vectors for forward-mode automatic differentiation.
1use super::scalar::IsDualScalar;
2use crate::{
3 linalg::{
4 SMat,
5 SVec,
6 },
7 prelude::{
8 IsRealScalar,
9 IsVector,
10 },
11};
12
/// A structure that holds the derivative of a vector-valued map with respect to its inputs.
///
/// For a function `f: X -> ℝʳ` (or a batch/SIMD variant thereof),
/// this structure stores each output dimension's derivative block:
///
/// - `out_vec[i]` contains the derivative matrix for output dimension `i`.
pub struct VectorValuedDerivative<
    S,
    const OUTROWS: usize,
    const BATCH: usize,
    const DM: usize,
    const DN: usize,
> {
    /// The output vector of dimension `OUTROWS`, where each element is a
    /// matrix of shape `[DM × DN]` storing the derivative for that output lane.
    pub out_vec: SVec<SMat<S, DM, DN>, OUTROWS>,
}
30
31impl<
32 S: IsRealScalar<BATCH, RealScalar = S>,
33 const OUTROWS: usize,
34 const BATCH: usize,
35 const DM: usize,
36 const DN: usize,
37 > VectorValuedDerivative<S, OUTROWS, BATCH, DM, DN>
38{
39 /// Creates a new instance set to all zeros.
40 pub fn zeros() -> Self {
41 VectorValuedDerivative {
42 out_vec: SVec::zeros(),
43 }
44 }
45}
46
/// A trait for a *dual vector*, supporting forward-mode AD on vector quantities.
///
/// A dual vector is a vector whose entries are themselves dual scalars
/// (with partial derivatives). This trait extends [`IsVector`] with AD-specific methods:
///
/// - Constructing a "variable" vector from a real vector (marking it for differentiation).
/// - Retrieving the entire derivative as a [`VectorValuedDerivative`].
///
/// # Generic Parameters
/// - `S`: A type implementing [`IsDualScalar`].
/// - `ROWS`: Number of entries in the vector.
/// - `BATCH`: Number of SIMD lanes (1 for plain scalars).
/// - `DM`, `DN`: Shape of each entry's derivative matrix.
pub trait IsDualVector<
    S: IsDualScalar<BATCH, DM, DN>,
    const ROWS: usize,
    const BATCH: usize,
    const DM: usize,
    const DN: usize,
>: IsVector<S, ROWS, BATCH, DM, DN>
{
    /// Creates a new dual vector from a purely *real* vector, marking its entries as variables
    /// for auto-differentiation.
    fn var(val: S::RealVector<ROWS>) -> Self;

    /// Returns the derivative as a [`VectorValuedDerivative`].
    ///
    /// If the vector has dimension `ROWS`, each element's derivative is a matrix of shape
    /// `[DM × DN]`, so the overall derivative is `ROWS` of those matrices.
    fn derivative(&self) -> VectorValuedDerivative<S::RealScalar, ROWS, BATCH, DM, DN>;
}
77
/// A helper trait marking that this dual vector is the result of a *curve* `f: ℝ -> ℝʳ`,
/// letting you retrieve a simpler derivative form for each dimension of the output.
pub trait IsDualVectorFromCurve<
    S: IsDualScalar<BATCH, 1, 1>, // scalar must represent the "DM = 1, DN = 1" scenario
    const ROWS: usize,
    const BATCH: usize,
>: IsDualVector<S, ROWS, BATCH, 1, 1>
{
    /// Returns the derivative of this vector w.r.t. the single scalar input,
    /// as a real vector of dimension `ROWS`.
    ///
    /// For example, if `f: ℝ -> ℝ^3`, then `curve_derivative()` is a 3D real vector.
    fn curve_derivative(&self) -> S::RealVector<ROWS>;
}
92
/// A convenience trait for dual vectors that store partial derivatives with respect to a
/// multi-dimensional input, but only a *single* output dimension is used. More precisely, this
/// trait is for `S: IsDualScalar<BATCH, DM, 1>` vectors.
///
/// It exposes a `jacobian()` method returning a real `[ROWS × DM]` matrix.
///
/// # Example
///
/// If the vector dimension is `ROWS`, and each entry is a dual scalar with shape `[DM × 1]`,
/// the overall derivative can be collected in a `[ROWS × DM]` real matrix.
pub trait HasJacobian<
    S: IsDualScalar<BATCH, DM, 1>,
    const ROWS: usize,
    const BATCH: usize,
    const DM: usize,
>: IsVector<S, ROWS, BATCH, DM, 1>
{
    /// Returns the `[ROWS × DM]` matrix collecting each component's derivative row.
    fn jacobian(&self) -> S::RealMatrix<ROWS, DM>;
}
113
#[test]
fn dual_vector_tests() {
    #[cfg(feature = "simd")]
    use crate::dual::DualBatchScalar;
    #[cfg(feature = "simd")]
    use crate::linalg::BatchScalarF64;
    use crate::{
        dual::dual_scalar::DualScalar,
        linalg::{
            IsVector,
            EPS_F64,
        },
        maps::{
            ScalarValuedVectorMap,
            VectorValuedVectorMap,
        },
        points::example_points,
        prelude::IsScalar,
    };

    // A trait for test runs: implemented once per real scalar type so the same
    // test body can be instantiated for f64 and for each SIMD batch width.
    #[cfg(test)]
    trait Test {
        fn run();
    }

    // A macro generating test code for both single-lane and batch-lane types.
    //
    // - `$scalar`: the real scalar type under test (e.g. `f64`).
    // - `$dual_scalar4_1`: the matching dual scalar with derivative shape [4 × 1].
    // - `$batch`: the number of SIMD lanes.
    macro_rules! def_test_template {
        ($scalar:ty, $dual_scalar4_1: ty, $batch:literal) => {
            #[cfg(test)]
            impl Test for $scalar {
                fn run() {
                    let points = example_points::<$scalar, 4, $batch,0,0>();

                    // We'll test a few scenarios with dot() and scaling, verifying
                    // that the derivative matches finite-difference approximations.
                    for p in points.clone() {
                        for p1 in points.clone() {
                            // 1) dot_fn: compute dot product => scalar
                            {
                                // Generic over the scalar type so the same function can be
                                // evaluated with real scalars (finite differences) and with
                                // dual scalars (forward-mode AD).
                                fn dot_fn<S: IsScalar<BATCH, DM, DN>, const BATCH: usize, const DM: usize, const DN: usize>(
                                    x: S::Vector<4>,
                                    y: S::Vector<4>,
                                ) -> S {
                                    x.dot(y)
                                }

                                // Finite-difference gradient w.r.t. `x` (y = p1 held fixed).
                                let finite_diff =
                                    ScalarValuedVectorMap::<$scalar, $batch>::sym_diff_quotient(
                                        |x| dot_fn(x, p1),
                                        p,
                                        EPS_F64,
                                    );
                                // AD gradient: mark `p` as the variable, keep `p1` constant.
                                let auto_grad = dot_fn::<$dual_scalar4_1, $batch,4,1>(
                                    <$dual_scalar4_1>::vector_var(p),
                                    <$dual_scalar4_1 as IsScalar<$batch,4,1>>::Vector::<4>::from_real_vector(p1),
                                ).derivative();

                                approx::assert_abs_diff_eq!(finite_diff, auto_grad, epsilon = 0.0001);
                            }

                            // 2) scale a vector: x -> x * 0.99 or x -> p1[0] * x
                            {
                                fn scale_fn<S: IsScalar<BATCH, DM, DN>, const BATCH: usize, const DM: usize, const DN: usize>(
                                    x: S::Vector<4>,
                                    s: S
                                ) -> S::Vector<4> {
                                    x.scaled(s)
                                }

                                // scale the vector by a constant 0.99; the Jacobian
                                // w.r.t. `x` should match the finite-difference estimate
                                let finite_diff = VectorValuedVectorMap::<$scalar, $batch>::sym_diff_quotient_jacobian(
                                    |x| scale_fn::<$scalar, $batch,0,0>(x, <$scalar>::from_f64(0.99)),
                                    p,
                                    EPS_F64,
                                );
                                let auto_grad = scale_fn::<$dual_scalar4_1, $batch,4,1>(
                                    <$dual_scalar4_1>::vector_var(p),
                                    <$dual_scalar4_1>::from_f64(0.99)
                                ).jacobian();

                                approx::assert_abs_diff_eq!(finite_diff, auto_grad, epsilon = 0.0001);

                                // scale the vector by p1[0], which is a single scalar from p1
                                // so we only look at the partial w.r.t. x, not p1
                                let finite_diff = VectorValuedVectorMap::<$scalar, $batch>::sym_diff_quotient_jacobian(
                                    |x| scale_fn::<$scalar, $batch,0,0>(p1, x[0]),
                                    p,
                                    EPS_F64,
                                );
                                // convert p1 to a dual vector, then get the first element
                                let auto_grad = scale_fn::<$dual_scalar4_1, $batch,4,1>(
                                    <$dual_scalar4_1 as IsScalar<$batch,4,1>>::Vector::<4>::from_real_vector(p1),
                                    <$dual_scalar4_1>::vector_var(p).elem(0)
                                ).jacobian();

                                approx::assert_abs_diff_eq!(finite_diff, auto_grad, epsilon = 0.0001);
                            }
                        }
                    }
                }
            }
        };
    }

    // Generate tests for single-lane f64 and batch-lane batch scalars
    def_test_template!(f64, DualScalar<4,1>, 1);
    #[cfg(feature = "simd")]
    def_test_template!(BatchScalarF64<2>, DualBatchScalar<2, 4, 1>, 2);
    #[cfg(feature = "simd")]
    def_test_template!(BatchScalarF64<4>, DualBatchScalar<4, 4, 1>, 4);

    // Run the tests:
    f64::run();
    #[cfg(feature = "simd")]
    BatchScalarF64::<2>::run();
    #[cfg(feature = "simd")]
    BatchScalarF64::<4>::run();
}