// apex_camera_models/fov.rs — Field-of-View (FOV) fisheye camera model.
1//! Field-of-View (FOV) Camera Model
2//!
3//! A fisheye camera model using a field-of-view parameter for radial distortion.
4//!
5//! # Mathematical Model
6//!
7//! ## Projection (3D → 2D)
8//!
9//! For a 3D point p = (x, y, z) in camera coordinates:
10//!
11//! ```text
12//! r = √(x² + y²)
13//! atan_wrd = atan2(2·tan(w/2)·r, z)
14//! rd = atan_wrd / (r·w)    (if r > 0)
15//! rd = 2·tan(w/2) / w       (if r ≈ 0)
16//!
17//! mx = x · rd
18//! my = y · rd
19//! u = fx · mx + cx
20//! v = fy · my + cy
21//! ```
22//!
23//! where w is the field-of-view parameter (0 < w ≤ π).
24//!
25//! ## Unprojection (2D → 3D)
26//!
27//! Uses trigonometric inverse with special handling near optical axis.
28//!
29//! # Parameters
30//!
31//! - **Intrinsics**: fx, fy, cx, cy
32//! - **Distortion**: w (field-of-view parameter) (5 parameters total)
33//!
34//! # Use Cases
35//!
36//! - Fisheye cameras in SLAM applications
37//! - Wide field-of-view lenses
38//!
39//! # References
40//!
41//! - Zhang et al., "Simultaneous Localization and Mapping with Fisheye Cameras"
42//!   https://arxiv.org/pdf/1807.08957
43
44use crate::{CameraModel, CameraModelError, DistortionModel, PinholeParams};
45use nalgebra::{DVector, SMatrix, Vector2, Vector3};
46
/// FOV camera model with 5 parameters.
///
/// Combines a standard pinhole projection with a single field-of-view
/// distortion parameter `w`, for a total of 5 intrinsics: [fx, fy, cx, cy, w].
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct FovCamera {
    /// Pinhole intrinsics: focal lengths (fx, fy) and principal point (cx, cy).
    pub pinhole: PinholeParams,
    /// Distortion parameters; expected to be [`DistortionModel::FOV`] with parameter `w`.
    pub distortion: DistortionModel,
}
53
impl FovCamera {
    /// Create a new Field-of-View (FOV) camera.
    ///
    /// # Arguments
    ///
    /// * `pinhole` - Pinhole parameters (fx, fy, cx, cy).
    /// * `distortion` - MUST be [`DistortionModel::FOV`] with parameter `w`.
    ///
    /// # Returns
    ///
    /// Returns a new `FovCamera` instance if the distortion model matches
    /// and all parameters pass validation.
    ///
    /// # Errors
    ///
    /// Returns [`CameraModelError::InvalidParams`] if `distortion` is not [`DistortionModel::FOV`]
    /// or if any parameter fails validation.
    pub fn new(
        pinhole: PinholeParams,
        distortion: DistortionModel,
    ) -> Result<Self, CameraModelError> {
        let camera = Self {
            pinhole,
            distortion,
        };
        // Reject invalid intrinsics / wrong distortion variant up front.
        camera.validate_params()?;
        Ok(camera)
    }

    /// Helper method to extract the distortion parameter.
    ///
    /// # Returns
    ///
    /// Returns the `w` parameter for FOV.
    /// If the distortion model is incorrect (which shouldn't happen for instances
    /// built through [`FovCamera::new`]), returns `0.0`.
    fn distortion_params(&self) -> f64 {
        match self.distortion {
            DistortionModel::FOV { w } => w,
            _ => 0.0,
        }
    }

    /// Estimates the `w` parameter from 3D-2D point correspondences.
    ///
    /// Despite the name (kept for interface consistency with the other camera
    /// models), the implementation is a 1-D grid search, not a linear least
    /// squares solve: `w` is swept over [0.10, 2.99] in steps of 0.01 and the
    /// candidate with the smallest mean reprojection error is kept. The
    /// intrinsic parameters (fx, fy, cx, cy) are assumed to already be set
    /// and are not modified.
    ///
    /// # Arguments
    ///
    /// * `points_3d`: Matrix3xX<f64> - 3D points in camera coordinates (each column is a point)
    /// * `points_2d`: Matrix2xX<f64> - Corresponding 2D points in image coordinates
    ///
    /// # Returns
    ///
    /// Returns `Ok(())` on success or a `CameraModelError` if the point counts
    /// differ, fewer than 2 correspondences are given, or the estimated
    /// parameters fail validation.
    pub fn linear_estimation(
        &mut self,
        points_3d: &nalgebra::Matrix3xX<f64>,
        points_2d: &nalgebra::Matrix2xX<f64>,
    ) -> Result<(), CameraModelError> {
        // Check if the number of 2D and 3D points match
        if points_2d.ncols() != points_3d.ncols() {
            return Err(CameraModelError::InvalidParams(
                "Number of 2D and 3D points must match".to_string(),
            ));
        }

        let num_points = points_2d.ncols();

        if num_points < 2 {
            return Err(CameraModelError::InvalidParams(
                "Need at least 2 point correspondences for linear estimation".to_string(),
            ));
        }

        let mut best_w = 1.0;
        let mut best_error = f64::INFINITY;

        // Grid search over w in [0.10, 2.99] with step 0.01.
        for w_test in (10..300).map(|i| i as f64 / 100.0) {
            let mut error_sum = 0.0;
            let mut valid_count = 0;

            for i in 0..num_points {
                let x = points_3d[(0, i)];
                let y = points_3d[(1, i)];
                let z = points_3d[(2, i)];
                let u_observed = points_2d[(0, i)];
                let v_observed = points_2d[(1, i)];

                let r2 = x * x + y * y;
                let r = r2.sqrt();

                // Forward FOV projection with the candidate w; atan2 keeps the
                // angle well-defined even for z <= 0 (unlike `project`'s atan).
                let tan_w_half = (w_test / 2.0).tan();
                let atan_wrd = (2.0 * tan_w_half * r).atan2(z);

                let eps_sqrt = f64::EPSILON.sqrt();
                let rd = if r2 < eps_sqrt {
                    // Near the optical axis: constant small-r fallback.
                    2.0 * tan_w_half / w_test
                } else {
                    atan_wrd / (r * w_test)
                };

                let mx = x * rd;
                let my = y * rd;

                let u_predicted = self.pinhole.fx * mx + self.pinhole.cx;
                let v_predicted = self.pinhole.fy * my + self.pinhole.cy;

                let error = ((u_predicted - u_observed).powi(2)
                    + (v_predicted - v_observed).powi(2))
                .sqrt();

                // Ignore non-finite residuals (e.g. degenerate geometry).
                if error.is_finite() {
                    error_sum += error;
                    valid_count += 1;
                }
            }

            if valid_count > 0 {
                let avg_error = error_sum / valid_count as f64;
                if avg_error < best_error {
                    best_error = avg_error;
                    best_w = w_test;
                }
            }
        }

        self.distortion = DistortionModel::FOV { w: best_w };

        // Validate the estimated parameters.
        self.validate_params()?;

        Ok(())
    }
}
188
189/// Convert camera to dynamic vector of intrinsic parameters.
190///
191/// # Layout
192///
193/// The parameters are ordered as: [fx, fy, cx, cy, w]
194impl From<&FovCamera> for DVector<f64> {
195    fn from(camera: &FovCamera) -> Self {
196        let w = camera.distortion_params();
197        DVector::from_vec(vec![
198            camera.pinhole.fx,
199            camera.pinhole.fy,
200            camera.pinhole.cx,
201            camera.pinhole.cy,
202            w,
203        ])
204    }
205}
206
207/// Convert camera to fixed-size array of intrinsic parameters.
208///
209/// # Layout
210///
211/// The parameters are ordered as: [fx, fy, cx, cy, w]
212impl From<&FovCamera> for [f64; 5] {
213    fn from(camera: &FovCamera) -> Self {
214        let w = camera.distortion_params();
215        [
216            camera.pinhole.fx,
217            camera.pinhole.fy,
218            camera.pinhole.cx,
219            camera.pinhole.cy,
220            w,
221        ]
222    }
223}
224
/// Create camera from slice of intrinsic parameters.
///
/// # Layout
///
/// Expected parameter order: [fx, fy, cx, cy, w]
///
/// # Errors
///
/// Returns [`CameraModelError::InvalidParams`] if the slice has fewer than
/// 5 elements; it never panics. Note that this conversion does NOT run
/// `validate_params` — use [`try_from_params`] for a validated construction.
impl TryFrom<&[f64]> for FovCamera {
    type Error = CameraModelError;

    fn try_from(params: &[f64]) -> Result<Self, Self::Error> {
        if params.len() < 5 {
            return Err(CameraModelError::InvalidParams(format!(
                "FovCamera requires at least 5 parameters, got {}",
                params.len()
            )));
        }
        Ok(Self {
            pinhole: PinholeParams {
                fx: params[0],
                fy: params[1],
                cx: params[2],
                cy: params[3],
            },
            distortion: DistortionModel::FOV { w: params[4] },
        })
    }
}
255
256/// Create camera from fixed-size array of intrinsic parameters.
257///
258/// # Layout
259///
260/// Expected parameter order: [fx, fy, cx, cy, w]
261impl From<[f64; 5]> for FovCamera {
262    fn from(params: [f64; 5]) -> Self {
263        Self {
264            pinhole: PinholeParams {
265                fx: params[0],
266                fy: params[1],
267                cx: params[2],
268                cy: params[3],
269            },
270            distortion: DistortionModel::FOV { w: params[4] },
271        }
272    }
273}
274
275/// Creates a `FovCamera` from a parameter slice with validation.
276///
277/// Unlike `From<&[f64]>`, this constructor validates all parameters
278/// and returns a `Result` instead of panicking on invalid input.
279///
280/// # Errors
281///
282/// Returns `CameraModelError::InvalidParams` if fewer than 5 parameters are provided.
283/// Returns validation errors if focal lengths are non-positive or w is out of range.
284pub fn try_from_params(params: &[f64]) -> Result<FovCamera, CameraModelError> {
285    let camera = FovCamera::try_from(params)?;
286    camera.validate_params()?;
287    Ok(camera)
288}
289
290impl CameraModel for FovCamera {
291    const INTRINSIC_DIM: usize = 5;
292    type IntrinsicJacobian = SMatrix<f64, 2, 5>;
293    type PointJacobian = SMatrix<f64, 2, 3>;
294
295    /// Projects a 3D point to 2D image coordinates.
296    ///
297    /// # Mathematical Formula
298    ///
299    /// Uses atan-based radial distortion with FOV parameter w.
300    ///
301    /// # Arguments
302    ///
303    /// * `p_cam` - 3D point in camera coordinate frame.
304    ///
305    /// # Returns
306    ///
307    /// - `Ok(uv)` - 2D image coordinates if valid.
308    ///
309    /// # Errors
310    ///
311    /// Returns [`CameraModelError::ProjectionOutOfBounds`] if `z` is too small.
312    fn project(&self, p_cam: &Vector3<f64>) -> Result<Vector2<f64>, CameraModelError> {
313        let x = p_cam[0];
314        let y = p_cam[1];
315        let z = p_cam[2];
316
317        if z < f64::EPSILON.sqrt() {
318            return Err(CameraModelError::ProjectionOutOfBounds);
319        }
320
321        let r = (x * x + y * y).sqrt();
322        let w = self.distortion_params();
323        let tan_w_2 = (w / 2.0).tan();
324        let mul2tanwby2 = tan_w_2 * 2.0;
325
326        let rd = if r > crate::GEOMETRIC_PRECISION {
327            let atan_wrd = (mul2tanwby2 * r / z).atan();
328            atan_wrd / (r * w)
329        } else {
330            mul2tanwby2 / w
331        };
332
333        let mx = x * rd;
334        let my = y * rd;
335
336        Ok(Vector2::new(
337            self.pinhole.fx * mx + self.pinhole.cx,
338            self.pinhole.fy * my + self.pinhole.cy,
339        ))
340    }
341
342    /// Unprojects a 2D image point to a 3D ray.
343    ///
344    /// # Algorithm
345    ///
346    /// Trigonometric inverse using sin/cos relationships.
347    ///
348    /// # Arguments
349    ///
350    /// * `point_2d` - 2D point in image coordinates.
351    ///
352    /// # Returns
353    ///
354    /// - `Ok(ray)` - Normalized 3D ray direction.
355    ///
356    /// # Errors
357    ///
358    /// This model does not explicitly fail unprojection unless internal math errors occur, in which case it propagates them.
359    fn unproject(&self, point_2d: &Vector2<f64>) -> Result<Vector3<f64>, CameraModelError> {
360        let u = point_2d.x;
361        let v = point_2d.y;
362
363        let w = self.distortion_params();
364        let tan_w_2 = (w / 2.0).tan();
365        let mul2tanwby2 = tan_w_2 * 2.0;
366
367        let mx = (u - self.pinhole.cx) / self.pinhole.fx;
368        let my = (v - self.pinhole.cy) / self.pinhole.fy;
369
370        let r2 = mx * mx + my * my;
371        let rd = r2.sqrt();
372
373        if rd < crate::GEOMETRIC_PRECISION {
374            return Ok(Vector3::new(0.0, 0.0, 1.0));
375        }
376
377        let ru = (rd * w).tan() / mul2tanwby2;
378
379        let norm_factor = (1.0 + ru * ru).sqrt();
380        let x = mx * ru / (rd * norm_factor);
381        let y = my * ru / (rd * norm_factor);
382        let z = 1.0 / norm_factor;
383
384        Ok(Vector3::new(x, y, z))
385    }
386
387    /// Jacobian of projection w.r.t. 3D point coordinates (2×3).
388    ///
389    /// # Mathematical Derivation
390    ///
391    /// For the FOV camera model, projection is defined as:
392    ///
393    /// ```text
394    /// r = √(x² + y²)
395    /// α = 2·tan(w/2)·r / z
396    /// atan_wrd = atan(α)
397    /// rd = atan_wrd / (r·w)    (if r > 0)
398    /// rd = 2·tan(w/2) / w       (if r ≈ 0)
399    ///
400    /// mx = x · rd
401    /// my = y · rd
402    /// u = fx · mx + cx
403    /// v = fy · my + cy
404    /// ```
405    ///
406    /// ## Jacobian Structure
407    ///
408    /// Computing ∂u/∂p and ∂v/∂p where p = (x, y, z):
409    ///
410    /// ```text
411    /// J_point = [ ∂u/∂x  ∂u/∂y  ∂u/∂z ]
412    ///           [ ∂v/∂x  ∂v/∂y  ∂v/∂z ]
413    /// ```
414    ///
415    /// Chain rule for u = fx · x · rd + cx and v = fy · y · rd + cy:
416    ///
417    /// ```text
418    /// ∂u/∂x = fx · ∂(x·rd)/∂x = fx · (rd + x · ∂rd/∂x)
419    /// ∂u/∂y = fx · ∂(x·rd)/∂y = fx · x · ∂rd/∂y
420    /// ∂u/∂z = fx · ∂(x·rd)/∂z = fx · x · ∂rd/∂z
421    ///
422    /// ∂v/∂x = fy · ∂(y·rd)/∂x = fy · y · ∂rd/∂x
423    /// ∂v/∂y = fy · ∂(y·rd)/∂y = fy · (rd + y · ∂rd/∂y)
424    /// ∂v/∂z = fy · ∂(y·rd)/∂z = fy · y · ∂rd/∂z
425    /// ```
426    ///
427    /// Computing ∂rd/∂x, ∂rd/∂y, ∂rd/∂z for r > 0 (rd = atan(α) / (r·w), α = 2·tan(w/2)·r / z):
428    ///
429    /// ```text
430    /// ∂rd/∂r = [∂atan/∂α · ∂α/∂r · r·w - atan(α) · w] / (r·w)²
431    ///        = [1/(1+α²) · 2·tan(w/2)/z · r·w - atan(α) · w] / (r·w)²
432    ///
433    /// ∂rd/∂z = ∂atan/∂α · ∂α/∂z / (r·w)
434    ///        = 1/(1+α²) · (-2·tan(w/2)·r/z²) / (r·w)
435    /// ```
436    ///
437    /// Then using ∂r/∂x = x/r and ∂r/∂y = y/r:
438    ///
439    /// ```text
440    /// ∂rd/∂x = ∂rd/∂r · ∂r/∂x = ∂rd/∂r · x/r
441    /// ∂rd/∂y = ∂rd/∂r · ∂r/∂y = ∂rd/∂r · y/r
442    /// ∂rd/∂z = (computed directly above)
443    /// ```
444    ///
445    /// Near optical axis (r < ε): rd = 2·tan(w/2) / w is constant, so ∂rd/∂x = ∂rd/∂y = ∂rd/∂z = 0:
446    /// ```text
447    /// J_point = [ fx·rd  0      0  ]
448    ///           [ 0      fy·rd  0  ]
449    /// ```
450    ///
451    /// # Arguments
452    ///
453    /// * `p_cam` - 3D point in camera coordinate frame.
454    ///
455    /// # Returns
456    ///
457    /// Returns the 2x3 Jacobian matrix.
458    ///
459    /// # References
460    ///
461    /// - Devernay & Faugeras, "Straight lines have to be straight", Machine Vision and Applications 2001
462    /// - Zhang et al., "Fisheye Camera Calibration Using Principal Point Constraints", PAMI 2012
463    ///
464    /// # Numerical Verification
465    ///
466    /// This analytical Jacobian is verified against numerical differentiation in
467    /// `test_jacobian_point_numerical()` with tolerance < 1e-6.
    fn jacobian_point(&self, p_cam: &Vector3<f64>) -> Self::PointJacobian {
        let x = p_cam[0];
        let y = p_cam[1];
        let z = p_cam[2];

        let r = (x * x + y * y).sqrt();
        let w = self.distortion_params();
        let tan_w_2 = (w / 2.0).tan();
        let mul2tanwby2 = tan_w_2 * 2.0; // k = 2·tan(w/2)

        // Near the optical axis rd is the constant 2·tan(w/2)/w, so all
        // ∂rd/∂· terms vanish and the Jacobian is diagonal.
        if r < crate::GEOMETRIC_PRECISION {
            let rd = mul2tanwby2 / w;
            return SMatrix::<f64, 2, 3>::new(
                self.pinhole.fx * rd,
                0.0,
                0.0,
                0.0,
                self.pinhole.fy * rd,
                0.0,
            );
        }

        // rd = atan(k·r/z) / (r·w)
        let atan_wrd = (mul2tanwby2 * r / z).atan();
        let rd = atan_wrd / (r * w);

        // Derivatives of atan(α) with α = k·r/z:
        // ∂atan/∂r = k·z/(z² + k²·r²), ∂atan/∂z = -k·r/(z² + k²·r²).
        let datan_dr = mul2tanwby2 * z / (z * z + mul2tanwby2 * mul2tanwby2 * r * r);
        let datan_dz = -mul2tanwby2 * r / (z * z + mul2tanwby2 * mul2tanwby2 * r * r);

        // Quotient rule on rd = atan(α)/(r·w).
        let drd_dr = (datan_dr * r - atan_wrd) / (r * r * w);
        let drd_dz = datan_dz / (r * w);

        // Chain rule through r = √(x² + y²).
        let dr_dx = x / r;
        let dr_dy = y / r;

        // mx = x·rd, my = y·rd → product rule.
        let dmx_dx = rd + x * drd_dr * dr_dx;
        let dmx_dy = x * drd_dr * dr_dy;
        let dmx_dz = x * drd_dz;

        let dmy_dx = y * drd_dr * dr_dx;
        let dmy_dy = rd + y * drd_dr * dr_dy;
        let dmy_dz = y * drd_dz;

        // Scale the normalized-plane derivatives by the focal lengths.
        SMatrix::<f64, 2, 3>::new(
            self.pinhole.fx * dmx_dx,
            self.pinhole.fx * dmx_dy,
            self.pinhole.fx * dmx_dz,
            self.pinhole.fy * dmy_dx,
            self.pinhole.fy * dmy_dy,
            self.pinhole.fy * dmy_dz,
        )
    }
520
521    /// Jacobian of projection w.r.t. intrinsic parameters (2×5).
522    ///
523    /// # Mathematical Derivation
524    ///
525    /// The FOV camera has 5 intrinsic parameters: [fx, fy, cx, cy, w]
526    ///
527    /// ## Projection Equations
528    ///
529    /// ```text
530    /// u = fx · mx + cx
531    /// v = fy · my + cy
532    /// ```
533    ///
534    /// where mx = x · rd and my = y · rd, with:
535    ///
536    /// ```text
537    /// rd = atan(2·tan(w/2)·r/z) / (r·w)  (for r > 0)
538    /// rd = 2·tan(w/2) / w                 (for r ≈ 0)
539    /// ```
540    ///
541    /// ## Jacobian Structure
542    ///
543    /// Intrinsic Jacobian (2×5):
544    /// ```text
545    /// J = [ ∂u/∂fx  ∂u/∂fy  ∂u/∂cx  ∂u/∂cy  ∂u/∂w ]
546    ///     [ ∂v/∂fx  ∂v/∂fy  ∂v/∂cx  ∂v/∂cy  ∂v/∂w ]
547    /// ```
548    ///
549    /// ## Linear Parameters (fx, fy, cx, cy)
550    ///
551    /// These appear linearly in the projection equations:
552    ///
553    /// ```text
554    /// ∂u/∂fx = mx,     ∂u/∂fy = 0,      ∂u/∂cx = 1,      ∂u/∂cy = 0
555    /// ∂v/∂fx = 0,      ∂v/∂fy = my,     ∂v/∂cx = 0,      ∂v/∂cy = 1
556    /// ```
557    ///
558    /// ## Distortion Parameter (w)
559    ///
560    /// The parameter w affects the distortion factor rd. We need ∂rd/∂w.
561    ///
562    /// ### Case 1: r > 0 (Non-Optical Axis)
563    ///
564    /// Starting from:
565    ///
566    /// ```text
567    /// α = 2·tan(w/2)·r / z
568    /// rd = atan(α) / (r·w)
569    /// ```
570    ///
571    /// Taking derivatives:
572    ///
573    /// ```text
574    /// ∂α/∂w = 2·sec²(w/2)·(1/2)·r/z = sec²(w/2)·r/z
575    /// ```
576    ///
577    /// where sec²(w/2) = 1 + tan²(w/2).
578    ///
579    /// Using the quotient rule for rd = atan(α) / (r·w):
580    ///
581    /// ```text
582    /// ∂rd/∂w = [∂atan(α)/∂w · r·w - atan(α) · r] / (r·w)²
583    ///        = [1/(1+α²) · ∂α/∂w · r·w - atan(α) · r] / (r·w)²
584    ///        = [sec²(w/2)·r²·w/z·(1/(1+α²)) - atan(α)·r] / (r²·w²)
585    /// ```
586    ///
587    /// Simplifying:
588    ///
589    /// ```text
590    /// ∂rd/∂w = [∂atan(α)/∂α · ∂α/∂w · r·w - atan(α)·r] / (r·w)²
591    /// ```
592    ///
593    /// ### Case 2: r ≈ 0 (Near Optical Axis)
594    ///
595    /// When r ≈ 0, we use rd = 2·tan(w/2) / w.
596    ///
597    /// Using the quotient rule:
598    ///
599    /// ```text
600    /// ∂rd/∂w = [2·sec²(w/2)·(1/2)·w - 2·tan(w/2)] / w²
601    ///        = [sec²(w/2)·w - 2·tan(w/2)] / w²
602    /// ```
603    ///
604    /// ## Final Jacobian w.r.t. w
605    ///
606    /// Once we have ∂rd/∂w, we compute:
607    ///
608    /// ```text
609    /// ∂u/∂w = fx · ∂(x·rd)/∂w = fx · x · ∂rd/∂w
610    /// ∂v/∂w = fy · ∂(y·rd)/∂w = fy · y · ∂rd/∂w
611    /// ```
612    ///
613    /// ## Matrix Form
614    ///
615    /// The complete Jacobian matrix is:
616    ///
617    /// ```text
618    /// J = [ mx   0    1    0    fx·x·∂rd/∂w ]
619    ///     [  0  my    0    1    fy·y·∂rd/∂w ]
620    /// ```
621    ///
622    /// where mx = x·rd and my = y·rd.
623    ///
624    /// # Arguments
625    ///
626    /// * `p_cam` - 3D point in camera coordinate frame.
627    ///
628    /// # Returns
629    ///
630    /// Returns the 2x5 Intrinsic Jacobian matrix.
631    ///
632    /// # References
633    ///
634    /// - Devernay & Faugeras, "Straight lines have to be straight", Machine Vision and Applications 2001
635    /// - Hughes et al., "Rolling Shutter Motion Deblurring", CVPR 2010 (uses FOV model)
636    ///
637    /// # Numerical Verification
638    ///
639    /// This analytical Jacobian is verified against numerical differentiation in
640    /// `test_jacobian_intrinsics_numerical()` with tolerance < 1e-4.
641    ///
642    /// # Notes
643    ///
644    /// The FOV parameter w controls the field of view angle. Typical values range from
645    /// 0.5 (narrow FOV) to π (hemispheric fisheye). The derivative ∂rd/∂w captures how
646    /// changes in the FOV parameter affect the radial distortion mapping.
647    fn jacobian_intrinsics(&self, p_cam: &Vector3<f64>) -> Self::IntrinsicJacobian {
648        let x = p_cam[0];
649        let y = p_cam[1];
650        let z = p_cam[2];
651
652        let r = (x * x + y * y).sqrt();
653        let w = self.distortion_params();
654        let tan_w_2 = (w / 2.0).tan();
655        let mul2tanwby2 = tan_w_2 * 2.0;
656
657        let rd = if r > crate::GEOMETRIC_PRECISION {
658            let atan_wrd = (mul2tanwby2 * r / z).atan();
659            atan_wrd / (r * w)
660        } else {
661            mul2tanwby2 / w
662        };
663
664        let mx = x * rd;
665        let my = y * rd;
666
667        // ∂u/∂fx = mx, ∂u/∂fy = 0, ∂u/∂cx = 1, ∂u/∂cy = 0
668        // ∂v/∂fx = 0, ∂v/∂fy = my, ∂v/∂cx = 0, ∂v/∂cy = 1
669
670        // For w derivative: ∂rd/∂w
671        let drd_dw = if r > crate::GEOMETRIC_PRECISION {
672            let tan_w_2 = (w / 2.0).tan();
673            let alpha = 2.0 * tan_w_2 * r / z;
674            let atan_alpha = alpha.atan();
675
676            // sec²(w/2) = 1 + tan²(w/2)
677            let sec2_w_2 = 1.0 + tan_w_2 * tan_w_2;
678            let dalpha_dw = sec2_w_2 * r / z;
679
680            // ∂rd/∂w = [1/(1+α²) · ∂α/∂w · r·w - atan(α) · r] / (r·w)²
681            let datan_dw = dalpha_dw / (1.0 + alpha * alpha);
682            (datan_dw * r * w - atan_alpha * r) / (r * r * w * w)
683        } else {
684            let tan_w_2 = (w / 2.0).tan();
685            let sec2_w_2 = 1.0 + tan_w_2 * tan_w_2;
686            // rd = 2·tan(w/2) / w
687            // ∂rd/∂w = [2·sec²(w/2)/2 · w - 2·tan(w/2)] / w²
688            //        = [sec²(w/2) · w - 2·tan(w/2)] / w²
689            (sec2_w_2 * w - 2.0 * tan_w_2) / (w * w)
690        };
691
692        let du_dw = self.pinhole.fx * x * drd_dw;
693        let dv_dw = self.pinhole.fy * y * drd_dw;
694
695        SMatrix::<f64, 2, 5>::new(mx, 0.0, 1.0, 0.0, du_dw, 0.0, my, 0.0, 1.0, dv_dw)
696    }
697
698    /// Validates camera parameters.
699    ///
700    /// # Validation Rules
701    ///
702    /// - fx, fy must be positive (> 0)
703    /// - fx, fy must be finite
704    /// - cx, cy must be finite
705    /// - w must be in (0, π]
706    ///
707    /// # Errors
708    ///
709    /// Returns [`CameraModelError`] if any parameter violates validation rules.
710    fn validate_params(&self) -> Result<(), CameraModelError> {
711        self.pinhole.validate()?;
712        self.get_distortion().validate()
713    }
714
715    /// Returns the pinhole parameters of the camera.
716    ///
717    /// # Returns
718    ///
719    /// A [`PinholeParams`] struct containing the focal lengths (fx, fy) and principal point (cx, cy).
720    fn get_pinhole_params(&self) -> PinholeParams {
721        PinholeParams {
722            fx: self.pinhole.fx,
723            fy: self.pinhole.fy,
724            cx: self.pinhole.cx,
725            cy: self.pinhole.cy,
726        }
727    }
728
    /// Returns the distortion model and parameters of the camera.
    ///
    /// # Returns
    ///
    /// A copy of the [`DistortionModel`] stored on this camera. For instances
    /// built through [`FovCamera::new`] this is always [`DistortionModel::FOV`].
    fn get_distortion(&self) -> DistortionModel {
        self.distortion
    }
737
    /// Returns the string identifier for the camera model.
    ///
    /// # Returns
    ///
    /// The static string `"fov"`.
    fn get_model_name(&self) -> &'static str {
        "fov"
    }
746}
747
748#[cfg(test)]
749mod tests {
750    use super::*;
751    use nalgebra::{Matrix2xX, Matrix3xX};
752
753    type TestResult = Result<(), Box<dyn std::error::Error>>;
754
    // Constructing with a valid FOV distortion succeeds and stores the parameters.
    #[test]
    fn test_fov_camera_creation() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        assert_eq!(camera.pinhole.fx, 300.0);
        assert_eq!(camera.distortion_params(), 1.5);
        Ok(())
    }
765
    // A point on the optical axis must project (almost) exactly to the principal point.
    #[test]
    fn test_projection_at_optical_axis() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        let p_cam = Vector3::new(0.0, 0.0, 1.0);
        let uv = camera.project(&p_cam)?;

        assert!((uv.x - 320.0).abs() < 1e-4);
        assert!((uv.y - 240.0).abs() < 1e-4);

        Ok(())
    }
780
    // Verifies the analytical 2x3 point Jacobian against central differences.
    #[test]
    fn test_jacobian_point_numerical() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        let p_cam = Vector3::new(0.1, 0.2, 1.0);

        let jac_analytical = camera.jacobian_point(&p_cam);
        let eps = crate::NUMERICAL_DERIVATIVE_EPS;

        // Perturb each coordinate of the 3D point in turn.
        for i in 0..3 {
            let mut p_plus = p_cam;
            let mut p_minus = p_cam;
            p_plus[i] += eps;
            p_minus[i] -= eps;

            let uv_plus = camera.project(&p_plus)?;
            let uv_minus = camera.project(&p_minus)?;
            // Central difference approximation of column i.
            let num_jac = (uv_plus - uv_minus) / (2.0 * eps);

            for r in 0..2 {
                assert!(
                    jac_analytical[(r, i)].is_finite(),
                    "Jacobian [{r},{i}] is not finite"
                );
                let diff = (jac_analytical[(r, i)] - num_jac[r]).abs();
                assert!(
                    diff < crate::JACOBIAN_TEST_TOLERANCE,
                    "Mismatch at ({}, {})",
                    r,
                    i
                );
            }
        }
        Ok(())
    }
818
    // Verifies the analytical 2x5 intrinsic Jacobian against central differences,
    // rebuilding the camera from a perturbed parameter vector each time.
    #[test]
    fn test_jacobian_intrinsics_numerical() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        let p_cam = Vector3::new(0.1, 0.2, 1.0);

        let jac_analytical = camera.jacobian_intrinsics(&p_cam);
        let params: DVector<f64> = (&camera).into();
        let eps = crate::NUMERICAL_DERIVATIVE_EPS;

        // Perturb each of [fx, fy, cx, cy, w] in turn.
        for i in 0..5 {
            let mut params_plus = params.clone();
            let mut params_minus = params.clone();
            params_plus[i] += eps;
            params_minus[i] -= eps;

            let cam_plus = FovCamera::try_from(params_plus.as_slice())?;
            let cam_minus = FovCamera::try_from(params_minus.as_slice())?;

            let uv_plus = cam_plus.project(&p_cam)?;
            let uv_minus = cam_minus.project(&p_cam)?;
            let num_jac = (uv_plus - uv_minus) / (2.0 * eps);

            for r in 0..2 {
                assert!(
                    jac_analytical[(r, i)].is_finite(),
                    "Jacobian [{r},{i}] is not finite"
                );
                let diff = (jac_analytical[(r, i)] - num_jac[r]).abs();
                assert!(diff < 1e-4, "Mismatch at ({}, {})", r, i);
            }
        }
        Ok(())
    }
855
    // Round-trips the camera through every supported parameter-vector conversion.
    #[test]
    fn test_fov_from_into_traits() -> TestResult {
        let pinhole = PinholeParams::new(400.0, 410.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.8 };
        let camera = FovCamera::new(pinhole, distortion)?;

        // Test conversion to DVector
        let params: DVector<f64> = (&camera).into();
        assert_eq!(params.len(), 5);
        assert_eq!(params[0], 400.0);
        assert_eq!(params[1], 410.0);
        assert_eq!(params[2], 320.0);
        assert_eq!(params[3], 240.0);
        assert_eq!(params[4], 1.8);

        // Test conversion to array
        let arr: [f64; 5] = (&camera).into();
        assert_eq!(arr, [400.0, 410.0, 320.0, 240.0, 1.8]);

        // Test conversion from slice
        let params_slice = [450.0, 460.0, 330.0, 250.0, 2.0];
        let camera2 = FovCamera::try_from(&params_slice[..])?;
        assert_eq!(camera2.pinhole.fx, 450.0);
        assert_eq!(camera2.pinhole.fy, 460.0);
        assert_eq!(camera2.pinhole.cx, 330.0);
        assert_eq!(camera2.pinhole.cy, 250.0);
        assert_eq!(camera2.distortion_params(), 2.0);

        // Test conversion from array
        let camera3 = FovCamera::from([500.0, 510.0, 340.0, 260.0, 2.5]);
        assert_eq!(camera3.pinhole.fx, 500.0);
        assert_eq!(camera3.pinhole.fy, 510.0);
        assert_eq!(camera3.distortion_params(), 2.5);

        Ok(())
    }
892
    // The w grid search should recover a camera that reprojects synthetic
    // ground-truth points with small error.
    #[test]
    fn test_linear_estimation() -> TestResult {
        // Ground truth FOV camera
        let gt_pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let gt_distortion = DistortionModel::FOV { w: 1.0 };
        let gt_camera = FovCamera::new(gt_pinhole, gt_distortion)?;

        // Generate synthetic 3D points in camera frame
        let n_points = 50;
        let mut pts_3d = Matrix3xX::zeros(n_points);
        let mut pts_2d = Matrix2xX::zeros(n_points);
        let mut valid = 0;

        // Points on a spiral at unit depth, keeping only those that project.
        for i in 0..n_points {
            let angle = i as f64 * 2.0 * std::f64::consts::PI / n_points as f64;
            let r = 0.1 + 0.3 * (i as f64 / n_points as f64);
            let p3d = Vector3::new(r * angle.cos(), r * angle.sin(), 1.0);

            if let Ok(p2d) = gt_camera.project(&p3d) {
                pts_3d.set_column(valid, &p3d);
                pts_2d.set_column(valid, &p2d);
                valid += 1;
            }
        }
        let pts_3d = pts_3d.columns(0, valid).into_owned();
        let pts_2d = pts_2d.columns(0, valid).into_owned();

        // Initial camera with default w (grid search will find best)
        let init_pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let init_distortion = DistortionModel::FOV { w: 0.5 };
        let mut camera = FovCamera::new(init_pinhole, init_distortion)?;

        camera.linear_estimation(&pts_3d, &pts_2d)?;

        // FOV uses grid search so tolerance is looser
        for i in 0..valid {
            let col = pts_3d.column(i);
            let projected = camera.project(&Vector3::new(col[0], col[1], col[2]))?;
            let err = ((projected.x - pts_2d[(0, i)]).powi(2)
                + (projected.y - pts_2d[(1, i)]).powi(2))
            .sqrt();
            assert!(err < 5.0, "Reprojection error too large: {err}");
        }

        Ok(())
    }
939
    // project → unproject must return a unit ray collinear with the input point.
    #[test]
    fn test_project_unproject_round_trip() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        let test_points = [
            Vector3::new(0.1, 0.2, 1.0),
            Vector3::new(-0.3, 0.1, 2.0),
            Vector3::new(0.05, -0.1, 0.5),
        ];

        for p_cam in &test_points {
            let uv = camera.project(p_cam)?;
            let ray = camera.unproject(&uv)?;
            // Collinearity: dot product of unit vectors must be ~1.
            let dot = ray.dot(&p_cam.normalize());
            assert!(
                (dot - 1.0).abs() < 1e-6,
                "Round-trip failed: dot={dot}, expected ~1.0"
            );
        }

        Ok(())
    }
964
965    #[test]
966    fn test_project_returns_error_behind_camera() -> TestResult {
967        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
968        let distortion = DistortionModel::FOV { w: 1.5 };
969        let camera = FovCamera::new(pinhole, distortion)?;
970        assert!(camera.project(&Vector3::new(0.0, 0.0, -1.0)).is_err());
971        Ok(())
972    }
973
974    #[test]
975    fn test_project_at_min_depth_boundary() -> TestResult {
976        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
977        let distortion = DistortionModel::FOV { w: 1.5 };
978        let camera = FovCamera::new(pinhole, distortion)?;
979        let p_min = Vector3::new(0.0, 0.0, crate::MIN_DEPTH);
980        if let Ok(uv) = camera.project(&p_min) {
981            assert!(uv.x.is_finite() && uv.y.is_finite());
982        }
983        Ok(())
984    }
985
986    #[test]
987    fn test_projection_off_axis() -> TestResult {
988        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
989        let distortion = DistortionModel::FOV { w: 1.5 };
990        let camera = FovCamera::new(pinhole, distortion)?;
991        let p_cam = Vector3::new(0.3, 0.0, 1.0);
992        let uv = camera.project(&p_cam)?;
993        assert!(
994            uv.x > 320.0,
995            "off-axis point should project right of principal point"
996        );
997        assert!(
998            (uv.y - 240.0).abs() < 1.0,
999            "y should be close to cy for horizontal offset"
1000        );
1001        Ok(())
1002    }
1003
1004    #[test]
1005    fn test_unproject_center_pixel() -> TestResult {
1006        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
1007        let distortion = DistortionModel::FOV { w: 1.5 };
1008        let camera = FovCamera::new(pinhole, distortion)?;
1009        let uv = Vector2::new(320.0, 240.0);
1010        let ray = camera.unproject(&uv)?;
1011        assert!(ray.x.abs() < 1e-6, "x should be ~0, got {}", ray.x);
1012        assert!(ray.y.abs() < 1e-6, "y should be ~0, got {}", ray.y);
1013        assert!((ray.z - 1.0).abs() < 1e-6, "z should be ~1, got {}", ray.z);
1014        Ok(())
1015    }
1016
1017    #[test]
1018    fn test_batch_projection_matches_individual() -> TestResult {
1019        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
1020        let distortion = DistortionModel::FOV { w: 1.5 };
1021        let camera = FovCamera::new(pinhole, distortion)?;
1022        let pts = Matrix3xX::from_columns(&[
1023            Vector3::new(0.0, 0.0, 1.0),
1024            Vector3::new(0.3, 0.2, 1.5),
1025            Vector3::new(-0.4, 0.1, 2.0),
1026        ]);
1027        let batch = camera.project_batch(&pts);
1028        for i in 0..3 {
1029            let col = pts.column(i);
1030            let p = camera.project(&Vector3::new(col[0], col[1], col[2]))?;
1031            assert!(
1032                (batch[(0, i)] - p.x).abs() < 1e-10,
1033                "batch u mismatch at col {i}"
1034            );
1035            assert!(
1036                (batch[(1, i)] - p.y).abs() < 1e-10,
1037                "batch v mismatch at col {i}"
1038            );
1039        }
1040        Ok(())
1041    }
1042
1043    #[test]
1044    fn test_jacobian_dimensions() -> TestResult {
1045        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
1046        let distortion = DistortionModel::FOV { w: 1.5 };
1047        let camera = FovCamera::new(pinhole, distortion)?;
1048        let p_cam = Vector3::new(0.1, 0.2, 1.0);
1049        let jac_point = camera.jacobian_point(&p_cam);
1050        assert_eq!(jac_point.nrows(), 2);
1051        assert_eq!(jac_point.ncols(), 3);
1052        let jac_intr = camera.jacobian_intrinsics(&p_cam);
1053        assert_eq!(jac_intr.nrows(), 2);
1054        assert_eq!(jac_intr.ncols(), 5); // FovCamera::INTRINSIC_DIM = 5
1055        Ok(())
1056    }
1057}