// apex_camera_models/fov.rs
//! Field-of-View (FOV) Camera Model
//!
//! A fisheye camera model using a field-of-view parameter for radial distortion.
//!
//! # Mathematical Model
//!
//! ## Projection (3D → 2D)
//!
//! For a 3D point p = (x, y, z) in camera coordinates:
//!
//! ```text
//! r = √(x² + y²)
//! atan_wrd = atan2(2·tan(w/2)·r, z)
//! rd = atan_wrd / (r·w)    (if r > 0)
//! rd = 2·tan(w/2) / w      (if r ≈ 0)
//!
//! mx = x · rd
//! my = y · rd
//! u = fx · mx + cx
//! v = fy · my + cy
//! ```
//!
//! where w is the field-of-view parameter (0 < w ≤ π).
//!
//! ## Unprojection (2D → 3D)
//!
//! Uses trigonometric inverse with special handling near optical axis.
//!
//! # Parameters
//!
//! - **Intrinsics**: fx, fy, cx, cy
//! - **Distortion**: w (field-of-view parameter) (5 parameters total)
//!
//! # Use Cases
//!
//! - Fisheye cameras in SLAM applications
//! - Wide field-of-view lenses
//!
//! # References
//!
//! - Zhang et al., "Simultaneous Localization and Mapping with Fisheye Cameras"
//!   <https://arxiv.org/pdf/1807.08957>
use crate::{CameraModel, CameraModelError, DistortionModel, PinholeParams};
use nalgebra::{DVector, SMatrix, Vector2, Vector3};
46
/// FOV camera model with 5 parameters.
///
/// Combines pinhole intrinsics (fx, fy, cx, cy) with a single
/// field-of-view distortion parameter `w` stored in [`DistortionModel::FOV`].
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct FovCamera {
    // Pinhole intrinsics: focal lengths (fx, fy) and principal point (cx, cy).
    pub pinhole: PinholeParams,
    // Distortion model; expected to be DistortionModel::FOV for validated instances.
    pub distortion: DistortionModel,
}
53
54impl FovCamera {
55    /// Create a new Field-of-View (FOV) camera.
56    ///
57    /// # Arguments
58    ///
59    /// * `pinhole` - Pinhole parameters (fx, fy, cx, cy).
60    /// * `distortion` - MUST be [`DistortionModel::FOV`] with parameter `w`.
61    ///
62    /// # Returns
63    ///
64    /// Returns a new `FovCamera` instance if the distortion model matches.
65    ///
66    /// # Errors
67    ///
68    /// Returns [`CameraModelError::InvalidParams`] if `distortion` is not [`DistortionModel::FOV`].
69    pub fn new(
70        pinhole: PinholeParams,
71        distortion: DistortionModel,
72    ) -> Result<Self, CameraModelError> {
73        let camera = Self {
74            pinhole,
75            distortion,
76        };
77        camera.validate_params()?;
78        Ok(camera)
79    }
80
81    /// Helper method to extract distortion parameter.
82    ///
83    /// # Returns
84    ///
85    /// Returns the `w` parameter for FOV.
86    /// If the distortion model is incorrect (which shouldn't happen for valid instances), returns `0.0`.
87    fn distortion_params(&self) -> f64 {
88        match self.distortion {
89            DistortionModel::FOV { w } => w,
90            _ => 0.0,
91        }
92    }
93
94    /// Performs linear estimation to initialize the w parameter from point correspondences.
95    ///
96    /// This method estimates the `w` parameter using a linear least squares approach
97    /// given 3D-2D point correspondences. It assumes the intrinsic parameters (fx, fy, cx, cy)
98    /// are already set.
99    ///
100    /// # Arguments
101    ///
102    /// * `points_3d`: Matrix3xX<f64> - 3D points in camera coordinates (each column is a point)
103    /// * `points_2d`: Matrix2xX<f64> - Corresponding 2D points in image coordinates
104    ///
105    /// # Returns
106    ///
107    /// Returns `Ok(())` on success or a `CameraModelError` if the estimation fails.
108    pub fn linear_estimation(
109        &mut self,
110        points_3d: &nalgebra::Matrix3xX<f64>,
111        points_2d: &nalgebra::Matrix2xX<f64>,
112    ) -> Result<(), CameraModelError> {
113        // Check if the number of 2D and 3D points match
114        if points_2d.ncols() != points_3d.ncols() {
115            return Err(CameraModelError::InvalidParams(
116                "Number of 2D and 3D points must match".to_string(),
117            ));
118        }
119
120        let num_points = points_2d.ncols();
121
122        if num_points < 2 {
123            return Err(CameraModelError::InvalidParams(
124                "Need at least 2 point correspondences for linear estimation".to_string(),
125            ));
126        }
127
128        let mut best_w = 1.0;
129        let mut best_error = f64::INFINITY;
130
131        for w_test in (10..300).map(|i| i as f64 / 100.0) {
132            let mut error_sum = 0.0;
133            let mut valid_count = 0;
134
135            for i in 0..num_points {
136                let x = points_3d[(0, i)];
137                let y = points_3d[(1, i)];
138                let z = points_3d[(2, i)];
139                let u_observed = points_2d[(0, i)];
140                let v_observed = points_2d[(1, i)];
141
142                let r2 = x * x + y * y;
143                let r = r2.sqrt();
144
145                let tan_w_half = (w_test / 2.0).tan();
146                let atan_wrd = (2.0 * tan_w_half * r).atan2(z);
147
148                let eps_sqrt = f64::EPSILON.sqrt();
149                let rd = if r2 < eps_sqrt {
150                    2.0 * tan_w_half / w_test
151                } else {
152                    atan_wrd / (r * w_test)
153                };
154
155                let mx = x * rd;
156                let my = y * rd;
157
158                let u_predicted = self.pinhole.fx * mx + self.pinhole.cx;
159                let v_predicted = self.pinhole.fy * my + self.pinhole.cy;
160
161                let error = ((u_predicted - u_observed).powi(2)
162                    + (v_predicted - v_observed).powi(2))
163                .sqrt();
164
165                if error.is_finite() {
166                    error_sum += error;
167                    valid_count += 1;
168                }
169            }
170
171            if valid_count > 0 {
172                let avg_error = error_sum / valid_count as f64;
173                if avg_error < best_error {
174                    best_error = avg_error;
175                    best_w = w_test;
176                }
177            }
178        }
179
180        self.distortion = DistortionModel::FOV { w: best_w };
181
182        // Validate parameters
183        self.validate_params()?;
184
185        Ok(())
186    }
187}
188
189/// Convert camera to dynamic vector of intrinsic parameters.
190///
191/// # Layout
192///
193/// The parameters are ordered as: [fx, fy, cx, cy, w]
194impl From<&FovCamera> for DVector<f64> {
195    fn from(camera: &FovCamera) -> Self {
196        let w = camera.distortion_params();
197        DVector::from_vec(vec![
198            camera.pinhole.fx,
199            camera.pinhole.fy,
200            camera.pinhole.cx,
201            camera.pinhole.cy,
202            w,
203        ])
204    }
205}
206
207/// Convert camera to fixed-size array of intrinsic parameters.
208///
209/// # Layout
210///
211/// The parameters are ordered as: [fx, fy, cx, cy, w]
212impl From<&FovCamera> for [f64; 5] {
213    fn from(camera: &FovCamera) -> Self {
214        let w = camera.distortion_params();
215        [
216            camera.pinhole.fx,
217            camera.pinhole.fy,
218            camera.pinhole.cx,
219            camera.pinhole.cy,
220            w,
221        ]
222    }
223}
224
225/// Create camera from slice of intrinsic parameters.
226///
227/// # Layout
228///
229/// Expected parameter order: [fx, fy, cx, cy, w]
230///
231/// # Panics
232///
233/// Panics if the slice has fewer than 5 elements.
234impl From<&[f64]> for FovCamera {
235    fn from(params: &[f64]) -> Self {
236        assert!(
237            params.len() >= 5,
238            "FovCamera requires at least 5 parameters, got {}",
239            params.len()
240        );
241        Self {
242            pinhole: PinholeParams {
243                fx: params[0],
244                fy: params[1],
245                cx: params[2],
246                cy: params[3],
247            },
248            distortion: DistortionModel::FOV { w: params[4] },
249        }
250    }
251}
252
253/// Create camera from fixed-size array of intrinsic parameters.
254///
255/// # Layout
256///
257/// Expected parameter order: [fx, fy, cx, cy, w]
258impl From<[f64; 5]> for FovCamera {
259    fn from(params: [f64; 5]) -> Self {
260        Self {
261            pinhole: PinholeParams {
262                fx: params[0],
263                fy: params[1],
264                cx: params[2],
265                cy: params[3],
266            },
267            distortion: DistortionModel::FOV { w: params[4] },
268        }
269    }
270}
271
272/// Creates a `FovCamera` from a parameter slice with validation.
273///
274/// Unlike `From<&[f64]>`, this constructor validates all parameters
275/// and returns a `Result` instead of panicking on invalid input.
276///
277/// # Errors
278///
279/// Returns `CameraModelError::InvalidParams` if fewer than 5 parameters are provided.
280/// Returns validation errors if focal lengths are non-positive or w is out of range.
281pub fn try_from_params(params: &[f64]) -> Result<FovCamera, CameraModelError> {
282    if params.len() < 5 {
283        return Err(CameraModelError::InvalidParams(format!(
284            "FovCamera requires at least 5 parameters, got {}",
285            params.len()
286        )));
287    }
288    let camera = FovCamera::from(params);
289    camera.validate_params()?;
290    Ok(camera)
291}
292
impl CameraModel for FovCamera {
    const INTRINSIC_DIM: usize = 5;
    type IntrinsicJacobian = SMatrix<f64, 2, 5>;
    type PointJacobian = SMatrix<f64, 2, 3>;

    /// Projects a 3D point to 2D image coordinates.
    ///
    /// # Mathematical Formula
    ///
    /// Uses atan-based radial distortion with FOV parameter w.
    ///
    /// # Arguments
    ///
    /// * `p_cam` - 3D point in camera coordinate frame.
    ///
    /// # Returns
    ///
    /// - `Ok(uv)` - 2D image coordinates if valid.
    ///
    /// # Errors
    ///
    /// Returns [`CameraModelError::ProjectionOutOfBounds`] if `z` is too small.
    fn project(&self, p_cam: &Vector3<f64>) -> Result<Vector2<f64>, CameraModelError> {
        let x = p_cam[0];
        let y = p_cam[1];
        let z = p_cam[2];

        // Reject points at or behind the camera plane (z ≤ √ε).
        if z < f64::EPSILON.sqrt() {
            return Err(CameraModelError::ProjectionOutOfBounds);
        }

        let r = (x * x + y * y).sqrt();
        let w = self.distortion_params();
        let tan_w_2 = (w / 2.0).tan();
        let mul2tanwby2 = tan_w_2 * 2.0;

        // Radial distortion factor rd; the near-axis branch is the r → 0
        // limit of the general formula.
        let rd = if r > crate::GEOMETRIC_PRECISION {
            let atan_wrd = (mul2tanwby2 * r / z).atan();
            atan_wrd / (r * w)
        } else {
            mul2tanwby2 / w
        };

        let mx = x * rd;
        let my = y * rd;

        Ok(Vector2::new(
            self.pinhole.fx * mx + self.pinhole.cx,
            self.pinhole.fy * my + self.pinhole.cy,
        ))
    }

    /// Unprojects a 2D image point to a 3D ray.
    ///
    /// # Algorithm
    ///
    /// Trigonometric inverse using sin/cos relationships.
    ///
    /// # Arguments
    ///
    /// * `point_2d` - 2D point in image coordinates.
    ///
    /// # Returns
    ///
    /// - `Ok(ray)` - Normalized 3D ray direction.
    ///
    /// # Errors
    ///
    /// This model does not explicitly fail unprojection unless internal math errors occur, in which case it propagates them.
    fn unproject(&self, point_2d: &Vector2<f64>) -> Result<Vector3<f64>, CameraModelError> {
        let u = point_2d.x;
        let v = point_2d.y;

        let w = self.distortion_params();
        let tan_w_2 = (w / 2.0).tan();
        let mul2tanwby2 = tan_w_2 * 2.0;

        // Normalized (distorted) image-plane coordinates.
        let mx = (u - self.pinhole.cx) / self.pinhole.fx;
        let my = (v - self.pinhole.cy) / self.pinhole.fy;

        let r2 = mx * mx + my * my;
        let rd = r2.sqrt();

        // Points at the principal point map straight down the optical axis.
        if rd < crate::GEOMETRIC_PRECISION {
            return Ok(Vector3::new(0.0, 0.0, 1.0));
        }

        // Invert rd = atan(2·tan(w/2)·ru) / w for the undistorted radius ru.
        // NOTE(review): tan blows up as rd·w approaches π/2; presumably valid
        // image points keep rd·w inside that range — confirm against w bounds.
        let ru = (rd * w).tan() / mul2tanwby2;

        // Normalize so the returned ray is unit length.
        let norm_factor = (1.0 + ru * ru).sqrt();
        let x = mx * ru / (rd * norm_factor);
        let y = my * ru / (rd * norm_factor);
        let z = 1.0 / norm_factor;

        Ok(Vector3::new(x, y, z))
    }

    /// Jacobian of projection w.r.t. 3D point coordinates (2×3).
    ///
    /// # Mathematical Derivation
    ///
    /// For the FOV camera model, projection is defined as:
    ///
    /// ```text
    /// r = √(x² + y²)
    /// α = 2·tan(w/2)·r / z
    /// atan_wrd = atan(α)
    /// rd = atan_wrd / (r·w)    (if r > 0)
    /// rd = 2·tan(w/2) / w       (if r ≈ 0)
    ///
    /// mx = x · rd
    /// my = y · rd
    /// u = fx · mx + cx
    /// v = fy · my + cy
    /// ```
    ///
    /// ## Jacobian Structure
    ///
    /// Computing ∂u/∂p and ∂v/∂p where p = (x, y, z):
    ///
    /// ```text
    /// J_point = [ ∂u/∂x  ∂u/∂y  ∂u/∂z ]
    ///           [ ∂v/∂x  ∂v/∂y  ∂v/∂z ]
    /// ```
    ///
    /// Chain rule for u = fx · x · rd + cx and v = fy · y · rd + cy:
    ///
    /// ```text
    /// ∂u/∂x = fx · ∂(x·rd)/∂x = fx · (rd + x · ∂rd/∂x)
    /// ∂u/∂y = fx · ∂(x·rd)/∂y = fx · x · ∂rd/∂y
    /// ∂u/∂z = fx · ∂(x·rd)/∂z = fx · x · ∂rd/∂z
    ///
    /// ∂v/∂x = fy · ∂(y·rd)/∂x = fy · y · ∂rd/∂x
    /// ∂v/∂y = fy · ∂(y·rd)/∂y = fy · (rd + y · ∂rd/∂y)
    /// ∂v/∂z = fy · ∂(y·rd)/∂z = fy · y · ∂rd/∂z
    /// ```
    ///
    /// Computing ∂rd/∂x, ∂rd/∂y, ∂rd/∂z for r > 0 (rd = atan(α) / (r·w), α = 2·tan(w/2)·r / z):
    ///
    /// ```text
    /// ∂rd/∂r = [∂atan/∂α · ∂α/∂r · r·w - atan(α) · w] / (r·w)²
    ///        = [1/(1+α²) · 2·tan(w/2)/z · r·w - atan(α) · w] / (r·w)²
    ///
    /// ∂rd/∂z = ∂atan/∂α · ∂α/∂z / (r·w)
    ///        = 1/(1+α²) · (-2·tan(w/2)·r/z²) / (r·w)
    /// ```
    ///
    /// Then using ∂r/∂x = x/r and ∂r/∂y = y/r:
    ///
    /// ```text
    /// ∂rd/∂x = ∂rd/∂r · ∂r/∂x = ∂rd/∂r · x/r
    /// ∂rd/∂y = ∂rd/∂r · ∂r/∂y = ∂rd/∂r · y/r
    /// ∂rd/∂z = (computed directly above)
    /// ```
    ///
    /// Near optical axis (r < ε): rd = 2·tan(w/2) / w is constant, so ∂rd/∂x = ∂rd/∂y = ∂rd/∂z = 0:
    /// ```text
    /// J_point = [ fx·rd  0      0  ]
    ///           [ 0      fy·rd  0  ]
    /// ```
    ///
    /// # Arguments
    ///
    /// * `p_cam` - 3D point in camera coordinate frame.
    ///
    /// # Returns
    ///
    /// Returns the 2x3 Jacobian matrix.
    ///
    /// # References
    ///
    /// - Devernay & Faugeras, "Straight lines have to be straight", Machine Vision and Applications 2001
    /// - Zhang et al., "Fisheye Camera Calibration Using Principal Point Constraints", PAMI 2012
    ///
    /// # Numerical Verification
    ///
    /// This analytical Jacobian is verified against numerical differentiation in
    /// `test_jacobian_point_numerical()` with tolerance < 1e-6.
    fn jacobian_point(&self, p_cam: &Vector3<f64>) -> Self::PointJacobian {
        let x = p_cam[0];
        let y = p_cam[1];
        let z = p_cam[2];

        let r = (x * x + y * y).sqrt();
        let w = self.distortion_params();
        let tan_w_2 = (w / 2.0).tan();
        let mul2tanwby2 = tan_w_2 * 2.0;

        // Near the optical axis rd is constant, so only the diagonal survives.
        if r < crate::GEOMETRIC_PRECISION {
            let rd = mul2tanwby2 / w;
            return SMatrix::<f64, 2, 3>::new(
                self.pinhole.fx * rd,
                0.0,
                0.0,
                0.0,
                self.pinhole.fy * rd,
                0.0,
            );
        }

        let atan_wrd = (mul2tanwby2 * r / z).atan();
        let rd = atan_wrd / (r * w);

        // Derivatives
        // d/dr and d/dz of atan(2·tan(w/2)·r/z); denominator is z²·(1+α²).
        let datan_dr = mul2tanwby2 * z / (z * z + mul2tanwby2 * mul2tanwby2 * r * r);
        let datan_dz = -mul2tanwby2 * r / (z * z + mul2tanwby2 * mul2tanwby2 * r * r);

        // Quotient rule on rd = atan_wrd / (r·w).
        let drd_dr = (datan_dr * r - atan_wrd) / (r * r * w);
        let drd_dz = datan_dz / (r * w);

        let dr_dx = x / r;
        let dr_dy = y / r;

        let dmx_dx = rd + x * drd_dr * dr_dx;
        let dmx_dy = x * drd_dr * dr_dy;
        let dmx_dz = x * drd_dz;

        let dmy_dx = y * drd_dr * dr_dx;
        let dmy_dy = rd + y * drd_dr * dr_dy;
        let dmy_dz = y * drd_dz;

        SMatrix::<f64, 2, 3>::new(
            self.pinhole.fx * dmx_dx,
            self.pinhole.fx * dmx_dy,
            self.pinhole.fx * dmx_dz,
            self.pinhole.fy * dmy_dx,
            self.pinhole.fy * dmy_dy,
            self.pinhole.fy * dmy_dz,
        )
    }

    /// Jacobian of projection w.r.t. intrinsic parameters (2×5).
    ///
    /// # Mathematical Derivation
    ///
    /// The FOV camera has 5 intrinsic parameters: [fx, fy, cx, cy, w]
    ///
    /// ## Projection Equations
    ///
    /// ```text
    /// u = fx · mx + cx
    /// v = fy · my + cy
    /// ```
    ///
    /// where mx = x · rd and my = y · rd, with:
    ///
    /// ```text
    /// rd = atan(2·tan(w/2)·r/z) / (r·w)  (for r > 0)
    /// rd = 2·tan(w/2) / w                 (for r ≈ 0)
    /// ```
    ///
    /// ## Jacobian Structure
    ///
    /// Intrinsic Jacobian (2×5):
    /// ```text
    /// J = [ ∂u/∂fx  ∂u/∂fy  ∂u/∂cx  ∂u/∂cy  ∂u/∂w ]
    ///     [ ∂v/∂fx  ∂v/∂fy  ∂v/∂cx  ∂v/∂cy  ∂v/∂w ]
    /// ```
    ///
    /// ## Linear Parameters (fx, fy, cx, cy)
    ///
    /// These appear linearly in the projection equations:
    ///
    /// ```text
    /// ∂u/∂fx = mx,     ∂u/∂fy = 0,      ∂u/∂cx = 1,      ∂u/∂cy = 0
    /// ∂v/∂fx = 0,      ∂v/∂fy = my,     ∂v/∂cx = 0,      ∂v/∂cy = 1
    /// ```
    ///
    /// ## Distortion Parameter (w)
    ///
    /// The parameter w affects the distortion factor rd. We need ∂rd/∂w.
    ///
    /// ### Case 1: r > 0 (Non-Optical Axis)
    ///
    /// Starting from:
    ///
    /// ```text
    /// α = 2·tan(w/2)·r / z
    /// rd = atan(α) / (r·w)
    /// ```
    ///
    /// Taking derivatives:
    ///
    /// ```text
    /// ∂α/∂w = 2·sec²(w/2)·(1/2)·r/z = sec²(w/2)·r/z
    /// ```
    ///
    /// where sec²(w/2) = 1 + tan²(w/2).
    ///
    /// Using the quotient rule for rd = atan(α) / (r·w):
    ///
    /// ```text
    /// ∂rd/∂w = [∂atan(α)/∂w · r·w - atan(α) · r] / (r·w)²
    ///        = [1/(1+α²) · ∂α/∂w · r·w - atan(α) · r] / (r·w)²
    ///        = [sec²(w/2)·r²·w/z·(1/(1+α²)) - atan(α)·r] / (r²·w²)
    /// ```
    ///
    /// Simplifying:
    ///
    /// ```text
    /// ∂rd/∂w = [∂atan(α)/∂α · ∂α/∂w · r·w - atan(α)·r] / (r·w)²
    /// ```
    ///
    /// ### Case 2: r ≈ 0 (Near Optical Axis)
    ///
    /// When r ≈ 0, we use rd = 2·tan(w/2) / w.
    ///
    /// Using the quotient rule:
    ///
    /// ```text
    /// ∂rd/∂w = [2·sec²(w/2)·(1/2)·w - 2·tan(w/2)] / w²
    ///        = [sec²(w/2)·w - 2·tan(w/2)] / w²
    /// ```
    ///
    /// ## Final Jacobian w.r.t. w
    ///
    /// Once we have ∂rd/∂w, we compute:
    ///
    /// ```text
    /// ∂u/∂w = fx · ∂(x·rd)/∂w = fx · x · ∂rd/∂w
    /// ∂v/∂w = fy · ∂(y·rd)/∂w = fy · y · ∂rd/∂w
    /// ```
    ///
    /// ## Matrix Form
    ///
    /// The complete Jacobian matrix is:
    ///
    /// ```text
    /// J = [ mx   0    1    0    fx·x·∂rd/∂w ]
    ///     [  0  my    0    1    fy·y·∂rd/∂w ]
    /// ```
    ///
    /// where mx = x·rd and my = y·rd.
    ///
    /// # Arguments
    ///
    /// * `p_cam` - 3D point in camera coordinate frame.
    ///
    /// # Returns
    ///
    /// Returns the 2x5 Intrinsic Jacobian matrix.
    ///
    /// # References
    ///
    /// - Devernay & Faugeras, "Straight lines have to be straight", Machine Vision and Applications 2001
    /// - Hughes et al., "Rolling Shutter Motion Deblurring", CVPR 2010 (uses FOV model)
    ///
    /// # Numerical Verification
    ///
    /// This analytical Jacobian is verified against numerical differentiation in
    /// `test_jacobian_intrinsics_numerical()` with tolerance < 1e-4.
    ///
    /// # Notes
    ///
    /// The FOV parameter w controls the field of view angle. Typical values range from
    /// 0.5 (narrow FOV) to π (hemispheric fisheye). The derivative ∂rd/∂w captures how
    /// changes in the FOV parameter affect the radial distortion mapping.
    fn jacobian_intrinsics(&self, p_cam: &Vector3<f64>) -> Self::IntrinsicJacobian {
        let x = p_cam[0];
        let y = p_cam[1];
        let z = p_cam[2];

        let r = (x * x + y * y).sqrt();
        let w = self.distortion_params();
        let tan_w_2 = (w / 2.0).tan();
        let mul2tanwby2 = tan_w_2 * 2.0;

        // rd mirrors the computation in project(); keep the two in sync.
        let rd = if r > crate::GEOMETRIC_PRECISION {
            let atan_wrd = (mul2tanwby2 * r / z).atan();
            atan_wrd / (r * w)
        } else {
            mul2tanwby2 / w
        };

        let mx = x * rd;
        let my = y * rd;

        // ∂u/∂fx = mx, ∂u/∂fy = 0, ∂u/∂cx = 1, ∂u/∂cy = 0
        // ∂v/∂fx = 0, ∂v/∂fy = my, ∂v/∂cx = 0, ∂v/∂cy = 1

        // For w derivative: ∂rd/∂w
        let drd_dw = if r > crate::GEOMETRIC_PRECISION {
            // Shadows the outer tan_w_2 with the same value (kept as-is).
            let tan_w_2 = (w / 2.0).tan();
            let alpha = 2.0 * tan_w_2 * r / z;
            let atan_alpha = alpha.atan();

            // sec²(w/2) = 1 + tan²(w/2)
            let sec2_w_2 = 1.0 + tan_w_2 * tan_w_2;
            let dalpha_dw = sec2_w_2 * r / z;

            // ∂rd/∂w = [1/(1+α²) · ∂α/∂w · r·w - atan(α) · r] / (r·w)²
            let datan_dw = dalpha_dw / (1.0 + alpha * alpha);
            (datan_dw * r * w - atan_alpha * r) / (r * r * w * w)
        } else {
            let tan_w_2 = (w / 2.0).tan();
            let sec2_w_2 = 1.0 + tan_w_2 * tan_w_2;
            // rd = 2·tan(w/2) / w
            // ∂rd/∂w = [2·sec²(w/2)/2 · w - 2·tan(w/2)] / w²
            //        = [sec²(w/2) · w - 2·tan(w/2)] / w²
            (sec2_w_2 * w - 2.0 * tan_w_2) / (w * w)
        };

        let du_dw = self.pinhole.fx * x * drd_dw;
        let dv_dw = self.pinhole.fy * y * drd_dw;

        SMatrix::<f64, 2, 5>::new(mx, 0.0, 1.0, 0.0, du_dw, 0.0, my, 0.0, 1.0, dv_dw)
    }

    /// Validates camera parameters.
    ///
    /// # Validation Rules
    ///
    /// - fx, fy must be positive (> 0)
    /// - fx, fy must be finite
    /// - cx, cy must be finite
    /// - w must be in (0, π]
    ///
    /// # Errors
    ///
    /// Returns [`CameraModelError`] if any parameter violates validation rules.
    fn validate_params(&self) -> Result<(), CameraModelError> {
        self.pinhole.validate()?;
        self.get_distortion().validate()
    }

    /// Returns the pinhole parameters of the camera.
    ///
    /// # Returns
    ///
    /// A [`PinholeParams`] struct containing the focal lengths (fx, fy) and principal point (cx, cy).
    fn get_pinhole_params(&self) -> PinholeParams {
        PinholeParams {
            fx: self.pinhole.fx,
            fy: self.pinhole.fy,
            cx: self.pinhole.cx,
            cy: self.pinhole.cy,
        }
    }

    /// Returns the distortion model and parameters of the camera.
    ///
    /// # Returns
    ///
    /// The [`DistortionModel`] associated with this camera (typically [`DistortionModel::FOV`]).
    fn get_distortion(&self) -> DistortionModel {
        self.distortion
    }

    /// Returns the string identifier for the camera model.
    ///
    /// # Returns
    ///
    /// The string `"fov"`.
    fn get_model_name(&self) -> &'static str {
        "fov"
    }
}
750
#[cfg(test)]
mod tests {
    use super::*;
    use nalgebra::{Matrix2xX, Matrix3xX};

    type TestResult = Result<(), Box<dyn std::error::Error>>;

    // Constructor stores intrinsics and exposes the w parameter.
    #[test]
    fn test_fov_camera_creation() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        assert_eq!(camera.pinhole.fx, 300.0);
        assert_eq!(camera.distortion_params(), 1.5);
        Ok(())
    }

    // A point on the optical axis must project exactly to the principal point.
    #[test]
    fn test_projection_at_optical_axis() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        let p_cam = Vector3::new(0.0, 0.0, 1.0);
        let uv = camera.project(&p_cam)?;

        assert!((uv.x - 320.0).abs() < 1e-4);
        assert!((uv.y - 240.0).abs() < 1e-4);

        Ok(())
    }

    // Verifies the analytical point Jacobian against central differences.
    #[test]
    fn test_jacobian_point_numerical() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        let p_cam = Vector3::new(0.1, 0.2, 1.0);

        let jac_analytical = camera.jacobian_point(&p_cam);
        let eps = crate::NUMERICAL_DERIVATIVE_EPS;

        // Perturb each coordinate of p_cam in turn (central difference).
        for i in 0..3 {
            let mut p_plus = p_cam;
            let mut p_minus = p_cam;
            p_plus[i] += eps;
            p_minus[i] -= eps;

            let uv_plus = camera.project(&p_plus)?;
            let uv_minus = camera.project(&p_minus)?;
            let num_jac = (uv_plus - uv_minus) / (2.0 * eps);

            for r in 0..2 {
                assert!(
                    jac_analytical[(r, i)].is_finite(),
                    "Jacobian [{r},{i}] is not finite"
                );
                let diff = (jac_analytical[(r, i)] - num_jac[r]).abs();
                assert!(
                    diff < crate::JACOBIAN_TEST_TOLERANCE,
                    "Mismatch at ({}, {})",
                    r,
                    i
                );
            }
        }
        Ok(())
    }

    // Verifies the analytical intrinsic Jacobian against central differences
    // on the [fx, fy, cx, cy, w] parameter vector.
    #[test]
    fn test_jacobian_intrinsics_numerical() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        let p_cam = Vector3::new(0.1, 0.2, 1.0);

        let jac_analytical = camera.jacobian_intrinsics(&p_cam);
        let params: DVector<f64> = (&camera).into();
        let eps = crate::NUMERICAL_DERIVATIVE_EPS;

        for i in 0..5 {
            let mut params_plus = params.clone();
            let mut params_minus = params.clone();
            params_plus[i] += eps;
            params_minus[i] -= eps;

            // Rebuild cameras from the perturbed parameter vectors.
            let cam_plus = FovCamera::from(params_plus.as_slice());
            let cam_minus = FovCamera::from(params_minus.as_slice());

            let uv_plus = cam_plus.project(&p_cam)?;
            let uv_minus = cam_minus.project(&p_cam)?;
            let num_jac = (uv_plus - uv_minus) / (2.0 * eps);

            for r in 0..2 {
                assert!(
                    jac_analytical[(r, i)].is_finite(),
                    "Jacobian [{r},{i}] is not finite"
                );
                let diff = (jac_analytical[(r, i)] - num_jac[r]).abs();
                assert!(diff < 1e-4, "Mismatch at ({}, {})", r, i);
            }
        }
        Ok(())
    }

    // Round-trips through every From/Into conversion and checks the
    // [fx, fy, cx, cy, w] layout.
    #[test]
    fn test_fov_from_into_traits() -> TestResult {
        let pinhole = PinholeParams::new(400.0, 410.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.8 };
        let camera = FovCamera::new(pinhole, distortion)?;

        // Test conversion to DVector
        let params: DVector<f64> = (&camera).into();
        assert_eq!(params.len(), 5);
        assert_eq!(params[0], 400.0);
        assert_eq!(params[1], 410.0);
        assert_eq!(params[2], 320.0);
        assert_eq!(params[3], 240.0);
        assert_eq!(params[4], 1.8);

        // Test conversion to array
        let arr: [f64; 5] = (&camera).into();
        assert_eq!(arr, [400.0, 410.0, 320.0, 240.0, 1.8]);

        // Test conversion from slice
        let params_slice = [450.0, 460.0, 330.0, 250.0, 2.0];
        let camera2 = FovCamera::from(&params_slice[..]);
        assert_eq!(camera2.pinhole.fx, 450.0);
        assert_eq!(camera2.pinhole.fy, 460.0);
        assert_eq!(camera2.pinhole.cx, 330.0);
        assert_eq!(camera2.pinhole.cy, 250.0);
        assert_eq!(camera2.distortion_params(), 2.0);

        // Test conversion from array
        let camera3 = FovCamera::from([500.0, 510.0, 340.0, 260.0, 2.5]);
        assert_eq!(camera3.pinhole.fx, 500.0);
        assert_eq!(camera3.pinhole.fy, 510.0);
        assert_eq!(camera3.distortion_params(), 2.5);

        Ok(())
    }

    // Grid-search estimation of w should recover a model that reprojects
    // synthetic correspondences within a loose pixel tolerance.
    #[test]
    fn test_linear_estimation() -> TestResult {
        // Ground truth FOV camera
        let gt_pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let gt_distortion = DistortionModel::FOV { w: 1.0 };
        let gt_camera = FovCamera::new(gt_pinhole, gt_distortion)?;

        // Generate synthetic 3D points in camera frame
        let n_points = 50;
        let mut pts_3d = Matrix3xX::zeros(n_points);
        let mut pts_2d = Matrix2xX::zeros(n_points);
        let mut valid = 0;

        // Points on an outward spiral at z = 1, projected with the GT camera.
        for i in 0..n_points {
            let angle = i as f64 * 2.0 * std::f64::consts::PI / n_points as f64;
            let r = 0.1 + 0.3 * (i as f64 / n_points as f64);
            let p3d = Vector3::new(r * angle.cos(), r * angle.sin(), 1.0);

            if let Ok(p2d) = gt_camera.project(&p3d) {
                pts_3d.set_column(valid, &p3d);
                pts_2d.set_column(valid, &p2d);
                valid += 1;
            }
        }
        let pts_3d = pts_3d.columns(0, valid).into_owned();
        let pts_2d = pts_2d.columns(0, valid).into_owned();

        // Initial camera with default w (grid search will find best)
        let init_pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let init_distortion = DistortionModel::FOV { w: 0.5 };
        let mut camera = FovCamera::new(init_pinhole, init_distortion)?;

        camera.linear_estimation(&pts_3d, &pts_2d)?;

        // FOV uses grid search so tolerance is looser
        for i in 0..valid {
            let p3d = pts_3d.column(i).into_owned();
            let projected = camera.project(&Vector3::new(p3d.x, p3d.y, p3d.z))?;
            let err = ((projected.x - pts_2d[(0, i)]).powi(2)
                + (projected.y - pts_2d[(1, i)]).powi(2))
            .sqrt();
            assert!(err < 5.0, "Reprojection error too large: {err}");
        }

        Ok(())
    }

    // project → unproject must return a ray parallel to the original point.
    #[test]
    fn test_project_unproject_round_trip() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;

        let test_points = [
            Vector3::new(0.1, 0.2, 1.0),
            Vector3::new(-0.3, 0.1, 2.0),
            Vector3::new(0.05, -0.1, 0.5),
        ];

        for p_cam in &test_points {
            let uv = camera.project(p_cam)?;
            let ray = camera.unproject(&uv)?;
            // Both vectors are unit length, so a dot product of 1 means parallel.
            let dot = ray.dot(&p_cam.normalize());
            assert!(
                (dot - 1.0).abs() < 1e-6,
                "Round-trip failed: dot={dot}, expected ~1.0"
            );
        }

        Ok(())
    }

    // Points behind the camera (z < √ε) must be rejected.
    #[test]
    fn test_project_returns_error_behind_camera() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;
        assert!(camera.project(&Vector3::new(0.0, 0.0, -1.0)).is_err());
        Ok(())
    }

    // At the minimum depth boundary the projection, if accepted, must be finite.
    #[test]
    fn test_project_at_min_depth_boundary() -> TestResult {
        let pinhole = PinholeParams::new(300.0, 300.0, 320.0, 240.0)?;
        let distortion = DistortionModel::FOV { w: 1.5 };
        let camera = FovCamera::new(pinhole, distortion)?;
        let p_min = Vector3::new(0.0, 0.0, crate::MIN_DEPTH);
        if let Ok(uv) = camera.project(&p_min) {
            assert!(uv.x.is_finite() && uv.y.is_finite());
        }
        Ok(())
    }
}