braid_mvg/
camera.rs

1// Copyright 2016-2025 Andrew D. Straw.
2//
3// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
4// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
5// or http://opensource.org/licenses/MIT>, at your option. This file may not be
6// copied, modified, or distributed except according to those terms.
7
8#![allow(non_snake_case)]
9
10use serde::Deserialize;
11
12use na::core::dimension::{U1, U2, U3, U4};
13use na::core::{Matrix3, Matrix4, OMatrix, Vector3, Vector5};
14use na::geometry::{Point2, Point3, Rotation3, UnitQuaternion};
15use na::{allocator::Allocator, DefaultAllocator, RealField};
16use nalgebra as na;
17use num_traits::{One, Zero};
18
19use opencv_ros_camera::UndistortedPixels;
20
21use crate::pymvg_support::PymvgCamera;
22use crate::{
23    DistortedPixel, Distortion, ExtrinsicParameters, MvgError, PointWorldFrame, Result,
24    RosOpenCvIntrinsics, UndistortedPixel,
25};
26
#[derive(Clone, PartialEq)]
/// A calibrated camera with both intrinsic and extrinsic parameters.
///
/// This structure represents a complete camera model including:
/// - **Intrinsic parameters**: focal length, principal point, distortion
///   coefficients
/// - **Extrinsic parameters**: position and orientation in 3D space
/// - **Image dimensions**: width and height in pixels
///
/// The camera follows the standard computer vision coordinate conventions:
/// - Camera frame: X→right, Y→down, Z→forward (optical axis)
/// - Image coordinates: origin at top-left, X→right, Y→down
///
/// # Mathematical Model
///
/// The camera implements the projective camera model:
/// ```text
/// s[u v 1]ᵀ = K[R|t][X Y Z 1]ᵀ
/// ```
/// where:
/// - `(X,Y,Z)` are 3D world coordinates
/// - `(u,v)` are 2D undistorted image coordinates
/// - `K` is the intrinsic matrix
/// - `[R|t]` represents rotation and translation (extrinsics)
/// - `s` is a scaling factor
///
/// Lens distortion is supported via the
/// [`opencv-ros-camera`](https://docs.rs/opencv-ros-camera) crate.
///
/// The parameters for the intrinsic matrix (focal length, principal point, and
/// skew) in addition to the distortion parameters together comprise the
/// intrinsic parameters, or "intrinsics".
///
/// # Example
///
/// ```rust
/// use braid_mvg::{Camera, extrinsics, make_default_intrinsics};
///
/// // Create a camera with default parameters
/// let extrinsics = extrinsics::make_default_extrinsics::<f64>();
/// let intrinsics = make_default_intrinsics::<f64>();
/// let camera = Camera::new(640, 480, extrinsics, intrinsics).unwrap();
///
/// // Project a 3D point to 2D
/// use braid_mvg::PointWorldFrame;
/// use nalgebra::Point3;
/// let point_3d = PointWorldFrame { coords: Point3::new(0.0, 0.0, 5.0) };
/// let pixel = camera.project_3d_to_pixel(&point_3d);
/// ```
pub struct Camera<R: RealField> {
    /// Image width in pixels.
    pub(crate) width: usize,
    /// Image height in pixels.
    pub(crate) height: usize,
    /// Underlying cam-geom camera holding intrinsics and extrinsics.
    pub(crate) inner: cam_geom::Camera<R, RosOpenCvIntrinsics<R>>,
    /// Cached linear projection matrix and its pseudo-inverse, derived from
    /// `inner` at construction time (see `new_from_cam_geom`).
    pub(crate) cache: CameraCache<R>,
}
82
83impl<R: RealField + Copy> Camera<R> {
84    /// Create a new camera from intrinsic and extrinsic parameters.
85    ///
86    /// This constructor creates a complete camera model by combining:
87    /// - Image dimensions (width, height)
88    /// - Extrinsic parameters (camera pose in world coordinates)
89    /// - Intrinsic parameters (focal length, principal point, distortion)
90    ///
91    /// # Arguments
92    ///
93    /// * `width` - Image width in pixels
94    /// * `height` - Image height in pixels
95    /// * `extrinsics` - Camera position and orientation in world coordinates
96    /// * `intrinsics` - Camera intrinsic parameters including distortion model
97    ///
98    /// # Returns
99    ///
100    /// A new [`Camera`] instance, or [`MvgError`] if the parameters are invalid.
101    ///
102    /// # Errors
103    ///
104    /// Returns an error if:
105    /// - The projection matrix cannot be computed
106    /// - The camera parameters are mathematically inconsistent
107    /// - SVD decomposition fails during initialization
108    ///
109    /// # Example
110    ///
111    /// ```rust
112    /// use braid_mvg::{Camera, extrinsics, make_default_intrinsics};
113    ///
114    /// let extrinsics = extrinsics::make_default_extrinsics::<f64>();
115    /// let intrinsics = make_default_intrinsics::<f64>();
116    /// let camera = Camera::new(640, 480, extrinsics, intrinsics)?;
117    /// # Ok::<(), braid_mvg::MvgError>(())
118    /// ```
119    pub fn new(
120        width: usize,
121        height: usize,
122        extrinsics: ExtrinsicParameters<R>,
123        intrinsics: RosOpenCvIntrinsics<R>,
124    ) -> Result<Self> {
125        let inner = cam_geom::Camera::new(intrinsics, extrinsics);
126        Self::new_from_cam_geom(width, height, inner)
127    }
128
    /// Create a new camera from a cam-geom Camera instance.
    ///
    /// This constructor wraps an existing cam-geom Camera with additional
    /// image dimension information and caching for performance.
    ///
    /// # Arguments
    ///
    /// * `width` - Image width in pixels
    /// * `height` - Image height in pixels
    /// * `inner` - A pre-constructed cam-geom Camera instance
    ///
    /// # Returns
    ///
    /// A new [`Camera`] instance, or [`MvgError`] if the camera cannot be constructed.
    ///
    /// # Errors
    ///
    /// Returns an error if the SVD-based pseudo-inverse of the projection
    /// matrix cannot be computed.
    ///
    /// # Example
    ///
    /// ```rust
    /// use braid_mvg::{Camera, extrinsics, make_default_intrinsics};
    /// use cam_geom;
    ///
    /// let extrinsics = extrinsics::make_default_extrinsics::<f64>();
    /// let intrinsics = make_default_intrinsics::<f64>();
    /// let cam_geom_camera = cam_geom::Camera::new(intrinsics, extrinsics);
    /// let camera = Camera::new_from_cam_geom(640, 480, cam_geom_camera)?;
    /// # Ok::<(), braid_mvg::MvgError>(())
    /// ```
    pub fn new_from_cam_geom(
        width: usize,
        height: usize,
        inner: cam_geom::Camera<R, RosOpenCvIntrinsics<R>>,
    ) -> Result<Self> {
        let intrinsics = inner.intrinsics();
        let extrinsics = inner.extrinsics();
        // Build the linear projection matrix M = K[R|t] from the upper-left
        // 3x3 of the intrinsic projection matrix `p` and the 4x4 extrinsic
        // matrix. Lens distortion is intentionally not part of M.
        let m = {
            let p33 = intrinsics.p.fixed_view::<3, 3>(0, 0);
            p33 * extrinsics.matrix()
        };

        // flip sign if focal length < 0
        let m = if m[(0, 0)] < na::convert(0.0) { -m } else { m };

        let m = m / m[(2, 3)]; // normalize so that M[(2,3)] == 1

        // Cache the pseudo-inverse of M for 2D->3D back-projection.
        let pinv = my_pinv(&m)?;
        let cache = CameraCache { m, pinv };
        Ok(Self {
            width,
            height,
            inner,
            cache,
        })
    }
182
183    /// Create a camera from a 3×4 projection matrix.
184    ///
185    /// This method decomposes a camera projection matrix into intrinsic and extrinsic
186    /// parameters using QR decomposition. It assumes no lens distortion (pinhole model).
187    ///
188    /// # Mathematical Background
189    ///
190    /// The projection matrix P has the form:
191    /// ```text
192    /// P = K[R|t]
193    /// ```
194    /// where K is the 3×3 intrinsic matrix and [R|t] is the 3×4 extrinsic matrix.
195    ///
196    /// # Arguments
197    ///
198    /// * `width` - Image width in pixels
199    /// * `height` - Image height in pixels
200    /// * `pmat` - 3×4 projection matrix
201    ///
202    /// # Returns
203    ///
204    /// A new [`Camera`] instance with no distortion, or [`MvgError`] if decomposition fails.
205    ///
206    /// # Errors
207    ///
208    /// Returns an error if:
209    /// - The projection matrix is singular or ill-conditioned
210    /// - QR decomposition fails
211    /// - The resulting parameters are invalid
212    ///
213    /// # Example
214    ///
215    /// ```rust
216    /// use braid_mvg::Camera;
217    /// use nalgebra::{OMatrix, U3, U4};
218    ///
219    /// // Create a simple projection matrix
220    /// let pmat = OMatrix::<f64, U3, U4>::new(
221    ///     1000.0, 0.0, 320.0, 100.0,
222    ///     0.0, 1000.0, 240.0, 200.0,
223    ///     0.0, 0.0, 1.0, 0.01
224    /// );
225    /// let camera = Camera::from_pmat(640, 480, &pmat)?;
226    /// # Ok::<(), braid_mvg::MvgError>(())
227    /// ```
228    pub fn from_pmat(width: usize, height: usize, pmat: &OMatrix<R, U3, U4>) -> Result<Self> {
229        let distortion = Distortion::zero();
230        Self::from_pmat_with_distortion(width, height, pmat, distortion)
231    }
232
233    fn from_pmat_with_distortion(
234        width: usize,
235        height: usize,
236        pmat: &OMatrix<R, U3, U4>,
237        distortion: Distortion<R>,
238    ) -> Result<Self> {
239        let m = (*pmat).remove_column(3);
240        let (rquat, k) = rq_decomposition(m)?;
241
242        let k22: R = k[(2, 2)];
243
244        let one: R = One::one();
245
246        let k = k * (one / k22); // normalize
247        let fx = k[(0, 0)];
248        let skew = k[(0, 1)];
249        let fy = k[(1, 1)];
250        let cx = k[(0, 2)];
251        let cy = k[(1, 2)];
252
253        let intrinsics =
254            RosOpenCvIntrinsics::from_params_with_distortion(fx, skew, fy, cx, cy, distortion);
255        let camcenter = pmat2cam_center(pmat);
256        let extrinsics = ExtrinsicParameters::from_rotation_and_camcenter(rquat, camcenter);
257
258        Camera::new(width, height, extrinsics, intrinsics)
259    }
260
261    /// convert, if possible, into a 3x4 matrix
262    pub fn as_pmat(&self) -> Option<&OMatrix<R, U3, U4>> {
263        let d = &self.intrinsics().distortion;
264        if d.is_linear() {
265            Some(&self.cache.m)
266        } else {
267            None
268        }
269    }
270
    /// Get the linear projection matrix (3×4) for this camera.
    ///
    /// This returns the cached projection matrix that represents a linearized
    /// version of the camera model (without lens distortion). The matrix has
    /// the form P = K[R|t] where K is the intrinsic matrix and [R|t] are
    /// the extrinsic parameters.
    ///
    /// # Returns
    ///
    /// Reference to the 3×4 projection matrix
    ///
    /// # Example
    ///
    /// ```rust
    /// use braid_mvg::{Camera, extrinsics, make_default_intrinsics};
    ///
    /// let camera = Camera::new(640, 480,
    ///     extrinsics::make_default_extrinsics::<f64>(),
    ///     make_default_intrinsics::<f64>())?;
    /// let pmat = camera.linear_part_as_pmat();
    /// println!("Projection matrix shape: {}×{}", pmat.nrows(), pmat.ncols());
    /// # Ok::<(), braid_mvg::MvgError>(())
    /// ```
    pub fn linear_part_as_pmat(&self) -> &OMatrix<R, U3, U4> {
        // Cached at construction; unlike `as_pmat` this ignores distortion.
        &self.cache.m
    }
297
298    /// Return a linearized copy of self.
299    ///
300    /// The returned camera will not have distortion. In other words, the raw
301    /// projected ("distorted") pixels are identical with the "undistorted"
302    /// variant. The camera model is a perfect linear pinhole.
303    pub fn linearize_to_cam_geom(
304        &self,
305    ) -> cam_geom::Camera<R, cam_geom::IntrinsicParametersPerspective<R>> {
306        let fx = self.intrinsics().k[(0, 0)];
307        let skew = self.intrinsics().k[(0, 1)];
308        let fy = self.intrinsics().k[(1, 1)];
309        let cx = self.intrinsics().k[(0, 2)];
310        let cy = self.intrinsics().k[(1, 2)];
311
312        let intrinsics =
313            cam_geom::IntrinsicParametersPerspective::from(cam_geom::PerspectiveParams {
314                fx,
315                fy,
316                skew,
317                cx,
318                cy,
319            });
320
321        let pose = self.extrinsics().clone();
322        cam_geom::Camera::new(intrinsics, pose)
323    }
324
325    /// Transform this camera using a similarity transformation.
326    ///
327    /// This method applies a similarity transformation (scale, rotation, translation)
328    /// to align the camera coordinate system. This is commonly used in:
329    /// - Multi-camera system calibration
330    /// - Coordinate system alignment
331    /// - Scale recovery in structure-from-motion
332    ///
333    /// # Mathematical Details
334    ///
335    /// The transformation applies: `X' = s*R*X + t` where:
336    /// - `s` is the uniform scale factor
337    /// - `R` is the 3×3 rotation matrix
338    /// - `t` is the 3×1 translation vector
339    /// - `X` are the original 3D points
340    ///
341    /// # Arguments
342    ///
343    /// * `s` - Uniform scale factor (positive)
344    /// * `rot` - 3×3 rotation matrix (must be orthogonal with det=1)
345    /// * `t` - 3×1 translation vector
346    ///
347    /// # Returns
348    ///
349    /// A new aligned [`Camera`] instance, or [`MvgError`] if transformation fails.
350    ///
351    /// # Errors
352    ///
353    /// Returns an error if:
354    /// - The rotation matrix is invalid (not orthogonal or det≠1)
355    /// - The scale factor is non-positive
356    /// - Camera reconstruction fails after transformation
357    ///
358    /// # Example
359    ///
360    /// ```rust
361    /// use braid_mvg::{Camera, extrinsics, make_default_intrinsics};
362    /// use nalgebra::{Matrix3, Vector3};
363    ///
364    /// let camera = Camera::new(640, 480,
365    ///     extrinsics::make_default_extrinsics::<f64>(),
366    ///     make_default_intrinsics::<f64>())?;
367    ///
368    /// let scale = 2.0;
369    /// let rotation = Matrix3::identity();
370    /// let translation = Vector3::new(1.0, 0.0, 0.0);
371    ///
372    /// let aligned_camera = camera.align(scale, rotation, translation)?;
373    /// # Ok::<(), braid_mvg::MvgError>(())
374    /// ```
375    pub fn align(&self, s: R, rot: Matrix3<R>, t: Vector3<R>) -> Result<Self> {
376        let m = build_xform(s, rot, t);
377        let mi = my_pinv_4x4(&m)?;
378
379        let pmat = &self.cache.m;
380        let aligned_pmat = pmat * mi;
381
382        Self::from_pmat_with_distortion(
383            self.width,
384            self.height,
385            &aligned_pmat,
386            self.intrinsics().distortion.clone(),
387        )
388    }
389
390    /// return a copy of this camera looking in the opposite direction
391    ///
392    /// The returned camera has the same 3D->2D projection. (The 2D->3D
393    /// projection results in a vector in the opposite direction.)
394    pub fn flip(&self) -> Option<Camera<R>> {
395        use crate::intrinsics::{mirror, MirrorAxis::LeftRight};
396        if !self.intrinsics().rect.is_identity(na::convert(1.0e-7)) {
397            return None;
398        }
399
400        let cc = self.extrinsics().camcenter();
401
402        let lv = self.extrinsics().forward();
403        let lv2 = -lv;
404        let la2 = cc.coords + lv2.as_ref();
405
406        let up = self.extrinsics().up();
407        let up2 = -up;
408
409        let extrinsics2 = crate::ExtrinsicParameters::from_view(&cc.coords, &la2, &up2);
410        let mut intinsics2 = mirror(self.intrinsics(), LeftRight)?;
411
412        intinsics2.p[(0, 1)] = -intinsics2.p[(0, 1)];
413        intinsics2.k[(0, 1)] = -intinsics2.k[(0, 1)];
414
415        let mut d = intinsics2.distortion.clone();
416        *d.tangential2_mut() = -d.tangential2();
417
418        Some(Camera::new(self.width(), self.height(), extrinsics2, intinsics2).unwrap())
419    }
420
    /// Get the camera's intrinsic parameters.
    ///
    /// These hold the projection matrix `p`, camera matrix `k`, distortion
    /// coefficients, and rectification matrix `rect` in the ROS/OpenCV
    /// convention.
    #[inline]
    pub fn intrinsics(&self) -> &RosOpenCvIntrinsics<R> {
        self.inner.intrinsics()
    }
426
    /// Get the camera's extrinsic parameters (pose: position and orientation
    /// in world coordinates).
    #[inline]
    pub fn extrinsics(&self) -> &ExtrinsicParameters<R> {
        self.inner.extrinsics()
    }
432
433    /// Convert this camera to PyMVG format.
434    ///
435    /// PyMVG is a Python library for multiple view geometry. This method converts
436    /// the camera parameters to the PyMVG JSON schema format for interoperability.
437    ///
438    /// # Arguments
439    ///
440    /// * `name` - Name to assign to the camera in the PyMVG format
441    ///
442    /// # Returns
443    ///
444    /// A [`PymvgCamera`] struct containing the camera parameters in PyMVG format
445    ///
446    /// # Example
447    ///
448    /// ```rust
449    /// use braid_mvg::{Camera, extrinsics, make_default_intrinsics};
450    ///
451    /// let camera = Camera::new(640, 480,
452    ///     extrinsics::make_default_extrinsics::<f64>(),
453    ///     make_default_intrinsics::<f64>())?;
454    /// let pymvg_camera = camera.to_pymvg("camera1");
455    /// # Ok::<(), braid_mvg::MvgError>(())
456    /// ```
457    pub fn to_pymvg(&self, name: &str) -> PymvgCamera<R> {
458        let d = &self.intrinsics().distortion;
459        let dvec = Vector5::new(
460            d.radial1(),
461            d.radial2(),
462            d.tangential1(),
463            d.tangential2(),
464            d.radial3(),
465        );
466        PymvgCamera {
467            name: name.to_string(),
468            width: self.width,
469            height: self.height,
470            P: self.intrinsics().p,
471            K: self.intrinsics().k,
472            D: dvec,
473            R: self.intrinsics().rect,
474            Q: *self.extrinsics().rotation().matrix(),
475            translation: *self.extrinsics().translation(),
476        }
477    }
478
479    pub(crate) fn from_pymvg(cam: &PymvgCamera<R>) -> Result<(String, Self)> {
480        let name = cam.name.clone();
481
482        let rquat = right_handed_rotation_quat_new(&cam.Q)?;
483        let extrinsics = crate::extrinsics::from_rquat_translation(rquat, cam.translation);
484        let distortion = Distortion::from_opencv_vec(cam.D);
485        let intrinsics = RosOpenCvIntrinsics::from_components(cam.P, cam.K, distortion, cam.R)?;
486        let cam = Self::new(cam.width, cam.height, extrinsics, intrinsics)?;
487        Ok((name, cam))
488    }
489
    /// Get the image width in pixels.
    #[inline]
    pub fn width(&self) -> usize {
        self.width
    }
495
    /// Get the image height in pixels.
    #[inline]
    pub fn height(&self) -> usize {
        self.height
    }
501
502    /// Project a 3D world point to undistorted 2D image coordinates.
503    ///
504    /// This method performs the core camera projection operation, transforming
505    /// a 3D point in world coordinates to 2D pixel coordinates. The result
506    /// represents the undistorted pixel coordinates (as if using a perfect
507    /// pinhole camera model).
508    pub fn project_3d_to_pixel(&self, pt3d: &PointWorldFrame<R>) -> UndistortedPixel<R> {
509        let coords: Point3<R> = pt3d.coords;
510
511        let cc = self.cache.m * coords.to_homogeneous();
512        UndistortedPixel {
513            coords: Point2::new(cc[0] / cc[2], cc[1] / cc[2]),
514        }
515    }
516
517    /// Project a 3D world point to distorted 2D image coordinates.
518    ///
519    /// This method projects a 3D point to 2D image coordinates and then applies
520    /// lens distortion to get the actual pixel coordinates as they would appear
521    /// in the raw camera image.
522    ///
523    /// # Arguments
524    ///
525    /// * `pt3d` - 3D point in world coordinates
526    ///
527    /// # Returns
528    ///
529    /// [`DistortedPixel`] containing the 2D image coordinates with distortion applied
530    ///
531    /// # Example
532    ///
533    /// ```rust
534    /// use braid_mvg::{Camera, PointWorldFrame, extrinsics, make_default_intrinsics};
535    /// use nalgebra::Point3;
536    ///
537    /// let camera = Camera::new(640, 480,
538    ///     extrinsics::make_default_extrinsics::<f64>(),
539    ///     make_default_intrinsics::<f64>())?;
540    /// let point_3d = PointWorldFrame { coords: Point3::new(0.0, 0.0, 5.0) };
541    /// let distorted_pixel = camera.project_3d_to_distorted_pixel(&point_3d);
542    /// # Ok::<(), braid_mvg::MvgError>(())
543    /// ```
544    pub fn project_3d_to_distorted_pixel(&self, pt3d: &PointWorldFrame<R>) -> DistortedPixel<R> {
545        let undistorted = self.project_3d_to_pixel(pt3d);
546        let ud = UndistortedPixels {
547            data: OMatrix::<R, U1, U2>::new(undistorted.coords[0], undistorted.coords[1]),
548        };
549        self.intrinsics().distort(&ud).into()
550    }
551
    /// Back-project a 2D undistorted pixel to a 3D point at a given distance.
    ///
    /// This method performs the inverse camera projection, taking a 2D pixel
    /// coordinate and a distance to compute the corresponding 3D world point.
    /// This is useful for depth-based reconstruction and ray casting.
    ///
    /// # Arguments
    ///
    /// * `pt2d` - 2D pixel coordinates (undistorted)
    /// * `dist` - Distance from camera center to the 3D point
    ///
    /// # Returns
    ///
    /// [`PointWorldFrame`] containing the 3D world coordinates
    ///
    /// # Example
    ///
    /// ```rust
    /// use braid_mvg::{Camera, UndistortedPixel, extrinsics, make_default_intrinsics};
    /// use nalgebra::Point2;
    ///
    /// let camera = Camera::new(640, 480,
    ///     extrinsics::make_default_extrinsics::<f64>(),
    ///     make_default_intrinsics::<f64>())?;
    /// let pixel = UndistortedPixel { coords: Point2::new(320.0, 240.0) };
    /// let point_3d = camera.project_pixel_to_3d_with_dist(&pixel, 5.0);
    /// # Ok::<(), braid_mvg::MvgError>(())
    /// ```
    pub fn project_pixel_to_3d_with_dist(
        &self,
        pt2d: &UndistortedPixel<R>,
        dist: R,
    ) -> PointWorldFrame<R>
    where
        DefaultAllocator: Allocator<U1, U2>,
        DefaultAllocator: Allocator<U1, U3>,
    {
        // Back-project to a ray in camera coordinates. No distortion removal
        // is needed: the input pixel is already undistorted.
        let ray_cam = self.intrinsics().undistorted_pixel_to_camera(&pt2d.into());
        // Walk `dist` along the ray, then convert to world coordinates.
        let pt_cam = ray_cam.point_on_ray_at_distance(dist);
        self.extrinsics().camera_to_world(&pt_cam).into()
    }
593
594    /// Back-project a 2D distorted pixel to a 3D point at a given distance.
595    ///
596    /// This method first removes lens distortion from the pixel coordinates,
597    /// then back-projects to 3D space at the specified distance from the camera.
598    ///
599    /// # Arguments
600    ///
601    /// * `pt2d` - 2D pixel coordinates (with distortion)
602    /// * `dist` - Distance from camera center to the 3D point
603    ///
604    /// # Returns
605    ///
606    /// [`PointWorldFrame`] containing the 3D world coordinates
607    ///
608    /// # Example
609    ///
610    /// ```rust
611    /// use braid_mvg::{Camera, DistortedPixel, extrinsics, make_default_intrinsics};
612    /// use nalgebra::Point2;
613    ///
614    /// let camera = Camera::new(640, 480,
615    ///     extrinsics::make_default_extrinsics::<f64>(),
616    ///     make_default_intrinsics::<f64>())?;
617    /// let pixel = DistortedPixel { coords: Point2::new(320.0, 240.0) };
618    /// let point_3d = camera.project_distorted_pixel_to_3d_with_dist(&pixel, 5.0);
619    /// # Ok::<(), braid_mvg::MvgError>(())
620    /// ```
621    pub fn project_distorted_pixel_to_3d_with_dist(
622        &self,
623        pt2d: &DistortedPixel<R>,
624        dist: R,
625    ) -> PointWorldFrame<R> {
626        use cam_geom::IntrinsicParameters;
627        let ray_cam = self.intrinsics().pixel_to_camera(&pt2d.into());
628        let pt_cam = ray_cam.point_on_ray_at_distance(dist);
629        self.extrinsics().camera_to_world(&pt_cam).into()
630    }
631}
632
633impl<R: RealField + Copy> std::default::Default for Camera<R> {
634    fn default() -> Camera<R> {
635        let extrinsics = crate::extrinsics::make_default_extrinsics();
636        let intrinsics = crate::make_default_intrinsics();
637        Camera::new(640, 480, extrinsics, intrinsics).unwrap()
638    }
639}
640
impl<R: RealField + Copy> std::fmt::Debug for Camera<R> {
    // Manual `Debug` implementation: prints width, height and the underlying
    // cam-geom camera, omitting the `cache` field (derived data that would
    // only add noise).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Camera")
            .field("width", &self.width)
            .field("height", &self.height)
            .field("inner", &self.inner)
            .finish()
    }
}
650
impl<R: RealField + Copy> AsRef<cam_geom::Camera<R, RosOpenCvIntrinsics<R>>> for Camera<R> {
    /// Borrow the underlying cam-geom camera.
    #[inline]
    fn as_ref(&self) -> &cam_geom::Camera<R, RosOpenCvIntrinsics<R>> {
        &self.inner
    }
}
657
658impl<R: RealField + serde::Serialize + Copy> serde::Serialize for Camera<R> {
659    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
660    where
661        S: serde::Serializer,
662    {
663        use serde::ser::SerializeStruct;
664
665        // 5 is the number of fields we serialize from the struct.
666        let mut state = serializer.serialize_struct("Camera", 5)?;
667        state.serialize_field("width", &self.width)?;
668        state.serialize_field("height", &self.height)?;
669        state.serialize_field("extrinsics", &self.extrinsics())?;
670        state.serialize_field("intrinsics", &self.intrinsics())?;
671        state.end()
672    }
673}
674
675impl<'de, R: RealField + serde::Deserialize<'de> + Copy> serde::Deserialize<'de> for Camera<R> {
676    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
677    where
678        D: serde::Deserializer<'de>,
679    {
680        use serde::de;
681        use std::fmt;
682
683        #[derive(Deserialize)]
684        #[serde(field_identifier, rename_all = "lowercase")]
685        enum Field {
686            Width,
687            Height,
688            Extrinsics,
689            Intrinsics,
690        }
691
692        struct CameraVisitor<'de, R2: RealField + serde::Deserialize<'de>>(
693            std::marker::PhantomData<&'de R2>,
694        );
695
696        impl<'de, R2: RealField + serde::Deserialize<'de> + Copy> serde::de::Visitor<'de>
697            for CameraVisitor<'de, R2>
698        {
699            type Value = Camera<R2>;
700
701            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
702                formatter.write_str("struct Camera")
703            }
704
705            fn visit_seq<V>(self, mut seq: V) -> std::result::Result<Camera<R2>, V::Error>
706            where
707                V: serde::de::SeqAccess<'de>,
708            {
709                let width = seq
710                    .next_element()?
711                    .ok_or_else(|| de::Error::invalid_length(0, &self))?;
712                let height = seq
713                    .next_element()?
714                    .ok_or_else(|| de::Error::invalid_length(1, &self))?;
715                let extrinsics = seq
716                    .next_element()?
717                    .ok_or_else(|| de::Error::invalid_length(0, &self))?;
718                let intrinsics = seq
719                    .next_element()?
720                    .ok_or_else(|| de::Error::invalid_length(1, &self))?;
721                Camera::new(width, height, extrinsics, intrinsics)
722                    .map_err(|e| de::Error::custom(format!("failed creating Camera: {e}")))
723            }
724
725            fn visit_map<V>(self, mut map: V) -> std::result::Result<Camera<R2>, V::Error>
726            where
727                V: serde::de::MapAccess<'de>,
728            {
729                let mut width = None;
730                let mut height = None;
731                let mut extrinsics = None;
732                let mut intrinsics = None;
733                while let Some(key) = map.next_key()? {
734                    match key {
735                        Field::Width => {
736                            if width.is_some() {
737                                return Err(de::Error::duplicate_field("width"));
738                            }
739                            width = Some(map.next_value()?);
740                        }
741                        Field::Height => {
742                            if height.is_some() {
743                                return Err(de::Error::duplicate_field("height"));
744                            }
745                            height = Some(map.next_value()?);
746                        }
747                        Field::Extrinsics => {
748                            if extrinsics.is_some() {
749                                return Err(de::Error::duplicate_field("extrinsics"));
750                            }
751                            extrinsics = Some(map.next_value()?);
752                        }
753                        Field::Intrinsics => {
754                            if intrinsics.is_some() {
755                                return Err(de::Error::duplicate_field("intrinsics"));
756                            }
757                            intrinsics = Some(map.next_value()?);
758                        }
759                    }
760                }
761                let width = width.ok_or_else(|| de::Error::missing_field("width"))?;
762                let height = height.ok_or_else(|| de::Error::missing_field("height"))?;
763                let extrinsics =
764                    extrinsics.ok_or_else(|| de::Error::missing_field("extrinsics"))?;
765                let intrinsics =
766                    intrinsics.ok_or_else(|| de::Error::missing_field("intrinsics"))?;
767                Camera::new(width, height, extrinsics, intrinsics)
768                    .map_err(|e| de::Error::custom(format!("failed creating Camera: {e}")))
769            }
770        }
771
772        const FIELDS: &[&str] = &["width", "height", "extrinsics", "intrinsics"];
773        deserializer.deserialize_struct("Camera", FIELDS, CameraVisitor(std::marker::PhantomData))
774    }
775}
776
777fn _test_camera_is_serialize() {
778    // Compile-time test to ensure Camera implements Serialize trait.
779    fn implements<T: serde::Serialize>() {}
780    implements::<Camera<f64>>();
781}
782
783fn _test_camera_is_deserialize() {
784    // Compile-time test to ensure Camera implements Deserialize trait.
785    fn implements<'de, T: serde::Deserialize<'de>>() {}
786    implements::<Camera<f64>>();
787}
788
#[derive(Clone, PartialEq)]
/// Projection matrices derived from a camera's parameters, cached at
/// construction time to speed up repeated 3D<->2D projections.
pub(crate) struct CameraCache<R: RealField> {
    /// Linear projection matrix M = K[R|t] (no distortion), normalized so
    /// that M[(2,3)] == 1 (see `Camera::new_from_cam_geom`).
    pub(crate) m: OMatrix<R, U3, U4>,
    /// Pseudo-inverse of `m`, used for 2D->3D back-projection.
    pub(crate) pinv: OMatrix<R, U4, U3>,
}
794
795const SVD_MAX_ITERATIONS: usize = 1_000_000;
796
797fn my_pinv<R: RealField + Copy>(m: &OMatrix<R, U3, U4>) -> Result<OMatrix<R, U4, U3>> {
798    na::linalg::SVD::try_new(*m, true, true, na::convert(1e-7), SVD_MAX_ITERATIONS)
799        .ok_or(MvgError::SvdFailed)?
800        .pseudo_inverse(na::convert(1.0e-7))
801        .map_err(|e| MvgError::PinvError {
802            error: format!("inverse failed {e}"),
803        })
804}
805
806fn my_pinv_4x4<R: RealField + Copy>(m: &OMatrix<R, U4, U4>) -> Result<OMatrix<R, U4, U4>> {
807    na::linalg::SVD::try_new(*m, true, true, na::convert(1e-7), SVD_MAX_ITERATIONS)
808        .ok_or(MvgError::SvdFailed)?
809        .pseudo_inverse(na::convert(1.0e-7))
810        .map_err(|e| MvgError::PinvError {
811            error: format!("inverse failed {e}"),
812        })
813}
814
815fn build_xform<R: RealField + Copy>(s: R, rot: Matrix3<R>, t: Vector3<R>) -> Matrix4<R> {
816    let mut m1 = Matrix4::zero();
817    for i in 0..3 {
818        for j in 0..3 {
819            m1[(i, j)] = rot[(i, j)];
820        }
821    }
822    let mut m2 = m1 * s;
823    for i in 0..3 {
824        m2[(i, 3)] = t[i];
825    }
826    m2[(3, 3)] = R::one();
827    m2
828}
829
#[allow(clippy::many_single_char_names)]
/// Recover the camera center from a 3x4 projection matrix.
///
/// The camera center C is the right null vector of P (P*C = 0 in homogeneous
/// coordinates); its homogeneous components are the signed determinants of
/// the 3x3 submatrices obtained by deleting one column of P at a time.
fn pmat2cam_center<R: RealField + Copy>(p: &OMatrix<R, U3, U4>) -> Point3<R> {
    let x = (*p).remove_column(0).determinant();
    let y = -(*p).remove_column(1).determinant();
    let z = (*p).remove_column(2).determinant();
    let w = -(*p).remove_column(3).determinant();
    // Dehomogenize by the scalar part.
    Point3::from(Vector3::new(x / w, y / w, z / w))
}
838
839/// Calculate angle of quaternion
840///
841/// This is the implementation from prior to
842/// https://github.com/rustsim/nalgebra/commit/74aefd9c23dadd12ee654c7d0206b0a96d22040c
843fn my_quat_angle<R: RealField + Copy>(quat: &na::UnitQuaternion<R>) -> R {
844    let w = quat.quaternion().scalar().abs();
845
846    // Handle inaccuracies that make break `.acos`.
847    if w >= R::one() {
848        R::zero()
849    } else {
850        w.acos() * na::convert(2.0f64)
851    }
852}
853
854/// convert a 3x3 matrix into a valid right-handed rotation
855fn right_handed_rotation_quat_new<R: RealField + Copy>(
856    orig: &Matrix3<R>,
857) -> Result<UnitQuaternion<R>> {
858    let r1 = *orig;
859    let rotmat = Rotation3::from_matrix_unchecked(r1);
860    let rquat = UnitQuaternion::from_rotation_matrix(&rotmat);
861    {
862        // Check for valid rotation matrix by converting back to rotation
863        // matrix and back again to quat then comparing quats. Probably
864        // there is a much faster and better way.
865        let rotmat2 = rquat.to_rotation_matrix();
866        let rquat2 = UnitQuaternion::from_rotation_matrix(&rotmat2);
867        let delta = rquat.rotation_to(&rquat2);
868        let angle = my_quat_angle(&delta);
869        let epsilon = na::convert(1.0e-7);
870        if angle.abs() > epsilon {
871            return Err(MvgError::InvalidRotationMatrix);
872        }
873    }
874    Ok(rquat)
875}
876
877fn rq<R: RealField + Copy>(A: Matrix3<R>) -> (Matrix3<R>, Matrix3<R>) {
878    let zero: R = Zero::zero();
879    let one: R = One::one();
880
881    // see https://math.stackexchange.com/a/1640762
882    let P = Matrix3::<R>::new(zero, zero, one, zero, one, zero, one, zero, zero);
883    let Atilde = P * A;
884
885    let (Qtilde, Rtilde) = {
886        let qrm = na::linalg::QR::new(Atilde.transpose());
887        (qrm.q(), qrm.r())
888    };
889    let Q = P * Qtilde.transpose();
890    let R = P * Rtilde.transpose() * P;
891    (R, Q)
892}
893
894/// perform RQ decomposition and return results as right-handed quaternion and intrinsics matrix
895pub fn rq_decomposition<R: RealField + Copy>(
896    orig: Matrix3<R>,
897) -> Result<(UnitQuaternion<R>, Matrix3<R>)> {
898    let (mut intrin, mut q) = rq(orig);
899    let zero: R = Zero::zero();
900    for i in 0..3 {
901        if intrin[(i, i)] < zero {
902            for j in 0..3 {
903                intrin[(j, i)] = -intrin[(j, i)];
904                q[(i, j)] = -q[(i, j)];
905            }
906        }
907    }
908
909    match right_handed_rotation_quat_new(&q) {
910        Ok(rquat) => Ok((rquat, intrin)),
911        Err(error) => {
912            match error {
913                MvgError::InvalidRotationMatrix => {
914                    // convert left-handed rotation to right-handed rotation
915                    let q = -q;
916                    let intrin = -intrin;
917                    let rquat = right_handed_rotation_quat_new(&q)?;
918                    Ok((rquat, intrin))
919                }
920                e => Err(e),
921            }
922        }
923    }
924}
925
#[cfg(test)]
mod tests {
    use crate::{DistortedPixel, PointWorldFrame};
    use na::core::dimension::{U3, U4};
    use na::core::{OMatrix, Vector4};
    use na::geometry::{Point2, Point3};
    use nalgebra as na;

    /// Return true when projecting fixed test world points through `cam`
    /// agrees (within 1e-10 squared pixel distance) with direct multiplication
    /// by the 3x4 projection matrix `pmat`.
    fn is_pmat_same(cam: &crate::Camera<f64>, pmat: &OMatrix<f64, U3, U4>) -> bool {
        let world_pts = [
            PointWorldFrame {
                coords: Point3::new(1.23, 4.56, 7.89),
            },
            PointWorldFrame {
                coords: Point3::new(1.0, 2.0, 3.0),
            },
        ];

        // Projection via the full camera model.
        let pts1: Vec<DistortedPixel<_>> = world_pts
            .iter()
            .map(|world| cam.project_3d_to_distorted_pixel(world))
            .collect();

        // Projection via plain matrix multiplication + perspective division.
        let pts2: Vec<DistortedPixel<_>> = world_pts
            .iter()
            .map(|world| {
                let world_h = Vector4::new(world.coords.x, world.coords.y, world.coords.z, 1.0);
                let rst = pmat * world_h;
                DistortedPixel {
                    coords: Point2::new(rst[0] / rst[2], rst[1] / rst[2]),
                }
            })
            .collect();

        let epsilon = 1e-10;

        for (im1, im2) in pts1.iter().zip(pts2) {
            println!("im1: {im1:?}");
            println!("im2: {im2:?}");
            let diff = im1.coords - im2.coords;
            let dist_squared = diff.dot(&diff);
            if dist_squared.is_nan() {
                // Degenerate projections (NaN distance) are skipped rather
                // than counted as a mismatch.
                continue;
            }
            println!("dist_squared: {dist_squared:?}");
            if dist_squared > epsilon {
                return false;
            }
        }
        true
    }

    /// Return true when two cameras project fixed test world points to the
    /// same pixels (within 1e-10 squared pixel distance). NaN distances are
    /// skipped, as in `is_pmat_same`.
    fn is_similar(cam1: &crate::Camera<f64>, cam2: &crate::Camera<f64>) -> bool {
        let world_pts = [
            PointWorldFrame {
                coords: Point3::new(1.23, 4.56, 7.89),
            },
            PointWorldFrame {
                coords: Point3::new(1.0, 2.0, 3.0),
            },
        ];

        let pts1: Vec<DistortedPixel<_>> = world_pts
            .iter()
            .map(|world| cam1.project_3d_to_distorted_pixel(world))
            .collect();

        let pts2: Vec<DistortedPixel<_>> = world_pts
            .iter()
            .map(|world| cam2.project_3d_to_distorted_pixel(world))
            .collect();

        let epsilon = 1e-10;

        for (im1, im2) in pts1.iter().zip(pts2) {
            let diff = im1.coords - im2.coords;
            let dist_squared = diff.dot(&diff);
            if dist_squared.is_nan() {
                continue;
            }
            if dist_squared > epsilon {
                return false;
            }
        }
        true
    }

    /// Round-trip each test camera through its 3x4 projection matrix and
    /// check the reconstructed camera projects identically.
    #[test]
    fn test_to_from_pmat() {
        for (name, cam1) in crate::tests::get_test_cameras().iter() {
            println!("\n\n\ntesting camera {name}");
            let pmat = match cam1.as_pmat() {
                Some(pmat) => pmat,
                None => {
                    // Not every test camera exposes a projection matrix.
                    println!("skipping camera {name}: no pmat");
                    continue;
                }
            };
            assert!(is_pmat_same(cam1, pmat));
            let cam2 = crate::Camera::from_pmat(cam1.width(), cam1.height(), pmat).unwrap();
            assert!(is_similar(cam1, &cam2));
        }
    }

    /// A flipped camera must project identically to the original.
    #[test]
    fn test_flipped_camera() {
        for (name, cam1) in crate::tests::get_test_cameras().iter() {
            println!("testing camera {name}");
            let cam2 = cam1.flip().expect("flip cam");
            if !is_similar(cam1, &cam2) {
                panic!("results not similar for cam {name}");
            }
        }
    }

    /// Verify the RQ decomposition: A == R*Q, Q orthonormal, R upper
    /// triangular.
    #[test]
    fn test_rq() {
        let a = na::Matrix3::new(1.2, 3.4, 5.6, 7.8, 9.8, 7.6, 5.4, 3.2, 1.0);
        let (r, q) = crate::camera::rq(a);
        println!("r {r:?}");
        println!("q {q:?}");

        // check it is a real decomposition
        let a2 = r * q;
        println!("a {a:?}");
        println!("a2 {a2:?}");

        approx::assert_abs_diff_eq!(a, a2, epsilon = 1e-10);

        // check that q is orthonormal
        let actual = q * q.transpose();
        let expected = na::Matrix3::identity();
        approx::assert_abs_diff_eq!(actual, expected, epsilon = 1e-10);

        // check that r is upper triangular
        approx::assert_abs_diff_eq!(r[(1, 0)], 0.0, epsilon = 1e-10);
        approx::assert_abs_diff_eq!(r[(2, 0)], 0.0, epsilon = 1e-10);
        approx::assert_abs_diff_eq!(r[(2, 1)], 0.0, epsilon = 1e-10);
    }

    /// Exploratory test printing quaternion round-trip behavior for a
    /// near-degenerate rotation matrix (motivates `my_quat_angle`).
    #[test]
    fn test_rotation_matrices_and_quaternions() {
        use na::geometry::{Rotation3, UnitQuaternion};

        // A numerically noisy rotation close to diag(-1, -1, -1) behavior.
        #[rustfmt::skip]
        let r1 = na::Matrix3::from_column_slice(
            &[-0.9999999999999998, -0.00000000000000042632564145606005, -0.0000000000000002220446049250313,
            0.0000000000000004263256414560601, -1.0, 0.0,
            -0.0000000000000002220446049250313, -0.00000000000000000000000000000004930380657631324, -0.9999999999999998]);

        let rotmat = Rotation3::from_matrix_unchecked(r1);

        let rquat = UnitQuaternion::from_rotation_matrix(&rotmat);

        let rotmat2 = rquat.to_rotation_matrix();

        let rquat2 = UnitQuaternion::from_rotation_matrix(&rotmat2);

        let angle = rquat.angle_to(&rquat2);
        let delta = rquat.rotation_to(&rquat2);
        let my_angle = crate::camera::my_quat_angle(&delta);

        println!("r1 {r1:?}");
        println!("rotmat {rotmat:?}");
        println!("rquat {rquat:?}");
        println!("rotmat2 {rotmat2:?}");
        println!("rquat2 {rquat2:?}");
        println!("angle: {angle:?}");
        println!("delta {delta:?}");
        println!("my_angle: {my_angle:?}");

        let q = na::Quaternion::new(
            -0.000000000000000000000000000000002756166576353432,
            0.000000000000000024825341532472726,
            -0.00000000000000004766465574234759,
            0.5590169943749475,
        );
        let uq = UnitQuaternion::from_quaternion(q); // hmm, this conversion doesn't give me the delta from above :(
        println!("q: {q:?}");
        println!("uq: {uq:?}");
        println!("uq.angle(): {:?}", uq.angle());
    }

    /// JSON serialize/deserialize round trip of a default camera.
    #[test]
    fn test_serde() {
        let expected = crate::Camera::<f64>::default();
        let buf = serde_json::to_string(&expected).unwrap();
        let actual: crate::Camera<f64> = serde_json::from_str(&buf).unwrap();
        assert!(expected == actual);
    }
}