1#![allow(clippy::many_single_char_names, clippy::similar_names)]
9use crate::batch::{DetectionBatch, Pose6D};
10use crate::config::PoseEstimationMode;
11use crate::image::ImageView;
12use nalgebra::{Matrix2, Matrix3, Matrix6, Rotation3, UnitQuaternion, Vector3, Vector6};
13
/// Lens distortion model attached to a set of [`CameraIntrinsics`].
///
/// The non-trivial models are only compiled in with the `non_rectified`
/// feature; without it the sole variant is [`DistortionCoeffs::None`].
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum DistortionCoeffs {
    /// No distortion: pixels are used as-is (rectified input).
    None,
    /// Brown-Conrady (plumb-bob) model with radial (`k1`, `k2`, `k3`) and
    /// tangential (`p1`, `p2`) coefficients.
    #[cfg(feature = "non_rectified")]
    BrownConrady {
        /// First radial coefficient.
        k1: f64,
        /// Second radial coefficient.
        k2: f64,
        /// First tangential coefficient.
        p1: f64,
        /// Second tangential coefficient.
        p2: f64,
        /// Third radial coefficient.
        k3: f64,
    },
    /// Kannala-Brandt fisheye model with four coefficients.
    #[cfg(feature = "non_rectified")]
    KannalaBrandt {
        /// First coefficient.
        k1: f64,
        /// Second coefficient.
        k2: f64,
        /// Third coefficient.
        k3: f64,
        /// Fourth coefficient.
        k4: f64,
    },
}
58
59impl DistortionCoeffs {
60 #[must_use]
62 #[inline]
63 pub const fn is_distorted(&self) -> bool {
64 !matches!(self, Self::None)
65 }
66}
67
/// Pinhole camera intrinsics plus an optional lens-distortion model.
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct CameraIntrinsics {
    /// Focal length along x, in pixels.
    pub fx: f64,
    /// Focal length along y, in pixels.
    pub fy: f64,
    /// Principal point x-coordinate, in pixels.
    pub cx: f64,
    /// Principal point y-coordinate, in pixels.
    pub cy: f64,
    /// Lens distortion model ([`DistortionCoeffs::None`] for rectified input).
    pub distortion: DistortionCoeffs,
}
87
impl CameraIntrinsics {
    /// Creates distortion-free intrinsics (for rectified images).
    #[must_use]
    pub fn new(fx: f64, fy: f64, cx: f64, cy: f64) -> Self {
        Self {
            fx,
            fy,
            cx,
            cy,
            distortion: DistortionCoeffs::None,
        }
    }

    /// Creates intrinsics with a Brown-Conrady distortion model.
    #[cfg(feature = "non_rectified")]
    #[must_use]
    #[allow(clippy::too_many_arguments)]
    pub fn with_brown_conrady(
        fx: f64,
        fy: f64,
        cx: f64,
        cy: f64,
        k1: f64,
        k2: f64,
        p1: f64,
        p2: f64,
        k3: f64,
    ) -> Self {
        Self {
            fx,
            fy,
            cx,
            cy,
            distortion: DistortionCoeffs::BrownConrady { k1, k2, p1, p2, k3 },
        }
    }

    /// Creates intrinsics with a Kannala-Brandt fisheye distortion model.
    #[cfg(feature = "non_rectified")]
    #[must_use]
    #[allow(clippy::too_many_arguments)]
    pub fn with_kannala_brandt(
        fx: f64,
        fy: f64,
        cx: f64,
        cy: f64,
        k1: f64,
        k2: f64,
        k3: f64,
        k4: f64,
    ) -> Self {
        Self {
            fx,
            fy,
            cx,
            cy,
            distortion: DistortionCoeffs::KannalaBrandt { k1, k2, k3, k4 },
        }
    }

    /// Returns the 3x3 camera matrix
    /// `K = [[fx, 0, cx], [0, fy, cy], [0, 0, 1]]`.
    #[must_use]
    pub fn as_matrix(&self) -> Matrix3<f64> {
        Matrix3::new(self.fx, 0.0, self.cx, 0.0, self.fy, self.cy, 0.0, 0.0, 1.0)
    }

    /// Returns the closed-form inverse of the camera matrix `K`.
    ///
    /// NOTE(review): assumes `fx` and `fy` are non-zero — no guard here;
    /// confirm callers never construct degenerate intrinsics.
    #[must_use]
    pub fn inv_matrix(&self) -> Matrix3<f64> {
        Matrix3::new(
            1.0 / self.fx,
            0.0,
            -self.cx / self.fx,
            0.0,
            1.0 / self.fy,
            -self.cy / self.fy,
            0.0,
            0.0,
            1.0,
        )
    }

    /// Maps a (possibly distorted) pixel to its ideal undistorted pixel
    /// position. Identity when no distortion model is configured.
    ///
    /// For the distorted models this normalizes the pixel with the
    /// intrinsics, undistorts in normalized coordinates via the
    /// corresponding `crate::camera` model, and re-applies the intrinsics.
    #[must_use]
    pub fn undistort_pixel(&self, px: f64, py: f64) -> [f64; 2] {
        match self.distortion {
            DistortionCoeffs::None => [px, py],
            #[cfg(feature = "non_rectified")]
            DistortionCoeffs::BrownConrady { k1, k2, p1, p2, k3 } => {
                let m = crate::camera::BrownConradyModel { k1, k2, p1, p2, k3 };
                // Pixel -> normalized image coordinates.
                let xn = (px - self.cx) / self.fx;
                let yn = (py - self.cy) / self.fy;
                let [xu, yu] = crate::camera::CameraModel::undistort(&m, xn, yn);
                // Normalized -> pixel.
                [xu * self.fx + self.cx, yu * self.fy + self.cy]
            },
            #[cfg(feature = "non_rectified")]
            DistortionCoeffs::KannalaBrandt { k1, k2, k3, k4 } => {
                let m = crate::camera::KannalaBrandtModel { k1, k2, k3, k4 };
                let xn = (px - self.cx) / self.fx;
                let yn = (py - self.cy) / self.fy;
                let [xu, yu] = crate::camera::CameraModel::undistort(&m, xn, yn);
                [xu * self.fx + self.cx, yu * self.fy + self.cy]
            },
        }
    }

    /// Applies the distortion model to normalized image coordinates
    /// `(xn, yn)` and converts the result to pixel coordinates.
    ///
    /// With [`DistortionCoeffs::None`] this is plain pinhole projection.
    #[must_use]
    pub fn distort_normalized(&self, xn: f64, yn: f64) -> [f64; 2] {
        let [xd, yd] = match self.distortion {
            DistortionCoeffs::None => [xn, yn],
            #[cfg(feature = "non_rectified")]
            DistortionCoeffs::BrownConrady { k1, k2, p1, p2, k3 } => {
                let m = crate::camera::BrownConradyModel { k1, k2, p1, p2, k3 };
                crate::camera::CameraModel::distort(&m, xn, yn)
            },
            #[cfg(feature = "non_rectified")]
            DistortionCoeffs::KannalaBrandt { k1, k2, k3, k4 } => {
                let m = crate::camera::KannalaBrandtModel { k1, k2, k3, k4 };
                crate::camera::CameraModel::distort(&m, xn, yn)
            },
        };
        [xd * self.fx + self.cx, yd * self.fy + self.cy]
    }

    /// Returns the 2x2 Jacobian of the distortion mapping at `(xn, yn)` in
    /// normalized coordinates; identity when undistorted.
    ///
    /// NOTE(review): assumed to be d(distorted)/d(undistorted) as produced
    /// by `CameraModel::distort_jacobian` — confirm against that trait.
    #[must_use]
    #[cfg_attr(not(feature = "non_rectified"), allow(unused_variables))]
    pub(crate) fn distortion_jacobian(&self, xn: f64, yn: f64) -> [[f64; 2]; 2] {
        match self.distortion {
            DistortionCoeffs::None => [[1.0, 0.0], [0.0, 1.0]],
            #[cfg(feature = "non_rectified")]
            DistortionCoeffs::BrownConrady { k1, k2, p1, p2, k3 } => {
                let m = crate::camera::BrownConradyModel { k1, k2, p1, p2, k3 };
                crate::camera::CameraModel::distort_jacobian(&m, xn, yn)
            },
            #[cfg(feature = "non_rectified")]
            DistortionCoeffs::KannalaBrandt { k1, k2, k3, k4 } => {
                let m = crate::camera::KannalaBrandtModel { k1, k2, k3, k4 };
                crate::camera::CameraModel::distort_jacobian(&m, xn, yn)
            },
        }
    }
}
242
/// Rigid-body transform mapping tag-frame points into the camera frame
/// (`p_cam = R * p_tag + t`), as used by [`Pose::project`].
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Pose {
    /// 3x3 rotation matrix.
    pub rotation: Matrix3<f64>,
    /// Translation vector (tag origin expressed in camera coordinates).
    pub translation: Vector3<f64>,
}
253
254impl Pose {
255 #[must_use]
257 pub fn new(rotation: Matrix3<f64>, translation: Vector3<f64>) -> Self {
258 Self {
259 rotation,
260 translation,
261 }
262 }
263
264 #[must_use]
266 pub fn project(&self, point: &Vector3<f64>, intrinsics: &CameraIntrinsics) -> [f64; 2] {
267 let p_cam = self.rotation * point + self.translation;
268 let z = p_cam.z.max(1e-4);
269 let x = (p_cam.x / z) * intrinsics.fx + intrinsics.cx;
270 let y = (p_cam.y / z) * intrinsics.fy + intrinsics.cy;
271 [x, y]
272 }
273}
274
275#[inline]
277pub(crate) fn symmetrize_jtj6(jtj: &mut Matrix6<f64>) {
278 for r in 1..6 {
279 for c in 0..r {
280 jtj[(r, c)] = jtj[(c, r)];
281 }
282 }
283}
284
/// Non-zero entries of the pinhole projection Jacobian at a camera-space
/// point, given `x_z = x/z`, `y_z = y/z`, and `z_inv = 1/z`.
///
/// NOTE(review): the tuple ordering below is inferred from the formulas —
/// confirm against callers. It appears to be
/// `(du/dx, du/dz, du/dwx, du/dwy, du/dwz, dv/dy, dv/dz, dv/dwx, dv/dwy, dv/dwz)`
/// where `w` is an infinitesimal rotation of the camera-space point;
/// `du/dy` and `dv/dx` are identically zero for an undistorted pinhole
/// model and are omitted.
#[inline]
pub(crate) fn projection_jacobian(
    x_z: f64,
    y_z: f64,
    z_inv: f64,
    intrinsics: &CameraIntrinsics,
) -> (f64, f64, f64, f64, f64, f64, f64, f64, f64, f64) {
    let fx = intrinsics.fx;
    let fy = intrinsics.fy;
    (
        fx * z_inv,             // du/dx
        -fx * x_z * z_inv,      // du/dz
        -fx * x_z * y_z,        // du/dwx
        fx * (x_z * x_z + 1.0), // du/dwy
        -fx * y_z,              // du/dwz
        fy * z_inv,             // dv/dy
        -fy * y_z * z_inv,      // dv/dz
        -fy * (y_z * y_z + 1.0), // dv/dwx
        fy * y_z * x_z,         // dv/dwy
        fy * x_z,               // dv/dwz
    )
}
315
316#[must_use]
333#[allow(clippy::missing_panics_doc)]
334#[tracing::instrument(skip_all, name = "pipeline::estimate_tag_pose")]
335pub fn estimate_tag_pose(
336 intrinsics: &CameraIntrinsics,
337 corners: &[[f64; 2]; 4],
338 tag_size: f64,
339 img: Option<&ImageView>,
340 mode: PoseEstimationMode,
341) -> (Option<Pose>, Option<[[f64; 6]; 6]>) {
342 estimate_tag_pose_with_config(
343 intrinsics,
344 corners,
345 tag_size,
346 img,
347 mode,
348 &crate::config::DetectorConfig::default(),
349 None,
350 )
351}
352
353#[must_use]
355#[allow(clippy::missing_panics_doc)]
356#[tracing::instrument(skip_all, name = "pipeline::estimate_tag_pose")]
357pub fn estimate_tag_pose_with_config(
358 intrinsics: &CameraIntrinsics,
359 corners: &[[f64; 2]; 4],
360 tag_size: f64,
361 img: Option<&ImageView>,
362 mode: PoseEstimationMode,
363 config: &crate::config::DetectorConfig,
364 external_covariances: Option<&[Matrix2<f64>; 4]>,
365) -> (Option<Pose>, Option<[[f64; 6]; 6]>) {
366 let ideal_corners: [[f64; 2]; 4] = if intrinsics.distortion == DistortionCoeffs::None {
371 *corners
372 } else {
373 core::array::from_fn(|i| intrinsics.undistort_pixel(corners[i][0], corners[i][1]))
374 };
375
376 let Some(h_poly) = crate::decoder::Homography::square_to_quad(&ideal_corners) else {
378 return (None, None);
379 };
380 let h_pixel = h_poly.h;
381
382 let k_inv = intrinsics.inv_matrix();
384 let h_norm = k_inv * h_pixel;
385
386 let scaler = 2.0 / tag_size;
392 let mut h_metric = h_norm;
393 h_metric.column_mut(0).scale_mut(scaler);
394 h_metric.column_mut(1).scale_mut(scaler);
395
396 let Some(candidates) = solve_ippe_square(&h_metric) else {
398 return (None, None);
399 };
400
401 let best_pose = find_best_pose(intrinsics, &ideal_corners, tag_size, &candidates);
404
405 match (mode, img, external_covariances) {
409 (PoseEstimationMode::Accurate, _, Some(ext_covs)) => {
410 let (refined_pose, covariance) = crate::pose_weighted::refine_pose_lm_weighted(
412 intrinsics, corners, tag_size, best_pose, ext_covs,
413 );
414 (Some(refined_pose), Some(covariance))
415 },
416 (PoseEstimationMode::Accurate, Some(image), None) => {
417 let uncertainty = crate::pose_weighted::compute_framework_uncertainty(
419 image,
420 &ideal_corners,
421 &h_poly,
422 config.tikhonov_alpha_max,
423 config.sigma_n_sq,
424 config.structure_tensor_radius,
425 );
426 let (refined_pose, covariance) = crate::pose_weighted::refine_pose_lm_weighted(
427 intrinsics,
428 corners,
429 tag_size,
430 best_pose,
431 &uncertainty,
432 );
433 (Some(refined_pose), Some(covariance))
434 },
435 _ => {
436 (
438 Some(refine_pose_lm(
439 intrinsics,
440 corners,
441 tag_size,
442 best_pose,
443 config.huber_delta_px,
444 )),
445 None,
446 )
447 },
448 }
449}
450
/// Solves the planar pose problem for a square tag from the metric
/// homography `h` (IPPE-style), returning up to two candidate poses.
///
/// Returns `None` when the homography is degenerate (vanishing singular
/// values / zero scale).
fn solve_ippe_square(h: &Matrix3<f64>) -> Option<[Pose; 2]> {
    let h1 = h.column(0);
    let h2 = h.column(1);

    // Gram matrix entries of [h1 h2]; its eigenvalues are the squared
    // singular values of that 3x2 block.
    let a = h1.dot(&h1);
    let b = h2.dot(&h2);
    let c = h1.dot(&h2);

    // Eigenvalues of the 2x2 Gram matrix via trace/determinant. The
    // discriminant is clamped at zero to absorb floating-point round-off.
    let trace = a + b;
    let det = a * b - c * c;
    let delta = (trace * trace - 4.0 * det).max(0.0).sqrt();

    let s1_sq = (trace + delta) * 0.5;
    let s2_sq = (trace - delta) * 0.5;

    // Singular values, s1 >= s2 >= 0.
    let s1 = s1_sq.sqrt();
    let s2 = s2_sq.sqrt();

    // Near-equal singular values: (close to) fronto-parallel view; the two
    // planar solutions coincide, so orthonormalize directly and return the
    // same pose twice.
    if (s1 - s2).abs() < 1e-4 * s1 {
        let mut r1 = h1.clone_owned();
        let scale = 1.0 / r1.norm();
        r1 *= scale;

        // Gram-Schmidt: make r2 orthogonal to r1, then complete the frame.
        let mut r2 = h2 - r1 * (h2.dot(&r1));
        r2 = r2.normalize();

        let r3 = r1.cross(&r2);
        let rot = Matrix3::from_columns(&[r1, r2, r3]);

        // Mean singular value sets the scale; reject a vanishing scale.
        let gamma = (s1 + s2) * 0.5;
        if gamma < 1e-8 {
            return None;
        }
        let tz = 1.0 / gamma;
        let t = h.column(2) * tz;

        let pose = Pose::new(rot, t);
        return Some([pose, pose]);
    }

    // Right singular vector for the larger singular value; when the
    // off-diagonal Gram term vanishes, fall back to an axis-aligned basis.
    let v1 = if c.abs() > 1e-8 {
        let v = nalgebra::Vector2::new(s1_sq - b, c);
        v.normalize()
    } else if a >= b {
        nalgebra::Vector2::new(1.0, 0.0)
    } else {
        nalgebra::Vector2::new(0.0, 1.0)
    };

    // v2 is v1 rotated by 90 degrees (orthonormal right basis).
    let v2 = nalgebra::Vector2::new(-v1.y, v1.x);

    // Images of v1/v2 under [h1 h2]; dividing by the singular values yields
    // the left singular vectors u1/u2.
    let j_v1 = h1 * v1.x + h2 * v1.y;
    let j_v2 = h1 * v2.x + h2 * v2.y;

    if s1 < 1e-8 {
        return None;
    }
    let u1 = j_v1 / s1;
    // s2 may be much smaller; clamp to avoid division blow-up.
    let u2 = j_v2 / s2.max(1e-8);

    // First candidate rotation: first two columns of U * V^T, completed by
    // the cross product to guarantee a right-handed frame.
    let r1_a = u1 * v1.x + u2 * v2.x;
    let r2_a = u1 * v1.y + u2 * v2.y;
    let r3_a = r1_a.cross(&r2_a);
    let rot_a = Matrix3::from_columns(&[r1_a, r2_a, r3_a]);

    // Translation from the third homography column, scaled by the inverse
    // mean singular value (no tiny-gamma guard needed: s1 >= 1e-8 here).
    let gamma = (s1 + s2) * 0.5;
    let tz = 1.0 / gamma;
    let t_a = h.column(2) * tz;
    let pose_a = Pose::new(rot_a, t_a);

    // Second candidate: the planar two-fold ambiguity flips the x/y
    // components of the plane normal; rebuild an orthonormal frame around
    // the reflected normal.
    let n_a = rot_a.column(2);
    let n_b_raw = Vector3::new(-n_a.x, -n_a.y, n_a.z);
    let n_b = if n_b_raw.norm_squared() > 1e-8 {
        n_b_raw.normalize()
    } else {
        Vector3::z_axis().into_inner()
    };

    // x-axis of the second frame: h1 projected onto the plane orthogonal to
    // the new normal, with a safe fallback when the projection degenerates.
    let h1_norm = h1.normalize();
    let x_b_raw = h1_norm - n_b * h1_norm.dot(&n_b);
    let x_b = if x_b_raw.norm_squared() > 1e-8 {
        x_b_raw.normalize()
    } else {
        // Pick whichever axis is least aligned with the normal as a tangent.
        let tangent = if n_b.x.abs() > 0.9 {
            Vector3::y_axis().into_inner()
        } else {
            Vector3::x_axis().into_inner()
        };
        tangent.cross(&n_b).normalize()
    };

    let y_b = n_b.cross(&x_b);
    let rot_b = Matrix3::from_columns(&[x_b, y_b, n_b]);
    let pose_b = Pose::new(rot_b, t_a);

    Some([pose_a, pose_b])
}
613
614pub(crate) fn centered_tag_corners(tag_size: f64) -> [Vector3<f64>; 4] {
617 let h = tag_size * 0.5;
618 [
619 Vector3::new(-h, -h, 0.0),
620 Vector3::new(h, -h, 0.0),
621 Vector3::new(h, h, 0.0),
622 Vector3::new(-h, h, 0.0),
623 ]
624}
625
626#[inline]
630fn project_with_distortion(p_cam: &Vector3<f64>, intrinsics: &CameraIntrinsics) -> [f64; 2] {
631 let z = p_cam.z.max(1e-4);
632 let xn = p_cam.x / z;
633 let yn = p_cam.y / z;
634 intrinsics.distort_normalized(xn, yn)
635}
636
/// Refines `initial_pose` by Levenberg-Marquardt minimization of the
/// Huber-robustified reprojection error of the four tag corners.
///
/// Runs at most 20 iterations and stops early when the gradient or the
/// accepted step becomes negligible. Always returns a pose (the best one
/// found so far).
#[allow(clippy::too_many_lines)]
fn refine_pose_lm(
    intrinsics: &CameraIntrinsics,
    corners: &[[f64; 2]; 4],
    tag_size: f64,
    initial_pose: Pose,
    huber_delta_px: f64,
) -> Pose {
    let huber_delta = huber_delta_px;

    let mut pose = initial_pose;
    let obj_pts = centered_tag_corners(tag_size);

    // LM damping factor and its growth multiplier (reset on acceptance).
    let mut lambda = 1e-3_f64;
    let mut nu = 2.0_f64;
    let mut current_cost = huber_cost(intrinsics, corners, &obj_pts, &pose, huber_delta);

    for _ in 0..20 {
        // Accumulate the robust-weighted normal equations: J^T W J and
        // J^T W r over the four corner residuals.
        let mut jtj = Matrix6::<f64>::zeros();
        let mut jtr = Vector6::<f64>::zeros();

        for i in 0..4 {
            let p_world = obj_pts[i];
            let p_cam = pose.rotation * p_world + pose.translation;
            // Clamp depth away from zero to keep the projection finite.
            let z = p_cam.z.max(1e-4);
            let z_inv = 1.0 / z;
            let xn = p_cam.x / z;
            let yn = p_cam.y / z;

            // Predicted pixel position, including lens distortion.
            let [u_est, v_est] = intrinsics.distort_normalized(xn, yn);

            let res_u = corners[i][0] - u_est;
            let res_v = corners[i][1] - v_est;

            // Huber weight: 1 inside the delta band, delta / |r| outside.
            let r_norm = (res_u * res_u + res_v * res_v).sqrt();
            let w = if r_norm <= huber_delta {
                1.0
            } else {
                huber_delta / r_norm
            };

            // 2x2 Jacobian of the distortion map at (xn, yn); identity for
            // undistorted cameras.
            let jd = intrinsics.distortion_jacobian(xn, yn);

            // Chain rule: d(pixel)/d(camera point) through normalization
            // and distortion.
            let du_dp = Vector3::new(
                intrinsics.fx * jd[0][0] * z_inv,
                intrinsics.fx * jd[0][1] * z_inv,
                -intrinsics.fx * (jd[0][0] * xn + jd[0][1] * yn) * z_inv,
            );
            let dv_dp = Vector3::new(
                intrinsics.fy * jd[1][0] * z_inv,
                intrinsics.fy * jd[1][1] * z_inv,
                -intrinsics.fy * (jd[1][0] * xn + jd[1][1] * yn) * z_inv,
            );

            // Parameter layout: [translation (0..3) | rotation (3..6)].
            // The rotation entries follow from d(p_cam)/d(omega) = -[p_cam]x
            // (cross-product pattern with p_cam components).
            let mut row_u = Vector6::zeros();
            row_u[0] = du_dp[0];
            row_u[1] = du_dp[1];
            row_u[2] = du_dp[2];
            row_u[3] = du_dp[1] * p_cam.z - du_dp[2] * p_cam.y;
            row_u[4] = du_dp[2] * p_cam.x - du_dp[0] * p_cam.z;
            row_u[5] = du_dp[0] * p_cam.y - du_dp[1] * p_cam.x;

            let mut row_v = Vector6::zeros();
            row_v[0] = dv_dp[0];
            row_v[1] = dv_dp[1];
            row_v[2] = dv_dp[2];
            row_v[3] = dv_dp[1] * p_cam.z - dv_dp[2] * p_cam.y;
            row_v[4] = dv_dp[2] * p_cam.x - dv_dp[0] * p_cam.z;
            row_v[5] = dv_dp[0] * p_cam.y - dv_dp[1] * p_cam.x;

            jtj += w * (row_u * row_u.transpose() + row_v * row_v.transpose());
            jtr += w * (row_u * res_u + row_v * res_v);
        }

        // Gradient effectively zero: converged.
        if jtr.amax() < 1e-8 {
            break;
        }

        // Scale-aware damping: damp each parameter proportionally to its
        // diagonal curvature (floored to stay positive definite).
        let d_diag = Vector6::new(
            jtj[(0, 0)].max(1e-8),
            jtj[(1, 1)].max(1e-8),
            jtj[(2, 2)].max(1e-8),
            jtj[(3, 3)].max(1e-8),
            jtj[(4, 4)].max(1e-8),
            jtj[(5, 5)].max(1e-8),
        );

        let mut jtj_damped = jtj;
        for k in 0..6 {
            jtj_damped[(k, k)] += lambda * d_diag[k];
        }

        // Cholesky fails only if the damped system is not positive
        // definite; increase damping and retry.
        let delta = if let Some(chol) = jtj_damped.cholesky() {
            chol.solve(&jtr)
        } else {
            lambda *= 10.0;
            nu = 2.0;
            continue;
        };

        // Predicted cost reduction of the linearized model (used for the
        // gain ratio below).
        let predicted_reduction = 0.5 * delta.dot(&(lambda * d_diag.component_mul(&delta) + jtr));

        // Apply the step: left-multiplicative rotation update from the
        // axis-angle twist, plus a translation update.
        let twist = Vector3::new(delta[3], delta[4], delta[5]);
        let trans_update = Vector3::new(delta[0], delta[1], delta[2]);
        let rot_update = nalgebra::Rotation3::new(twist).matrix().into_owned();
        let new_pose = Pose::new(
            rot_update * pose.rotation,
            rot_update * pose.translation + trans_update,
        );

        let new_cost = huber_cost(intrinsics, corners, &obj_pts, &new_pose, huber_delta);
        let actual_reduction = current_cost - new_cost;

        // Gain ratio: actual vs. predicted improvement.
        let rho = if predicted_reduction > 1e-12 {
            actual_reduction / predicted_reduction
        } else {
            0.0
        };

        if rho > 0.0 {
            // Step accepted: shrink damping (Nielsen-style update).
            pose = new_pose;
            current_cost = new_cost;
            lambda *= (1.0 - (2.0 * rho - 1.0).powi(3)).max(1.0 / 3.0);
            nu = 2.0;

            // Step small enough: converged.
            if delta.norm() < 1e-7 {
                break;
            }
        } else {
            // Step rejected: grow damping geometrically.
            lambda *= nu;
            nu *= 2.0;
        }
    }

    pose
}
831
832fn huber_cost(
838 intrinsics: &CameraIntrinsics,
839 corners: &[[f64; 2]; 4],
840 obj_pts: &[Vector3<f64>; 4],
841 pose: &Pose,
842 delta: f64,
843) -> f64 {
844 let mut cost = 0.0;
845 for i in 0..4 {
846 let p_cam = pose.rotation * obj_pts[i] + pose.translation;
847 let p = project_with_distortion(&p_cam, intrinsics);
848 let r_u = corners[i][0] - p[0];
849 let r_v = corners[i][1] - p[1];
850 let r_norm = (r_u * r_u + r_v * r_v).sqrt();
852 if r_norm <= delta {
853 cost += 0.5 * r_norm * r_norm;
854 } else {
855 cost += delta * (r_norm - 0.5 * delta);
856 }
857 }
858 cost
859}
860
861fn reprojection_error(
862 intrinsics: &CameraIntrinsics,
863 corners: &[[f64; 2]; 4],
864 obj_pts: &[Vector3<f64>; 4],
865 pose: &Pose,
866) -> f64 {
867 let mut err_sq = 0.0;
868 for i in 0..4 {
869 let p_cam = pose.rotation * obj_pts[i] + pose.translation;
870 let p = project_with_distortion(&p_cam, intrinsics);
871 err_sq += (p[0] - corners[i][0]).powi(2) + (p[1] - corners[i][1]).powi(2);
872 }
873 err_sq
874}
875
876fn find_best_pose(
877 intrinsics: &CameraIntrinsics,
878 corners: &[[f64; 2]; 4],
879 tag_size: f64,
880 candidates: &[Pose; 2],
881) -> Pose {
882 let obj_pts = centered_tag_corners(tag_size);
883
884 let err0 = reprojection_error(intrinsics, corners, &obj_pts, &candidates[0]);
885 let err1 = reprojection_error(intrinsics, corners, &obj_pts, &candidates[1]);
886
887 if err1 < err0 {
889 candidates[1]
890 } else {
891 candidates[0]
892 }
893}
894
895#[tracing::instrument(skip_all, name = "pipeline::pose_refinement")]
900pub fn refine_poses_soa(
901 batch: &mut DetectionBatch,
902 v: usize,
903 intrinsics: &CameraIntrinsics,
904 tag_size: f64,
905 img: Option<&ImageView>,
906 mode: PoseEstimationMode,
907) {
908 refine_poses_soa_with_config(
909 batch,
910 v,
911 intrinsics,
912 tag_size,
913 img,
914 mode,
915 &crate::config::DetectorConfig::default(),
916 );
917}
918
/// Estimates poses for the first `v` detections in `batch` in parallel and
/// writes the results into `batch.poses`.
///
/// Each pose record is `[tx, ty, tz, qx, qy, qz, qw]` (translation then
/// unit quaternion, narrowed to f32); detections whose estimation fails
/// are written as all zeros.
#[tracing::instrument(skip_all, name = "pipeline::pose_refinement")]
pub fn refine_poses_soa_with_config(
    batch: &mut DetectionBatch,
    v: usize,
    intrinsics: &CameraIntrinsics,
    tag_size: f64,
    img: Option<&ImageView>,
    mode: PoseEstimationMode,
    config: &crate::config::DetectorConfig,
) {
    use rayon::prelude::*;

    // Compute all poses in parallel into a temporary Vec, then write back
    // serially — the batch cannot be mutated from inside the parallel map.
    let poses: Vec<_> = (0..v)
        .into_par_iter()
        .map(|i| {
            // Widen the four stored f32 corners to f64 for estimation.
            let corners = [
                [
                    f64::from(batch.corners[i][0].x),
                    f64::from(batch.corners[i][0].y),
                ],
                [
                    f64::from(batch.corners[i][1].x),
                    f64::from(batch.corners[i][1].y),
                ],
                [
                    f64::from(batch.corners[i][2].x),
                    f64::from(batch.corners[i][2].y),
                ],
                [
                    f64::from(batch.corners[i][3].x),
                    f64::from(batch.corners[i][3].y),
                ],
            ];

            // Rebuild per-corner 2x2 covariances from the flat row: four
            // consecutive values per corner, in Matrix2::new (row-major)
            // argument order.
            let mut ext_covs = [Matrix2::zeros(); 4];
            let mut has_ext_covs = false;
            #[allow(clippy::needless_range_loop)]
            for j in 0..4 {
                ext_covs[j] = Matrix2::new(
                    f64::from(batch.corner_covariances[i][j * 4]),
                    f64::from(batch.corner_covariances[i][j * 4 + 1]),
                    f64::from(batch.corner_covariances[i][j * 4 + 2]),
                    f64::from(batch.corner_covariances[i][j * 4 + 3]),
                );
                // Any non-zero covariance enables the weighted path below.
                if ext_covs[j].norm_squared() > 1e-12 {
                    has_ext_covs = true;
                }
            }

            let (pose_opt, _) = estimate_tag_pose_with_config(
                intrinsics,
                &corners,
                tag_size,
                img,
                mode,
                config,
                if has_ext_covs { Some(&ext_covs) } else { None },
            );

            if let Some(pose) = pose_opt {
                // `from_matrix_unchecked` trusts the estimator to have
                // produced a proper rotation matrix.
                let rot = Rotation3::from_matrix_unchecked(pose.rotation);
                let q = UnitQuaternion::from_rotation_matrix(&rot);
                let t = pose.translation;

                // Pack as [tx, ty, tz, qx, qy, qz, qw]; the `as f32` casts
                // intentionally narrow precision for compact storage.
                let mut data = [0.0f32; 7];
                data[0] = t.x as f32;
                data[1] = t.y as f32;
                data[2] = t.z as f32;
                data[3] = q.coords.x as f32;
                data[4] = q.coords.y as f32;
                data[5] = q.coords.z as f32;
                data[6] = q.coords.w as f32;
                Some(data)
            } else {
                None
            }
        })
        .collect();

    // Serial write-back; failed estimations become all-zero records.
    for (i, pose_data) in poses.into_iter().enumerate() {
        if let Some(data) = pose_data {
            batch.poses[i] = Pose6D { data, padding: 0.0 };
        } else {
            batch.poses[i] = Pose6D {
                data: [0.0; 7],
                padding: 0.0,
            };
        }
    }
}
1024
#[cfg(test)]
#[allow(clippy::expect_used, clippy::unwrap_used)]
mod tests {
    use super::*;
    use proptest::prelude::*;

    /// Pinhole projection sanity check: a point 0.1 off-axis at depth 2
    /// lands at c + f * 0.1 / 2 = c + 25 px.
    #[test]
    fn test_pose_projection() {
        let intrinsics = CameraIntrinsics::new(500.0, 500.0, 320.0, 240.0);
        let rotation = Matrix3::identity();
        let translation = Vector3::new(0.0, 0.0, 2.0);
        let pose = Pose::new(rotation, translation);

        let p_world = Vector3::new(0.1, 0.1, 0.0);
        let p_img = pose.project(&p_world, &intrinsics);

        // 320 + 500 * 0.1 / 2 = 345; 240 + 500 * 0.1 / 2 = 265.
        assert!((p_img[0] - 345.0).abs() < 1e-6);
        assert!((p_img[1] - 265.0).abs() < 1e-6);
    }

    /// With noise-free synthetic corners the full estimation pipeline must
    /// recover the ground-truth pose to sub-millimeter accuracy.
    #[test]
    fn test_perfect_pose_estimation() {
        let intrinsics = CameraIntrinsics::new(800.0, 800.0, 400.0, 300.0);
        let gt_rot = Matrix3::identity();
        let gt_t = Vector3::new(0.1, -0.2, 1.5);
        let gt_pose = Pose::new(gt_rot, gt_t);

        let tag_size = 0.16;
        let obj_pts = centered_tag_corners(tag_size);

        // Synthesize perfect (noise-free) corner observations.
        let mut img_pts = [[0.0, 0.0]; 4];
        for i in 0..4 {
            img_pts[i] = gt_pose.project(&obj_pts[i], &intrinsics);
        }

        let (est_pose, _) = estimate_tag_pose(
            &intrinsics,
            &img_pts,
            tag_size,
            None,
            PoseEstimationMode::Fast,
        );
        let est_pose = est_pose.expect("Pose estimation failed");

        assert!((est_pose.translation.x - gt_t.x).abs() < 1e-3);
        assert!((est_pose.translation.y - gt_t.y).abs() < 1e-3);
        assert!((est_pose.translation.z - gt_t.z).abs() < 1e-3);

        // Frobenius norm of the rotation difference as a rotation metric.
        let diff_rot = est_pose.rotation - gt_rot;
        assert!(diff_rot.norm() < 1e-3);
    }

    proptest! {
        // K * K^{-1} must be (numerically) the identity for any sane
        // intrinsics range.
        #[test]
        fn prop_intrinsics_inversion(
            fx in 100.0..2000.0f64,
            fy in 100.0..2000.0f64,
            cx in 0.0..1000.0f64,
            cy in 0.0..1000.0f64
        ) {
            let intrinsics = CameraIntrinsics::new(fx, fy, cx, cy);
            let k = intrinsics.as_matrix();
            let k_inv = intrinsics.inv_matrix();
            let identity = k * k_inv;

            let expected = Matrix3::<f64>::identity();
            for i in 0..3 {
                for j in 0..3 {
                    prop_assert!((identity[(i, j)] - expected[(i, j)]).abs() < 1e-9);
                }
            }
        }

        // Pose recovery stays within a loose error bound across random
        // poses in front of the camera, with a small constant pixel offset
        // added to every corner.
        #[test]
        fn prop_pose_recovery_stability(
            tx in -0.5..0.5f64,
            ty in -0.5..0.5f64,
            tz in 0.5..5.0f64,
            roll in -0.5..0.5f64,
            pitch in -0.5..0.5f64,
            yaw in -0.5..0.5f64,
            noise in 0.0..0.1f64
        ) {
            let intrinsics = CameraIntrinsics::new(800.0, 800.0, 400.0, 300.0);
            let translation = Vector3::new(tx, ty, tz);

            let r_obj = nalgebra::Rotation3::from_euler_angles(roll, pitch, yaw);
            let rotation = r_obj.matrix().into_owned();
            let gt_pose = Pose::new(rotation, translation);

            let tag_size = 0.16;
            let obj_pts = centered_tag_corners(tag_size);

            let mut img_pts = [[0.0, 0.0]; 4];
            for i in 0..4 {
                let p = gt_pose.project(&obj_pts[i], &intrinsics);
                // Same offset on every corner (not independent noise).
                img_pts[i] = [p[0] + noise, p[1] + noise];
            }

            // Degenerate corner layouts may legitimately return None; only
            // check accuracy when an estimate is produced.
            if let (Some(est_pose), _) = estimate_tag_pose(&intrinsics, &img_pts, tag_size, None, PoseEstimationMode::Fast) {
                let t_err = (est_pose.translation - translation).norm();
                prop_assert!(t_err < 0.1 + noise * 0.1, "Translation error {} too high for noise {}", t_err, noise);

                let r_err = (est_pose.rotation - rotation).norm();
                prop_assert!(r_err < 0.1 + noise * 0.1, "Rotation error {} too high for noise {}", r_err, noise);
            }
        }
    }
}