use nalgebra::{
    allocator::Allocator,
    base::storage::{Owned, Storage},
    convert,
    geometry::{Point3, Rotation3, UnitQuaternion},
    DefaultAllocator, Dim, Matrix, Matrix3, RealField, SMatrix, Vector3, U1, U2, U3, U4,
};

#[cfg(feature = "serde-serialize")]
use serde::{Deserialize, Serialize};

use crate::{
    coordinate_system::WorldFrame,
    intrinsics_perspective::{IntrinsicParametersPerspective, PerspectiveParams},
    Bundle, Error, ExtrinsicParameters, IntrinsicParameters, Pixels, Points, RayBundle,
};

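/// A camera model that combines intrinsic and extrinsic parameters.
///
/// Projecting world coordinates to pixels applies the extrinsic transform
/// (world frame to camera frame) followed by the intrinsic projection (camera
/// frame to pixels). The reverse direction returns a bundle of 3D rays rather
/// than points, since depth is lost in projection.
///
/// A minimal usage sketch (assumed variable names, not compiled here):
///
/// ```ignore
/// let cam = Camera::new(intrinsics, extrinsics);
/// let pixels = cam.world_to_pixel(&world_points);
/// let rays = cam.pixel_to_world(&pixels);
/// ```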
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))]
pub struct Camera<R, I>
where
    I: IntrinsicParameters<R>,
    R: RealField,
{
    intrinsics: I,
    extrinsics: ExtrinsicParameters<R>,
}

impl<R, I> Camera<R, I>
where
    I: IntrinsicParameters<R>,
    R: RealField,
{
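    /// Create a new camera from intrinsic and extrinsic parameters.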
    #[inline]
    pub fn new(intrinsics: I, extrinsics: ExtrinsicParameters<R>) -> Self {
        Self {
            intrinsics,
            extrinsics,
        }
    }

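    /// Return a reference to the extrinsic parameters.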
    #[inline]
    pub fn extrinsics(&self) -> &ExtrinsicParameters<R> {
        &self.extrinsics
    }

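    /// Return a reference to the intrinsic parameters.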
    #[inline]
    pub fn intrinsics(&self) -> &I {
        &self.intrinsics
    }

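    /// Project points in the world frame to pixel coordinates.
    ///
    /// The points are first transformed into the camera frame by the extrinsic
    /// parameters and then projected to pixels by the intrinsic parameters.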
    pub fn world_to_pixel<NPTS, InStorage>(
        &self,
        world: &Points<WorldFrame, R, NPTS, InStorage>,
    ) -> Pixels<R, NPTS, Owned<R, NPTS, U2>>
    where
        NPTS: Dim,
        InStorage: Storage<R, NPTS, U3>,
        DefaultAllocator: Allocator<NPTS, U3>,
        DefaultAllocator: Allocator<NPTS, U2>,
    {
        let camera_frame = self.extrinsics.world_to_camera(world);
        self.intrinsics.camera_to_pixel(&camera_frame)
    }

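    /// Convert pixel coordinates to a bundle of rays in the world frame.
    ///
    /// A pixel determines a viewing direction but not a depth, so the result
    /// is a bundle of rays rather than a set of 3D points.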
    pub fn pixel_to_world<IN, NPTS>(
        &self,
        pixels: &Pixels<R, NPTS, IN>,
    ) -> RayBundle<WorldFrame, I::BundleType, R, NPTS, Owned<R, NPTS, U3>>
    where
        I::BundleType: Bundle<R>,
        IN: Storage<R, NPTS, U2>,
        NPTS: Dim,
        DefaultAllocator: Allocator<U1, U2>,
        DefaultAllocator: Allocator<NPTS, U2>,
        DefaultAllocator: Allocator<NPTS, U3>,
    {
        // Unproject pixels to rays in the camera frame, then express those
        // rays in the world frame.
        let camera = self.intrinsics.pixel_to_camera(pixels);

        self.extrinsics.ray_camera_to_world(&camera)
    }
}

impl<R: RealField> Camera<R, IntrinsicParametersPerspective<R>> {
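    /// Create a perspective camera from a 3x4 projection matrix.
    ///
    /// The left 3x3 block is split into an upper-triangular intrinsic matrix
    /// and a rotation via RQ decomposition, and the camera center is recovered
    /// from the full matrix.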
    pub fn from_perspective_matrix<S>(pmat: &Matrix<R, U3, U4, S>) -> Result<Self, Error>
    where
        S: Storage<R, U3, U4> + Clone,
    {
        let m = pmat.clone().remove_column(3);
        let (rquat, k) = rq_decomposition(m)?;

        let k22: R = k[(2, 2)].clone();

        let one: R = convert(1.0);

        // Normalize the intrinsic matrix so its lower-right element is one.
        let k = k * (one / k22);

        let params = PerspectiveParams {
            fx: k[(0, 0)].clone(),
            fy: k[(1, 1)].clone(),
            skew: k[(0, 1)].clone(),
            cx: k[(0, 2)].clone(),
            cy: k[(1, 2)].clone(),
        };

        let camcenter = pmat2cam_center(pmat);
        let extrinsics = ExtrinsicParameters::from_rotation_and_camcenter(rquat, camcenter);

        Ok(Self::new(params.into(), extrinsics))
    }

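    /// Return this camera as a 3x4 projection matrix.
    ///
    /// The sign is chosen so the upper-left element is non-negative, and the
    /// matrix is scaled so its lower-right element is one.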
    pub fn as_camera_matrix(&self) -> SMatrix<R, 3, 4> {
        let m = {
            let p33 = self.intrinsics().as_intrinsics_matrix();
            p33 * self.extrinsics().cache.qt.clone()
        };

        // Choose the overall sign so the upper-left element is non-negative.
        let m = if m[(0, 0)] < nalgebra::convert(0.0) {
            -m
        } else {
            m
        };

        // Scale so the lower-right element is one.
        m.clone() / m[(2, 3)].clone()
    }
}

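/// Test helper: unproject a grid of pixels to world-frame rays, take a point
/// on each ray, project those points back, and assert the result matches the
/// original pixels to within `eps`.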
#[cfg(test)]
pub fn roundtrip_camera<R, I>(
    cam: Camera<R, I>,
    width: usize,
    height: usize,
    step: usize,
    border: usize,
    eps: R,
) where
    R: RealField,
    I: IntrinsicParameters<R>,
    I::BundleType: Bundle<R>,
{
    let pixels = crate::intrinsic_test_utils::generate_uv_raw(width, height, step, border);

    let world_coords = cam.pixel_to_world(&pixels);
    let world_coords_points = world_coords.point_on_ray();

    let pixel_actual = cam.world_to_pixel(&world_coords_points);
    approx::assert_abs_diff_eq!(pixels.data, pixel_actual.data, epsilon = convert(eps));
}

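/// RQ decomposition of `A`, built on nalgebra's QR decomposition: QR-decompose
/// the transpose of the row-reversed matrix, then undo the permutation.
/// Returns `(R, Q)` with `R` upper triangular and `Q` orthogonal such that
/// `R * Q == A`.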
#[allow(non_snake_case)]
fn rq<R: RealField>(A: Matrix3<R>) -> (Matrix3<R>, Matrix3<R>) {
    let zero: R = convert(0.0);
    let one: R = convert(1.0);

    // Permutation matrix that reverses the order of the rows.
    #[rustfmt::skip]
    let P = Matrix3::<R>::new(
        zero.clone(), zero.clone(), one.clone(),
        zero.clone(), one.clone(), zero.clone(),
        one, zero.clone(), zero,
    );
    let Atilde = P.clone() * A;

    let (Qtilde, Rtilde) = {
        let qrm = nalgebra::linalg::QR::new(Atilde.transpose());
        (qrm.q(), qrm.r())
    };
    let Q = P.clone() * Qtilde.transpose();
    let R = P.clone() * Rtilde.transpose() * P;
    (R, Q)
}

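/// Decompose a 3x3 matrix into an upper-triangular intrinsic matrix with a
/// non-negative diagonal and a rotation (returned as a unit quaternion) whose
/// product reproduces the input matrix.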
fn rq_decomposition<R: RealField>(
    orig: Matrix3<R>,
) -> Result<(UnitQuaternion<R>, Matrix3<R>), Error> {
    let (mut intrin, mut q) = rq(orig);

    // Flip signs so the diagonal of the intrinsic matrix is non-negative.
    // Negating column `i` of `intrin` and row `i` of `q` leaves their product
    // unchanged.
    let zero: R = convert(0.0);
    for i in 0..3 {
        if intrin[(i, i)] < zero {
            for j in 0..3 {
                intrin[(j, i)] = -intrin[(j, i)].clone();
                q[(i, j)] = -q[(i, j)].clone();
            }
        }
    }

    // Exactly one of `q` and `-q` is a proper rotation (determinant +1). Pick
    // it, negating the intrinsic matrix as needed so the product is preserved.
    let r1 = q.clone();
    let r2 = -q;
    if r1.determinant() > r2.determinant() {
        let intrin1 = intrin;
        let rotmat1 = Rotation3::from_matrix_unchecked(r1);
        let rquat1 = UnitQuaternion::from_rotation_matrix(&rotmat1);
        Ok((rquat1, intrin1))
    } else {
        let intrin2 = -intrin;
        let rotmat2 = Rotation3::from_matrix_unchecked(r2);
        let rquat2 = UnitQuaternion::from_rotation_matrix(&rotmat2);
        Ok((rquat2, intrin2))
    }
}

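/// Compute the camera center from a 3x4 projection matrix.
///
/// Each homogeneous coordinate of the center is, up to sign, the determinant
/// of the matrix obtained by deleting one column; the result is then
/// dehomogenized by the last coordinate.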
#[allow(clippy::many_single_char_names)]
fn pmat2cam_center<R, S>(p: &Matrix<R, U3, U4, S>) -> Point3<R>
where
    R: RealField,
    S: Storage<R, U3, U4> + Clone,
{
    let x = p.clone().remove_column(0).determinant();
    let y = -p.clone().remove_column(1).determinant();
    let z = p.clone().remove_column(2).determinant();
    let w = -p.clone().remove_column(3).determinant();
    Point3::from(Vector3::new(x / w.clone(), y / w.clone(), z / w))
}

#[cfg(test)]
mod tests {
    #[test]
    #[cfg(feature = "serde-serialize")]
    fn test_serde() {
        use nalgebra::{Unit, Vector3};

        use super::PerspectiveParams;
        use crate::{Camera, ExtrinsicParameters, IntrinsicParametersPerspective};

        let params = PerspectiveParams {
            fx: 100.0,
            fy: 102.0,
            skew: 0.1,
            cx: 321.0,
            cy: 239.9,
        };

        let intrinsics: IntrinsicParametersPerspective<_> = params.into();

        let camcenter = Vector3::new(10.0, 0.0, 10.0);
        let lookat = Vector3::new(0.0, 0.0, 0.0);
        let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0));
        let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up);

        let expected = Camera::new(intrinsics, pose);

        let buf = serde_json::to_string(&expected).unwrap();
        let actual: Camera<_, _> = serde_json::from_str(&buf).unwrap();
        assert!(expected == actual);
    }

    #[test]
    fn test_rotation_rh() {
        use super::Camera;

        // Image/object correspondences for a planar target. The projection
        // matrix is recovered with the DLT and then decomposed into a `Camera`
        // at both f32 and f64 precision.
        let image_points = [
            [102.92784, 141.48114],
            [250.00058, 141.4969],
            [397.07214, 141.48114],
            [141.12552, 247.49763],
            [250.0, 247.50888],
            [358.8746, 247.4984],
            [163.47325, 309.60333],
            [250.0, 309.61908],
            [336.52673, 309.60333],
        ];
        let object_points = [
            [140., 140., 0.],
            [500., 140., 0.],
            [860., 140., 0.],
            [140., 500., 0.],
            [500., 500., 0.],
            [860., 500., 0.],
            [140., 860., 0.],
            [500., 860., 0.],
            [860., 860., 0.],
        ];
        let corresponding_points = std::iter::zip(image_points, object_points)
            .map(|(image_point, object_point)| dlt::CorrespondingPoint {
                object_point,
                image_point,
            })
            .collect::<Vec<_>>();
        let pmat = dlt::dlt_corresponding(&corresponding_points, 1e-10).unwrap();
        let cam_f32 = Camera::from_perspective_matrix(&pmat.cast::<f32>()).unwrap();
        println!("cam_f32 = {cam_f32:?}");
        let cam_f64 = Camera::from_perspective_matrix(&pmat).unwrap();
        println!("cam_f64 = {cam_f64:?}");
    }
}