1use crate::batch::{DetectionBatch, DetectionBatchView};
2use crate::config::DetectorConfig;
3use crate::decoder::{TagDecoder, family_to_decoder};
4use crate::error::DetectorError;
5use crate::image::ImageView;
6use bumpalo::Bump;
7
/// Reusable per-frame scratch state for the detection pipeline.
///
/// Owning one of these across frames lets the pipeline recycle its
/// allocations instead of re-allocating every call.
pub struct FrameContext {
    /// Bump arena for transient per-frame buffers; reclaimed wholesale by
    /// [`FrameContext::reset`] at the start of each pipeline run.
    pub arena: Bump,
    /// SoA detection results for the most recent frame.
    pub batch: DetectionBatch,
    /// Reusable pixel buffer for the optional upscaling stage (kept outside
    /// the arena so its allocation survives `reset`).
    pub upscale_buf: Vec<u8>,
}
20
21impl FrameContext {
22 #[must_use]
24 pub fn new() -> Self {
25 Self {
26 arena: Bump::new(),
27 batch: DetectionBatch::new(),
28 upscale_buf: Vec::new(),
29 }
30 }
31
32 pub fn reset(&mut self) {
34 self.arena.reset();
35 }
36}
37
38impl Default for FrameContext {
39 fn default() -> Self {
40 Self::new()
41 }
42}
43
/// Single-frame convenience wrapper: a [`LocusEngine`] paired with one
/// owned [`FrameContext`], so repeated `detect` calls reuse buffers
/// without the caller managing a context.
pub struct Detector {
    // Shared, thread-safe detection engine (config + decoders + pool).
    engine: LocusEngine,
    // Dedicated scratch state for the sequential `detect` path.
    ctx: FrameContext,
}
54
55impl Default for Detector {
56 fn default() -> Self {
57 Self::new()
58 }
59}
60
impl Detector {
    /// Creates a detector with the default configuration and decoder set.
    #[must_use]
    pub fn new() -> Self {
        Self::builder().build()
    }

    /// Returns a fluent builder for customizing the detector.
    #[must_use]
    pub fn builder() -> DetectorBuilder {
        DetectorBuilder::new()
    }

    /// Creates a detector from an explicit configuration.
    #[must_use]
    pub fn with_config(config: DetectorConfig) -> Self {
        Self::builder().with_config(config).build()
    }

    /// Borrows the detector's per-frame scratch state.
    #[must_use]
    pub fn state(&self) -> &FrameContext {
        &self.ctx
    }

    /// Borrows the underlying shared engine.
    #[must_use]
    pub fn engine(&self) -> &LocusEngine {
        &self.engine
    }

    /// Runs detection on one frame, reusing this detector's owned context.
    ///
    /// `intrinsics` and `tag_size` enable pose estimation; `debug_telemetry`
    /// attaches diagnostic buffers to the returned view. The view borrows
    /// from `self`, so it is invalidated by the next `detect` call.
    pub fn detect(
        &mut self,
        img: &ImageView,
        intrinsics: Option<&crate::pose::CameraIntrinsics>,
        tag_size: Option<f64>,
        pose_mode: crate::config::PoseEstimationMode,
        debug_telemetry: bool,
    ) -> Result<DetectionBatchView<'_>, DetectorError> {
        self.engine.detect_with_context(
            img,
            &mut self.ctx,
            intrinsics,
            tag_size,
            pose_mode,
            debug_telemetry,
        )
    }

    /// Detects tags in several frames in parallel via the engine's context
    /// pool. Returns owned detections per frame (no telemetry).
    pub fn detect_concurrent(
        &self,
        frames: &[ImageView<'_>],
        intrinsics: Option<&crate::pose::CameraIntrinsics>,
        tag_size: Option<f64>,
        pose_mode: crate::config::PoseEstimationMode,
    ) -> Vec<Result<Vec<crate::Detection>, DetectorError>> {
        self.engine
            .detect_concurrent(frames, intrinsics, tag_size, pose_mode)
    }

    /// Replaces the set of tag families the engine decodes.
    pub fn set_families(&mut self, families: &[crate::config::TagFamily]) {
        self.engine.set_families(families);
    }

    /// Returns a copy of the active configuration.
    #[must_use]
    pub fn config(&self) -> DetectorConfig {
        self.engine.config()
    }

    /// Bench-only: deep-copies the current batch field by field.
    ///
    /// NOTE(review): `copy_from_slice` panics when source and destination
    /// lengths differ — this assumes `DetectionBatch::new()` allocates every
    /// field at the same fixed length as the live batch; confirm against
    /// `DetectionBatch`.
    #[cfg(feature = "bench-internals")]
    #[must_use]
    pub fn bench_api_get_batch_cloned(&self) -> DetectionBatch {
        let mut new_batch = DetectionBatch::new();
        new_batch.corners.copy_from_slice(&self.ctx.batch.corners);
        new_batch
            .homographies
            .copy_from_slice(&self.ctx.batch.homographies);
        new_batch.ids.copy_from_slice(&self.ctx.batch.ids);
        new_batch.payloads.copy_from_slice(&self.ctx.batch.payloads);
        new_batch
            .error_rates
            .copy_from_slice(&self.ctx.batch.error_rates);
        new_batch.poses.copy_from_slice(&self.ctx.batch.poses);
        new_batch
            .status_mask
            .copy_from_slice(&self.ctx.batch.status_mask);
        new_batch
            .funnel_status
            .copy_from_slice(&self.ctx.batch.funnel_status);
        new_batch
            .corner_covariances
            .copy_from_slice(&self.ctx.batch.corner_covariances);
        new_batch
    }
}
174
/// Runs the full single-frame detection pipeline: resolution adjustment,
/// optional sharpening, adaptive thresholding, connected-component
/// labeling, quad extraction, corner refinement, payload decoding, and
/// optional pose refinement.
///
/// All transient buffers come from `ctx`'s bump arena, which is reset at
/// the top of every call; the returned view (and any telemetry pointers
/// inside it) borrow from `ctx` and are invalidated by the next reset.
#[allow(clippy::similar_names)]
#[allow(clippy::too_many_lines)]
#[allow(clippy::too_many_arguments)]
fn run_detection_pipeline<'ctx>(
    config: &DetectorConfig,
    decoders: &[Box<dyn TagDecoder + Send + Sync>],
    img: &ImageView,
    ctx: &'ctx mut FrameContext,
    intrinsics: Option<&crate::pose::CameraIntrinsics>,
    tag_size: Option<f64>,
    pose_mode: crate::config::PoseEstimationMode,
    debug_telemetry: bool,
) -> Result<DetectionBatchView<'ctx>, DetectorError> {
    // EdLines quad extraction has no distortion-aware path, so reject the
    // combination up front instead of silently producing wrong corners.
    let has_distortion = intrinsics.is_some_and(|k| k.distortion.is_distorted());
    if has_distortion && config.quad_extraction_mode == crate::config::QuadExtractionMode::EdLines {
        return Err(DetectorError::Config(
            crate::error::ConfigError::EdLinesUnsupportedWithDistortion,
        ));
    }

    // Reclaim all arena memory from the previous frame.
    ctx.reset();
    let state = ctx;

    // Stage 1: resolution adjustment. Decimation takes precedence over
    // upscaling. `refinement_img` keeps the image later stages refine
    // against (full-resolution input when decimating; the upscaled image
    // when upscaling).
    let (detection_img, _effective_scale, refinement_img) = if config.decimation > 1 {
        let new_w = img.width / config.decimation;
        let new_h = img.height / config.decimation;
        let decimated_data = state.arena.alloc_slice_fill_copy(new_w * new_h, 0u8);
        let decimated_img = img
            .decimate_to(config.decimation, decimated_data)
            .map_err(DetectorError::Preprocessing)?;
        (decimated_img, 1.0 / config.decimation as f64, *img)
    } else if config.upscale_factor > 1 {
        let new_w = img.width * config.upscale_factor;
        let new_h = img.height * config.upscale_factor;
        // Upscale buffer lives outside the arena so its allocation is
        // reused across frames.
        state.upscale_buf.resize(new_w * new_h, 0);

        let upscaled_img = img
            .upscale_to(config.upscale_factor, &mut state.upscale_buf)
            .map_err(DetectorError::Preprocessing)?;
        (upscaled_img, config.upscale_factor as f64, upscaled_img)
    } else {
        (*img, 1.0, *img)
    };

    // From here on, `img` is the detection-resolution image.
    let img = &detection_img;

    // Stage 2: optional Laplacian sharpening into an arena buffer.
    let sharpened_img = if config.enable_sharpening {
        let sharpened = state
            .arena
            .alloc_slice_fill_copy(img.width * img.height, 0u8);
        crate::filter::laplacian_sharpen(img, sharpened);

        ImageView::new(sharpened, img.width, img.height, img.width)
            .map_err(DetectorError::InvalidImage)?
    } else {
        *img
    };

    // Segmentation / telemetry buffers (arena-backed, zero-initialized).
    let binarized = state
        .arena
        .alloc_slice_fill_copy(img.width * img.height, 0u8);
    let threshold_map = state
        .arena
        .alloc_slice_fill_copy(img.width * img.height, 0u8);

    // Stage 3: threshold, label connected components, extract quads.
    // `n` = number of candidate quads written into `state.batch`;
    // `unrefined` optionally carries pre-refinement corners for telemetry.
    let (n, unrefined) = {
        let engine = crate::threshold::ThresholdEngine::from_config(config);
        let tile_stats = engine.compute_tile_stats(&state.arena, &sharpened_img);
        engine.apply_threshold_with_map(
            &state.arena,
            &sharpened_img,
            &tile_stats,
            binarized,
            threshold_map,
        );

        let label_result = crate::simd_ccl_fusion::label_components_lsl(
            &state.arena,
            &sharpened_img,
            threshold_map,
            config.segmentation_connectivity == crate::config::SegmentationConnectivity::Eight,
            config.quad_min_area,
        );

        // With the `non_rectified` feature, distorted intrinsics select a
        // camera-model-aware quad extractor; pinhole/no intrinsics fall
        // through to the plain SoA extractor.
        #[cfg(feature = "non_rectified")]
        let (n, unrefined) = match intrinsics.map(|k| (k, k.distortion)) {
            Some((k, crate::pose::DistortionCoeffs::BrownConrady { k1, k2, p1, p2, k3 })) => {
                let model = crate::camera::BrownConradyModel { k1, k2, p1, p2, k3 };
                crate::quad::extract_quads_soa_with_camera(
                    &mut state.batch,
                    &sharpened_img,
                    &label_result,
                    config,
                    config.decimation,
                    &refinement_img,
                    debug_telemetry,
                    &model,
                    k,
                )
            },
            Some((k, crate::pose::DistortionCoeffs::KannalaBrandt { k1, k2, k3, k4 })) => {
                let model = crate::camera::KannalaBrandtModel { k1, k2, k3, k4 };
                crate::quad::extract_quads_soa_with_camera(
                    &mut state.batch,
                    &sharpened_img,
                    &label_result,
                    config,
                    config.decimation,
                    &refinement_img,
                    debug_telemetry,
                    &model,
                    k,
                )
            },
            _ => crate::quad::extract_quads_soa(
                &mut state.batch,
                &sharpened_img,
                &label_result,
                config,
                config.decimation,
                &refinement_img,
                debug_telemetry,
            ),
        };
        #[cfg(not(feature = "non_rectified"))]
        let (n, unrefined) = crate::quad::extract_quads_soa(
            &mut state.batch,
            &sharpened_img,
            &label_result,
            config,
            config.decimation,
            &refinement_img,
            debug_telemetry,
        );

        // Early-rejection gate based on local contrast statistics. Skipped
        // for distorted cameras — presumably because the gate assumes an
        // undistorted pixel grid; confirm.
        if !has_distortion {
            crate::funnel::apply_funnel_gate(
                &mut state.batch,
                n,
                &sharpened_img,
                &tile_stats,
                config.threshold_tile_size,
                config.decoder_min_contrast,
                1.0 / config.decimation as f64,
            );
        }

        (n, unrefined)
    };

    // Stage 4 (telemetry only): record sub-pixel refinement deltas
    // (dx, dy) for each of the 4 corners -> 8 f32 values per quad.
    let mut jitter_ptr = std::ptr::null();
    let mut num_jitter = 0;
    if let (true, Some(unrefined_pts)) = (debug_telemetry, unrefined) {
        num_jitter = unrefined_pts.len();
        let jitter = state.arena.alloc_slice_fill_copy(num_jitter * 8, 0.0f32);
        for (i, unrefined_corners) in unrefined_pts.iter().enumerate() {
            for (j, unrefined_corner) in unrefined_corners.iter().enumerate() {
                let dx = state.batch.corners[i][j].x - unrefined_corner.x as f32;
                let dy = state.batch.corners[i][j].y - unrefined_corner.y as f32;
                jitter[i * 8 + j * 2] = dx;
                jitter[i * 8 + j * 2 + 1] = dy;
            }
        }
        jitter_ptr = jitter.as_ptr();
    }

    // Stage 5: fit a homography per candidate quad.
    crate::decoder::compute_homographies_soa(
        &state.batch.corners[0..n],
        &state.batch.status_mask[0..n],
        &mut state.batch.homographies[0..n],
    );

    // Stage 6: optional GWLF corner refinement with per-corner 2x2
    // covariances. Quads the refiner rejects keep their coarse corners
    // and are counted as fallbacks for telemetry.
    let mut gwlf_fallback_count = 0;
    let mut gwlf_avg_delta = 0.0f32;
    if config.refinement_mode == crate::config::CornerRefinementMode::Gwlf {
        let mut total_delta = 0.0f32;
        let mut count = 0;
        for i in 0..n {
            let coarse = [
                [state.batch.corners[i][0].x, state.batch.corners[i][0].y],
                [state.batch.corners[i][1].x, state.batch.corners[i][1].y],
                [state.batch.corners[i][2].x, state.batch.corners[i][2].y],
                [state.batch.corners[i][3].x, state.batch.corners[i][3].y],
            ];
            if let Some((refined, covs)) = crate::gwlf::refine_quad_gwlf_with_cov(
                &refinement_img,
                &coarse,
                config.gwlf_transversal_alpha,
            ) {
                for j in 0..4 {
                    // Accumulate refinement displacement for the average.
                    let dx = refined[j][0] - coarse[j][0];
                    let dy = refined[j][1] - coarse[j][1];
                    total_delta += (dx * dx + dy * dy).sqrt();
                    count += 1;

                    state.batch.corners[i][j].x = refined[j][0];
                    state.batch.corners[i][j].y = refined[j][1];

                    // Row-major 2x2 covariance per corner: [c00, c01, c10, c11].
                    state.batch.corner_covariances[i][j * 4] = covs[j][(0, 0)] as f32;
                    state.batch.corner_covariances[i][j * 4 + 1] = covs[j][(0, 1)] as f32;
                    state.batch.corner_covariances[i][j * 4 + 2] = covs[j][(1, 0)] as f32;
                    state.batch.corner_covariances[i][j * 4 + 3] = covs[j][(1, 1)] as f32;
                }
            } else {
                gwlf_fallback_count += 1;
            }
        }
        if count > 0 {
            gwlf_avg_delta = total_delta / count as f32;
        }

        // Corners moved, so homographies must be refit before decoding.
        crate::decoder::compute_homographies_soa(
            &state.batch.corners[0..n],
            &state.batch.status_mask[0..n],
            &mut state.batch.homographies[0..n],
        );
    }

    // Stage 7: decode tag payloads. Distorted cameras use the
    // model-aware sampler under the `non_rectified` feature.
    match intrinsics.map(|k| &k.distortion) {
        #[cfg(feature = "non_rectified")]
        Some(crate::pose::DistortionCoeffs::BrownConrady { k1, k2, p1, p2, k3 }) => {
            let model = crate::camera::BrownConradyModel {
                k1: *k1,
                k2: *k2,
                p1: *p1,
                p2: *p2,
                k3: *k3,
            };
            crate::decoder::decode_batch_soa_with_camera(
                &mut state.batch,
                n,
                &refinement_img,
                decoders,
                config,
                intrinsics,
                &model,
            );
        },
        #[cfg(feature = "non_rectified")]
        Some(crate::pose::DistortionCoeffs::KannalaBrandt { k1, k2, k3, k4 }) => {
            let model = crate::camera::KannalaBrandtModel {
                k1: *k1,
                k2: *k2,
                k3: *k3,
                k4: *k4,
            };
            crate::decoder::decode_batch_soa_with_camera(
                &mut state.batch,
                n,
                &refinement_img,
                decoders,
                config,
                intrinsics,
                &model,
            );
        },
        _ => {
            crate::decoder::decode_batch_soa(
                &mut state.batch,
                n,
                &refinement_img,
                decoders,
                config,
            );
        },
    }

    // Stage 8: partition the batch; `v` is the count of valid detections
    // (presumably moved to the front — see `DetectionBatch::partition`).
    let v = state.batch.partition(n);

    // Stage 9: pose refinement plus optional reprojection telemetry.
    let (repro_errors_ptr, num_repro) = run_pose_refinement(
        &mut state.batch,
        &state.arena,
        v,
        intrinsics,
        tag_size,
        &refinement_img,
        pose_mode,
        config,
        debug_telemetry,
    );

    // Assemble telemetry; all raw pointers reference arena memory and
    // stay valid only until the next `ctx.reset()`.
    let telemetry = if debug_telemetry {
        Some(crate::batch::TelemetryPayload {
            binarized_ptr: binarized.as_ptr(),
            threshold_map_ptr: threshold_map.as_ptr(),
            subpixel_jitter_ptr: jitter_ptr,
            num_jitter,
            reprojection_errors_ptr: repro_errors_ptr,
            num_reprojection: num_repro,
            gwlf_fallback_count,
            gwlf_avg_delta,
            width: img.width,
            height: img.height,
            stride: img.width,
        })
    } else {
        None
    };

    Ok(state.batch.view_with_telemetry(v, n, telemetry))
}
506
/// Refines tag poses in-place when both intrinsics and tag size are
/// supplied, and — when telemetry is requested — computes a per-detection
/// RMS reprojection error over the four tag corners.
///
/// Returns a pointer to `num_repro` f32 errors allocated in `arena`, or
/// `(null, 0)` when telemetry is off or pose inputs are missing. The
/// pointer is only valid until the arena is next reset.
#[allow(clippy::too_many_arguments)]
fn run_pose_refinement(
    batch: &mut crate::batch::DetectionBatch,
    arena: &bumpalo::Bump,
    v: usize,
    intrinsics: Option<&crate::pose::CameraIntrinsics>,
    tag_size: Option<f64>,
    refinement_img: &ImageView,
    pose_mode: crate::config::PoseEstimationMode,
    config: &DetectorConfig,
    debug_telemetry: bool,
) -> (*const f32, usize) {
    let mut repro_errors_ptr = std::ptr::null();
    let mut num_repro = 0;

    if let (Some(intr), Some(size)) = (intrinsics, tag_size) {
        crate::pose::refine_poses_soa_with_config(
            batch,
            v,
            intr,
            size,
            Some(refinement_img),
            pose_mode,
            config,
        );

        if debug_telemetry {
            num_repro = v;
            let repro_errors = arena.alloc_slice_fill_copy(num_repro, 0.0f32);

            // Tag corner positions in the tag's model frame.
            // NOTE(review): origin at a corner (not the tag center) — this
            // must match the pose solver's convention; confirm.
            let model_pts = [
                nalgebra::Vector3::new(0.0, 0.0, 0.0),
                nalgebra::Vector3::new(size, 0.0, 0.0),
                nalgebra::Vector3::new(size, size, 0.0),
                nalgebra::Vector3::new(0.0, size, 0.0),
            ];

            for (i, repro_error) in repro_errors.iter_mut().enumerate().take(v) {
                // Pose layout in `poses[i].data`: [tx, ty, tz, qx, qy, qz, qw]
                // (inferred from the indexing below — confirm against the
                // pose writer). `nalgebra::Quaternion::new` takes (w, i, j, k).
                let det_pose_data = batch.poses[i].data;
                let det_t = nalgebra::Vector3::new(
                    f64::from(det_pose_data[0]),
                    f64::from(det_pose_data[1]),
                    f64::from(det_pose_data[2]),
                );
                let det_q = nalgebra::UnitQuaternion::from_quaternion(nalgebra::Quaternion::new(
                    f64::from(det_pose_data[6]),
                    f64::from(det_pose_data[3]),
                    f64::from(det_pose_data[4]),
                    f64::from(det_pose_data[5]),
                ));

                let pose = crate::pose::Pose {
                    rotation: *det_q.to_rotation_matrix().matrix(),
                    translation: det_t,
                };

                // RMS reprojection error over the four corners.
                let mut sum_sq_err = 0.0;
                for (j, model_pt) in model_pts.iter().enumerate() {
                    let proj = pose.project(model_pt, intr);
                    let dx = proj[0] - f64::from(batch.corners[i][j].x);
                    let dy = proj[1] - f64::from(batch.corners[i][j].y);
                    sum_sq_err += dx * dx + dy * dy;
                }
                *repro_error = (sum_sq_err / 4.0).sqrt() as f32;
            }
            repro_errors_ptr = repro_errors.as_ptr();
        }
    }

    (repro_errors_ptr, num_repro)
}
581
/// Fluent builder for [`Detector`] and [`LocusEngine`].
pub struct DetectorBuilder {
    // Detection configuration assembled by the `with_*` setters.
    config: DetectorConfig,
    // Tag families to decode; AprilTag 36h11 is used when left empty.
    families: Vec<crate::config::TagFamily>,
    // Size of the frame-context pool for concurrent detection.
    max_concurrent_frames: usize,
}
588
impl DetectorBuilder {
    /// Creates a builder with the default configuration, no explicit tag
    /// families, and a single-frame context pool.
    #[must_use]
    pub fn new() -> Self {
        Self {
            config: DetectorConfig::default(),
            families: Vec::new(),
            max_concurrent_frames: 1,
        }
    }

    /// Replaces the entire configuration; later setters still apply on top.
    #[must_use]
    pub fn with_config(mut self, config: DetectorConfig) -> Self {
        self.config = config;
        self
    }

    /// Sets the decimation factor (values > 1 shrink the detection image).
    #[must_use]
    pub fn with_decimation(mut self, decimation: usize) -> Self {
        self.config.decimation = decimation;
        self
    }

    /// Adds a tag family to decode; duplicates are ignored.
    #[must_use]
    pub fn with_family(mut self, family: crate::config::TagFamily) -> Self {
        if !self.families.contains(&family) {
            self.families.push(family);
        }
        self
    }

    /// Sets the worker thread count in the configuration.
    #[must_use]
    pub fn with_threads(mut self, threads: usize) -> Self {
        self.config.nthreads = threads;
        self
    }

    /// Sets the upscale factor (takes effect only when decimation <= 1).
    #[must_use]
    pub fn with_upscale_factor(mut self, factor: usize) -> Self {
        self.config.upscale_factor = factor;
        self
    }

    /// Selects the corner refinement algorithm.
    #[must_use]
    pub fn with_corner_refinement(mut self, mode: crate::config::CornerRefinementMode) -> Self {
        self.config.refinement_mode = mode;
        self
    }

    /// Selects the payload decode mode.
    #[must_use]
    pub fn with_decode_mode(mut self, mode: crate::config::DecodeMode) -> Self {
        self.config.decode_mode = mode;
        self
    }

    /// Selects 4- or 8-connectivity for component labeling.
    #[must_use]
    pub fn with_connectivity(
        mut self,
        connectivity: crate::config::SegmentationConnectivity,
    ) -> Self {
        self.config.segmentation_connectivity = connectivity;
        self
    }

    /// Sets the adaptive-threshold tile size in pixels.
    #[must_use]
    pub fn with_threshold_tile_size(mut self, size: usize) -> Self {
        self.config.threshold_tile_size = size;
        self
    }

    /// Sets the minimum intensity range a tile needs to be thresholded.
    #[must_use]
    pub fn with_threshold_min_range(mut self, range: u8) -> Self {
        self.config.threshold_min_range = range;
        self
    }

    /// Sets the additive constant used by the adaptive threshold.
    #[must_use]
    pub fn with_adaptive_threshold_constant(mut self, c: i16) -> Self {
        self.config.adaptive_threshold_constant = c;
        self
    }

    /// Sets the minimum component area (pixels) considered a quad candidate.
    #[must_use]
    pub fn with_quad_min_area(mut self, area: u32) -> Self {
        self.config.quad_min_area = area;
        self
    }

    /// Sets the minimum fill ratio for quad candidates.
    #[must_use]
    pub fn with_quad_min_fill_ratio(mut self, ratio: f32) -> Self {
        self.config.quad_min_fill_ratio = ratio;
        self
    }

    /// Sets the minimum edge score for quad candidates.
    #[must_use]
    pub fn with_quad_min_edge_score(mut self, score: f64) -> Self {
        self.config.quad_min_edge_score = score;
        self
    }

    /// Sets the maximum Hamming error tolerated when matching codewords.
    #[must_use]
    pub fn with_max_hamming_error(mut self, errors: u32) -> Self {
        self.config.max_hamming_error = errors;
        self
    }

    /// Sets the minimum contrast required by the decoder / funnel gate.
    #[must_use]
    pub fn with_decoder_min_contrast(mut self, contrast: f64) -> Self {
        self.config.decoder_min_contrast = contrast;
        self
    }

    /// Sets the GWLF transversal alpha used during corner refinement.
    #[must_use]
    pub fn with_gwlf_transversal_alpha(mut self, alpha: f64) -> Self {
        self.config.gwlf_transversal_alpha = alpha;
        self
    }

    /// Sets the maximum elongation allowed for quad candidates.
    #[must_use]
    pub fn with_quad_max_elongation(mut self, elongation: f64) -> Self {
        self.config.quad_max_elongation = elongation;
        self
    }

    /// Sets the minimum density allowed for quad candidates.
    #[must_use]
    pub fn with_quad_min_density(mut self, density: f64) -> Self {
        self.config.quad_min_density = density;
        self
    }

    /// Selects the quad extraction algorithm.
    #[must_use]
    pub fn with_quad_extraction_mode(mut self, mode: crate::config::QuadExtractionMode) -> Self {
        self.config.quad_extraction_mode = mode;
        self
    }

    /// Enables or disables the Laplacian sharpening stage.
    #[must_use]
    pub fn with_sharpening(mut self, enable: bool) -> Self {
        self.config.enable_sharpening = enable;
        self
    }

    /// Builds a [`Detector`] that owns its own frame context.
    ///
    /// NOTE(review): clamps the pool size to at least 1, whereas
    /// `build_engine` maps 0 to `rayon::current_num_threads()`. The public
    /// setter enforces `max_concurrent_frames >= 1`, so the two only
    /// diverge if the field is set to 0 within this crate — confirm which
    /// behavior is canonical.
    #[must_use]
    pub fn build(self) -> Detector {
        let pool_size = self.max_concurrent_frames.max(1);
        let decoders = self.build_decoders();
        let engine = LocusEngine::new(self.config, decoders, pool_size);
        Detector {
            engine,
            ctx: FrameContext::new(),
        }
    }

    /// Validates the configuration first, returning the first configuration
    /// error instead of building a detector.
    pub fn validated_build(self) -> Result<Detector, crate::error::ConfigError> {
        self.config.validate()?;
        Ok(self.build())
    }

    /// Sets how many frame contexts are pre-allocated for concurrent
    /// detection (clamped to at least 1).
    #[must_use]
    pub fn with_max_concurrent_frames(mut self, n: usize) -> Self {
        self.max_concurrent_frames = n.max(1);
        self
    }

    /// Builds a standalone [`LocusEngine`] (no owned context).
    ///
    /// A `max_concurrent_frames` of 0 is interpreted as one context per
    /// rayon worker thread; see the NOTE on `build` about the
    /// inconsistency between the two build paths.
    #[must_use]
    pub fn build_engine(self) -> LocusEngine {
        let pool_size = if self.max_concurrent_frames == 0 {
            rayon::current_num_threads()
        } else {
            self.max_concurrent_frames
        };
        let decoders = self.build_decoders();
        LocusEngine::new(self.config, decoders, pool_size)
    }

    /// Instantiates one decoder per selected family, defaulting to
    /// AprilTag 36h11 when none were chosen.
    fn build_decoders(&self) -> Vec<Box<dyn TagDecoder + Send + Sync>> {
        let families = if self.families.is_empty() {
            vec![crate::config::TagFamily::AprilTag36h11]
        } else {
            self.families.clone()
        };
        families.into_iter().map(family_to_decoder).collect()
    }
}
818
819impl Default for DetectorBuilder {
820 fn default() -> Self {
821 Self::new()
822 }
823}
824
/// Shared, thread-safe detection engine: immutable configuration and
/// decoder set, plus a lock-free pool of reusable frame contexts for
/// concurrent detection.
pub struct LocusEngine {
    config: DetectorConfig,
    decoders: Vec<Box<dyn TagDecoder + Send + Sync>>,
    // Bounded lock-free pool backing `detect_concurrent`.
    pool: crossbeam_queue::ArrayQueue<Box<FrameContext>>,
}
849
impl LocusEngine {
    /// Creates an engine and pre-populates the context pool.
    ///
    /// NOTE(review): the queue capacity is `pool_size.max(1)`, but exactly
    /// `pool_size` contexts are pushed — with `pool_size == 0` the pool
    /// starts (and stays) empty, so every concurrent frame allocates a
    /// fresh context. Confirm that is intentional.
    #[must_use]
    pub fn new(
        config: DetectorConfig,
        decoders: Vec<Box<dyn TagDecoder + Send + Sync>>,
        pool_size: usize,
    ) -> Self {
        let capacity = pool_size.max(1);
        let pool = crossbeam_queue::ArrayQueue::new(capacity);
        for _ in 0..pool_size {
            // push only fails when the queue is full, which cannot happen
            // here because capacity >= pool_size.
            let _ = pool.push(Box::new(FrameContext::new()));
        }
        Self {
            config,
            decoders,
            pool,
        }
    }

    /// Runs the full pipeline on one frame using a caller-provided context.
    ///
    /// The returned view borrows `ctx`; it is invalidated when the context
    /// is reset by a subsequent detection.
    pub fn detect_with_context<'ctx>(
        &self,
        img: &ImageView,
        ctx: &'ctx mut FrameContext,
        intrinsics: Option<&crate::pose::CameraIntrinsics>,
        tag_size: Option<f64>,
        pose_mode: crate::config::PoseEstimationMode,
        debug_telemetry: bool,
    ) -> Result<DetectionBatchView<'ctx>, DetectorError> {
        run_detection_pipeline(
            &self.config,
            &self.decoders,
            img,
            ctx,
            intrinsics,
            tag_size,
            pose_mode,
            debug_telemetry,
        )
    }

    /// Detects tags in many frames in parallel with rayon.
    ///
    /// Each worker borrows a context from the pool (or allocates a fresh
    /// one when the pool is empty) and copies its results into owned
    /// detections before the context is released. Telemetry is always
    /// disabled on this path.
    pub fn detect_concurrent(
        &self,
        frames: &[ImageView<'_>],
        intrinsics: Option<&crate::pose::CameraIntrinsics>,
        tag_size: Option<f64>,
        pose_mode: crate::config::PoseEstimationMode,
    ) -> Vec<Result<Vec<crate::Detection>, DetectorError>> {
        use rayon::prelude::*;
        frames
            .par_iter()
            .map(|img| {
                // `to_pool` records whether this context must be returned;
                // overflow contexts allocated under pool exhaustion are
                // dropped afterwards — presumably to bound steady-state
                // memory at the pool size.
                let (mut ctx, to_pool) = if let Some(c) = self.pool.pop() {
                    (c, true)
                } else {
                    (Box::new(FrameContext::new()), false)
                };

                let owned = run_detection_pipeline(
                    &self.config,
                    &self.decoders,
                    img,
                    &mut ctx,
                    intrinsics,
                    tag_size,
                    pose_mode,
                    false, // telemetry off: its pointers would dangle once the context is reused
                )
                .map(|view| {
                    // Detach results from the context before it goes back
                    // to the pool.
                    view.reassemble_owned()
                });

                if to_pool {
                    let _ = self.pool.push(ctx);
                }
                owned
            })
            .collect()
    }

    /// Replaces the decoder set with one decoder per requested family.
    pub fn set_families(&mut self, families: &[crate::config::TagFamily]) {
        self.decoders.clear();
        for &family in families {
            self.decoders
                .push(crate::decoder::family_to_decoder(family));
        }
    }

    /// Returns a copy of the engine configuration.
    #[must_use]
    pub fn config(&self) -> DetectorConfig {
        self.config
    }
}
969
#[cfg(all(test, feature = "non_rectified"))]
#[allow(clippy::expect_used, clippy::unwrap_used)]
mod tests {
    use super::*;
    use crate::config::{CornerRefinementMode, DecodeMode, PoseEstimationMode, QuadExtractionMode};
    use crate::error::ConfigError;
    use crate::pose::CameraIntrinsics;

    /// Shared fixture: a config that forces the EdLines quad extractor
    /// with refinement and soft decoding turned off.
    fn edlines_config() -> DetectorConfig {
        DetectorConfig::builder()
            .quad_extraction_mode(QuadExtractionMode::EdLines)
            .refinement_mode(CornerRefinementMode::None)
            .decode_mode(DecodeMode::Hard)
            .build()
    }

    #[test]
    fn edlines_with_distortion_is_rejected_at_detect_time() {
        let mut detector = Detector::with_config(edlines_config());
        let buf = vec![0u8; 64 * 64];
        let view = ImageView::new(&buf, 64, 64, 64).expect("valid view");
        let distorted = CameraIntrinsics::with_brown_conrady(
            800.0, 800.0, 32.0, 32.0, -0.3, 0.1, 0.001, -0.002, 0.0,
        );

        let err = detector
            .detect(
                &view,
                Some(&distorted),
                None,
                PoseEstimationMode::Fast,
                false,
            )
            .expect_err("distorted EdLines must fail");

        assert!(
            matches!(
                err,
                DetectorError::Config(ConfigError::EdLinesUnsupportedWithDistortion)
            ),
            "expected EdLinesUnsupportedWithDistortion, got {err:?}"
        );
    }

    #[test]
    fn edlines_with_pinhole_is_accepted() {
        let mut detector = Detector::with_config(edlines_config());
        let buf = vec![0u8; 64 * 64];
        let view = ImageView::new(&buf, 64, 64, 64).expect("valid view");
        let pinhole = CameraIntrinsics::new(800.0, 800.0, 32.0, 32.0);

        detector
            .detect(
                &view,
                Some(&pinhole),
                None,
                PoseEstimationMode::Fast,
                false,
            )
            .expect("edlines with pinhole intrinsics must succeed");
    }
}
1035}