// oar_ocr_core/models/detection/db.rs
//! DB (Differentiable Binarization) Model
//!
//! This module provides a pure implementation of the DB text detection model.
//! The model handles preprocessing, inference, and postprocessing independently of tasks.

use std::path::Path;

use image::{DynamicImage, RgbImage};
use tracing::debug;

use crate::core::inference::OrtInfer;
use crate::core::{OCRError, Tensor4D, validate_positive, validate_range};
use crate::processors::{
    BoundingBox, BoxType, DBPostProcess, DBPostProcessConfig, DetResizeForTest, ImageScaleInfo,
    LimitType, NormalizeImage, ScoreMode, TensorLayout,
};

16/// Configuration for DB model preprocessing.
17#[derive(Debug, Clone, Default)]
18pub struct DBPreprocessConfig {
19    /// Limit for the side length of the image
20    pub limit_side_len: Option<u32>,
21    /// Type of limit to apply
22    pub limit_type: Option<LimitType>,
23    /// Maximum side limit for the image
24    pub max_side_limit: Option<u32>,
25    /// Resize long dimension (alternative to limit_side_len)
26    pub resize_long: Option<u32>,
27}
28
29/// Configuration for DB model postprocessing.
30#[derive(Debug, Clone)]
31pub struct DBPostprocessConfig {
32    /// Pixel-level threshold for text detection
33    pub score_threshold: f32,
34    /// Box-level threshold for filtering detections
35    pub box_threshold: f32,
36    /// Expansion ratio for detected regions using Vatti clipping
37    pub unclip_ratio: f32,
38    /// Maximum number of candidate detections
39    pub max_candidates: usize,
40    /// Whether to use dilation
41    pub use_dilation: bool,
42    /// Score calculation mode
43    pub score_mode: ScoreMode,
44    /// Type of bounding box (Quad or Poly)
45    pub box_type: BoxType,
46}
47
48impl Default for DBPostprocessConfig {
49    fn default() -> Self {
50        Self {
51            score_threshold: 0.3,
52            box_threshold: 0.7,
53            unclip_ratio: 1.5,
54            max_candidates: 1000,
55            use_dilation: false,
56            score_mode: ScoreMode::Fast,
57            box_type: BoxType::Quad,
58        }
59    }
60}
61
62impl DBPostprocessConfig {
63    /// Validates the configuration parameters.
64    pub fn validate(&self) -> Result<(), OCRError> {
65        // Validate score_threshold is in [0, 1]
66        validate_range(self.score_threshold, 0.0, 1.0, "score_threshold")?;
67
68        // Validate box_threshold is in [0, 1]
69        validate_range(self.box_threshold, 0.0, 1.0, "box_threshold")?;
70
71        // Validate unclip_ratio is positive
72        validate_positive(self.unclip_ratio, "unclip_ratio")?;
73
74        // Validate max_candidates is positive
75        validate_positive(self.max_candidates, "max_candidates")?;
76
77        Ok(())
78    }
79}
80
81/// DB model output containing bounding boxes and confidence scores.
82#[derive(Debug, Clone)]
83pub struct DBModelOutput {
84    /// Detected bounding boxes for each image in the batch
85    pub boxes: Vec<Vec<BoundingBox>>,
86    /// Confidence scores for each bounding box
87    pub scores: Vec<Vec<f32>>,
88}
89
90/// Pure DB model implementation.
91///
92/// This model implements the core DB architecture and can be configured
93/// for different detection tasks through preprocessing and postprocessing configs.
94#[derive(Debug)]
95pub struct DBModel {
96    /// ONNX Runtime inference engine
97    inference: OrtInfer,
98    /// Image resizer for preprocessing
99    resizer: DetResizeForTest,
100    /// Image normalizer for preprocessing
101    normalizer: NormalizeImage,
102    /// Postprocessor for converting predictions to bounding boxes
103    postprocessor: DBPostProcess,
104}
105
106impl DBModel {
107    /// Creates a new DB model.
108    pub fn new(
109        inference: OrtInfer,
110        resizer: DetResizeForTest,
111        normalizer: NormalizeImage,
112        postprocessor: DBPostProcess,
113    ) -> Self {
114        Self {
115            inference,
116            resizer,
117            normalizer,
118            postprocessor,
119        }
120    }
121
122    /// Preprocesses images for detection.
123    pub fn preprocess(
124        &self,
125        images: Vec<RgbImage>,
126    ) -> Result<(Tensor4D, Vec<ImageScaleInfo>), OCRError> {
127        // Convert to DynamicImage
128        let dynamic_images: Vec<DynamicImage> =
129            images.into_iter().map(DynamicImage::ImageRgb8).collect();
130
131        // Apply detection resizing
132        let (resized_images, img_shapes) = self.resizer.apply(
133            dynamic_images,
134            None, // Use default limit_side_len
135            None, // Use default limit_type
136            None, // Use default max_side_limit
137        );
138
139        debug!("After resize: {} images", resized_images.len());
140        for (i, (img, shape)) in resized_images.iter().zip(&img_shapes).enumerate() {
141            debug!(
142                "  Image {}: {}x{}, shape=[src_h={:.0}, src_w={:.0}, ratio_h={:.3}, ratio_w={:.3}]",
143                i,
144                img.width(),
145                img.height(),
146                shape.src_h,
147                shape.src_w,
148                shape.ratio_h,
149                shape.ratio_w
150            );
151        }
152
153        // Apply ImageNet normalization and convert to tensor.
154        //
155        // Note: External models often decode images as BGR and then normalize with
156        // mean/std as provided in their configs. In this repo, input images are
157        // loaded as RGB; we keep them in RGB here and rely on `NormalizeImage`
158        // with `ColorOrder::BGR` to map channels (RGB -> BGR) without a manual swap.
159        let batch_tensor = self.normalizer.normalize_batch_to(resized_images)?;
160        debug!("Batch tensor shape: {:?}", batch_tensor.shape());
161
162        Ok((batch_tensor, img_shapes))
163    }
164
165    /// Runs inference on the preprocessed batch.
166    pub fn infer(&self, batch_tensor: &Tensor4D) -> Result<Tensor4D, OCRError> {
167        self.inference
168            .infer_4d(batch_tensor)
169            .map_err(|e| OCRError::Inference {
170                model_name: "DB".to_string(),
171                context: format!(
172                    "failed to run inference on batch with shape {:?}",
173                    batch_tensor.shape()
174                ),
175                source: Box::new(e),
176            })
177    }
178
179    /// Postprocesses model predictions to bounding boxes.
180    pub fn postprocess(
181        &self,
182        predictions: &Tensor4D,
183        img_shapes: Vec<ImageScaleInfo>,
184        score_threshold: f32,
185        box_threshold: f32,
186        unclip_ratio: f32,
187    ) -> DBModelOutput {
188        let config = DBPostProcessConfig::new(score_threshold, box_threshold, unclip_ratio);
189        let (boxes, scores) = self
190            .postprocessor
191            .apply(predictions, img_shapes, Some(&config));
192        DBModelOutput { boxes, scores }
193    }
194
195    /// Runs the complete forward pass: preprocess -> infer -> postprocess.
196    pub fn forward(
197        &self,
198        images: Vec<RgbImage>,
199        score_threshold: f32,
200        box_threshold: f32,
201        unclip_ratio: f32,
202    ) -> Result<DBModelOutput, OCRError> {
203        let (batch_tensor, img_shapes) = self.preprocess(images)?;
204        let predictions = self.infer(&batch_tensor)?;
205        Ok(self.postprocess(
206            &predictions,
207            img_shapes,
208            score_threshold,
209            box_threshold,
210            unclip_ratio,
211        ))
212    }
213}
214
215/// Builder for DB model.
216pub struct DBModelBuilder {
217    /// Preprocessing configuration
218    preprocess_config: DBPreprocessConfig,
219    /// Postprocessing configuration
220    postprocess_config: DBPostprocessConfig,
221    /// ONNX Runtime session configuration
222    ort_config: Option<crate::core::config::OrtSessionConfig>,
223}
224
225impl DBModelBuilder {
226    /// Creates a new DB model builder with default settings.
227    pub fn new() -> Self {
228        Self {
229            preprocess_config: DBPreprocessConfig::default(),
230            postprocess_config: DBPostprocessConfig::default(),
231            ort_config: None,
232        }
233    }
234
235    /// Sets the preprocessing configuration.
236    pub fn preprocess_config(mut self, config: DBPreprocessConfig) -> Self {
237        self.preprocess_config = config;
238        self
239    }
240
241    /// Sets the postprocessing configuration.
242    pub fn postprocess_config(mut self, config: DBPostprocessConfig) -> Self {
243        self.postprocess_config = config;
244        self
245    }
246
247    /// Sets the ONNX Runtime session configuration.
248    pub fn with_ort_config(mut self, config: crate::core::config::OrtSessionConfig) -> Self {
249        self.ort_config = Some(config);
250        self
251    }
252
253    /// Builds the DB model.
254    pub fn build(self, model_path: &Path) -> Result<DBModel, OCRError> {
255        // Create ONNX inference engine
256        let inference = if self.ort_config.is_some() {
257            use crate::core::config::ModelInferenceConfig;
258            let common_config = ModelInferenceConfig {
259                ort_session: self.ort_config,
260                ..Default::default()
261            };
262            OrtInfer::from_config(&common_config, model_path, Some("x"))?
263        } else {
264            OrtInfer::new(model_path, Some("x"))?
265        };
266
267        // Create resizer
268        let resizer = DetResizeForTest::new(
269            None,                                  // input_shape
270            None,                                  // image_shape
271            None,                                  // keep_ratio
272            self.preprocess_config.limit_side_len, // limit_side_len
273            self.preprocess_config.limit_type,     // limit_type
274            self.preprocess_config.resize_long,    // resize_long
275            self.preprocess_config.max_side_limit, // max_side_limit
276        );
277
278        // Create normalizer.
279        // External models read images in BGR. Their configs use ImageNet stats
280        // in that *same* channel order (B, G, R). Our images are loaded as RGB,
281        // so we keep them in RGB and use `ColorOrder::BGR` to map channels
282        // into BGR order during normalization.
283        let normalizer = NormalizeImage::with_color_order(
284            Some(1.0 / 255.0),               // scale
285            Some(vec![0.485, 0.456, 0.406]), // mean
286            Some(vec![0.229, 0.224, 0.225]), // std
287            Some(TensorLayout::CHW),         // order
288            Some(crate::processors::types::ColorOrder::BGR),
289        )?;
290
291        // Create postprocessor
292        let postprocessor = DBPostProcess::new(
293            Some(self.postprocess_config.score_threshold),
294            Some(self.postprocess_config.box_threshold),
295            Some(self.postprocess_config.max_candidates),
296            Some(self.postprocess_config.unclip_ratio),
297            Some(self.postprocess_config.use_dilation),
298            Some(self.postprocess_config.score_mode),
299            Some(self.postprocess_config.box_type),
300        );
301
302        Ok(DBModel::new(inference, resizer, normalizer, postprocessor))
303    }
304}
305
306impl Default for DBModelBuilder {
307    fn default() -> Self {
308        Self::new()
309    }
310}