// ocr_rs/mnn/docsrs_stub.rs

//! docsrs stub module - for documentation generation
//!
//! This module is used during docs.rs build, providing type definitions without actual implementations

use ndarray::{ArrayD, ArrayViewD};
use std::path::Path;

// ============== Error Types ==============

/// MNN-related errors
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MnnError {
    /// Invalid parameter
    InvalidParameter(String),
    /// Out of memory
    OutOfMemory,
    /// Runtime error
    RuntimeError(String),
    /// Unsupported operation
    Unsupported,
    /// Model loading failed
    ModelLoadFailed(String),
    /// Null pointer error
    NullPointer,
    /// Shape mismatch between the expected and actual tensor shapes
    ShapeMismatch {
        /// Shape the engine expected
        expected: Vec<usize>,
        /// Shape that was actually provided
        got: Vec<usize>,
    },
}

32impl std::fmt::Display for MnnError {
33    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
34        write!(f, "{:?}", self)
35    }
36}
37
38impl std::error::Error for MnnError {}
39
40/// MNN Result type
41pub type Result<T> = std::result::Result<T, MnnError>;
42
43// ============== Backend Types ==============
44
/// Computation backend
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum Backend {
    /// CPU backend (default)
    #[default]
    CPU,
    /// Metal GPU (macOS/iOS)
    Metal,
    /// OpenCL GPU
    OpenCL,
    /// OpenGL GPU
    OpenGL,
    /// Vulkan GPU
    Vulkan,
    /// CUDA GPU
    CUDA,
    /// CoreML (macOS/iOS)
    CoreML,
}

/// Precision mode
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum PrecisionMode {
    /// Normal precision (default)
    #[default]
    Normal,
    /// Low precision
    Low,
    /// High precision
    High,
    /// Low memory usage
    LowMemory,
}

/// Data format
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum DataFormat {
    /// NCHW format (default)
    #[default]
    NCHW,
    /// NHWC format
    NHWC,
}

// ============== Configuration Types ==============

91/// Inference configuration
92#[derive(Debug, Clone)]
93pub struct InferenceConfig {
94    pub thread_count: i32,
95    pub precision_mode: PrecisionMode,
96    pub backend: Backend,
97    pub use_cache: bool,
98    pub data_format: DataFormat,
99}
100
101impl Default for InferenceConfig {
102    fn default() -> Self {
103        Self {
104            thread_count: 4,
105            precision_mode: PrecisionMode::Normal,
106            backend: Backend::CPU,
107            use_cache: true,
108            data_format: DataFormat::NCHW,
109        }
110    }
111}
112
113impl InferenceConfig {
114    /// Create a new inference configuration
115    pub fn new() -> Self {
116        Self::default()
117    }
118
119    /// Set the number of threads
120    pub fn with_threads(mut self, threads: i32) -> Self {
121        self.thread_count = threads;
122        self
123    }
124
125    /// Set the precision mode
126    pub fn with_precision(mut self, precision: PrecisionMode) -> Self {
127        self.precision_mode = precision;
128        self
129    }
130
131    /// Set the backend
132    pub fn with_backend(mut self, backend: Backend) -> Self {
133        self.backend = backend;
134        self
135    }
136
137    /// Set the data format
138    pub fn with_data_format(mut self, format: DataFormat) -> Self {
139        self.data_format = format;
140        self
141    }
142}
143
144// ============== Shared Runtime ==============
145
/// Shared runtime for sharing resources between multiple engines
pub struct SharedRuntime {
    // Zero-sized private field: prevents construction outside this module
    // while keeping the type's public API identical to the real implementation.
    _private: (),
}

151impl SharedRuntime {
152    /// Create a new shared runtime
153    pub fn new(_config: &InferenceConfig) -> Result<Self> {
154        unimplemented!(
155            "This feature is only available at runtime, not available during documentation build"
156        )
157    }
158}
159
160// ============== Inference Engine ==============
161
/// MNN inference engine
pub struct InferenceEngine {
    // Model input shape, exposed via `input_shape()`.
    _input_shape: Vec<usize>,
    // Model output shape, exposed via `output_shape()`.
    _output_shape: Vec<usize>,
}

168impl InferenceEngine {
169    /// Create inference engine from file
170    pub fn from_file(
171        _model_path: impl AsRef<Path>,
172        _config: Option<InferenceConfig>,
173    ) -> Result<Self> {
174        unimplemented!(
175            "This feature is only available at runtime, not available during documentation build"
176        )
177    }
178
179    /// Create inference engine from memory
180    pub fn from_buffer(_data: &[u8], _config: Option<InferenceConfig>) -> Result<Self> {
181        unimplemented!(
182            "This feature is only available at runtime, not available during documentation build"
183        )
184    }
185
186    /// Create inference engine from model bytes using shared runtime
187    pub fn from_buffer_with_runtime(
188        _model_buffer: &[u8],
189        _runtime: &SharedRuntime,
190    ) -> Result<Self> {
191        unimplemented!(
192            "This feature is only available at runtime, not available during documentation build"
193        )
194    }
195
196    /// Get input shape
197    pub fn input_shape(&self) -> &[usize] {
198        &self._input_shape
199    }
200
201    /// Get output shape
202    pub fn output_shape(&self) -> &[usize] {
203        &self._output_shape
204    }
205
206    /// Perform inference
207    pub fn infer(&self, _input: ArrayViewD<f32>) -> Result<ArrayD<f32>> {
208        unimplemented!()
209    }
210
211    /// Perform inference (variable input shape)
212    pub fn infer_dynamic(&self, _input: ArrayViewD<f32>) -> Result<ArrayD<f32>> {
213        unimplemented!()
214    }
215
216    /// Perform inference (variable input shape) - alias
217    pub fn run_dynamic(&self, _input: ArrayViewD<f32>) -> Result<ArrayD<f32>> {
218        unimplemented!()
219    }
220
221    /// Perform inference (raw interface)
222    pub fn run_dynamic_raw(
223        &self,
224        _input_data: &[f32],
225        _input_shape: &[usize],
226        _output_data: &mut [f32],
227    ) -> Result<Vec<usize>> {
228        unimplemented!()
229    }
230}
231
232// ============== Helper Functions ==============
233
/// Get MNN version
///
/// The docs.rs stub cannot query the real MNN library, so this
/// always returns a fixed placeholder string.
pub fn get_version() -> String {
    "unknown (docs.rs build)".to_string()
}