//! Stub implementations for docs.rs builds, where the native MNN library
//! is unavailable (ocr_rs/mnn/docsrs_stub.rs).

use ndarray::{ArrayD, ArrayViewD};
use std::path::Path;

/// Errors surfaced by the MNN inference wrapper.
///
/// This is the docs.rs stub variant of the error type; it exists so rustdoc
/// can render the API without linking the native library (see the
/// `unimplemented!` stubs below).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MnnError {
    /// A caller-supplied argument was rejected; the message names it.
    InvalidParameter(String),
    /// Memory allocation failed.
    OutOfMemory,
    /// Generic failure reported by the underlying runtime.
    RuntimeError(String),
    /// The requested operation or backend is not supported.
    Unsupported,
    /// The model file/buffer could not be loaded; the message gives details.
    ModelLoadFailed(String),
    /// An unexpected null pointer was encountered (presumably from the
    /// native layer — confirm against the non-stub implementation).
    NullPointer,
    /// A tensor shape did not match what was expected.
    ShapeMismatch {
        /// The shape that was expected.
        expected: Vec<usize>,
        /// The shape that was actually provided.
        got: Vec<usize>,
    },
}

32impl std::fmt::Display for MnnError {
33 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
34 write!(f, "{:?}", self)
35 }
36}
37
// Marker impl so `MnnError` works as a `dyn std::error::Error` (e.g. inside
// `Box<dyn Error>` or `anyhow`); `Display` and `Debug` supply the messages.
impl std::error::Error for MnnError {}

/// Convenience alias: every fallible API in this module returns `MnnError`.
pub type Result<T> = std::result::Result<T, MnnError>;

/// Compute backend selection (presumably mirrors the native MNN forward
/// types — confirm against the non-stub implementation).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum Backend {
    /// CPU execution (the default).
    #[default]
    CPU,
    /// Apple Metal GPU backend.
    Metal,
    /// OpenCL GPU backend.
    OpenCL,
    /// OpenGL compute backend.
    OpenGL,
    /// Vulkan compute backend.
    Vulkan,
    /// NVIDIA CUDA backend.
    CUDA,
    /// Apple Core ML backend.
    CoreML,
}

/// Numeric precision trade-off requested from the backend.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum PrecisionMode {
    /// Balanced precision (the default).
    #[default]
    Normal,
    /// Lower precision, typically faster.
    Low,
    /// Higher precision, typically slower.
    High,
    /// Precision mode tuned for reduced memory use.
    LowMemory,
}

/// Tensor memory layout for model inputs/outputs.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum DataFormat {
    /// Batch, channels, height, width (the default).
    #[default]
    NCHW,
    /// Batch, height, width, channels.
    NHWC,
}

/// Configuration for creating an inference engine or shared runtime.
///
/// Build one with [`InferenceConfig::new`] plus the `with_*` builder methods,
/// or rely on [`Default`] (4 threads, normal precision, CPU, cache on, NCHW).
#[derive(Debug, Clone)]
pub struct InferenceConfig {
    /// Number of worker threads for inference.
    pub thread_count: i32,
    /// Requested numeric precision mode.
    pub precision_mode: PrecisionMode,
    /// Compute backend to run on.
    pub backend: Backend,
    /// Whether to enable caching (backend semantics — confirm against the
    /// non-stub implementation).
    pub use_cache: bool,
    /// Tensor layout of inputs/outputs.
    pub data_format: DataFormat,
}

101impl Default for InferenceConfig {
102 fn default() -> Self {
103 Self {
104 thread_count: 4,
105 precision_mode: PrecisionMode::Normal,
106 backend: Backend::CPU,
107 use_cache: true,
108 data_format: DataFormat::NCHW,
109 }
110 }
111}
112
113impl InferenceConfig {
114 pub fn new() -> Self {
116 Self::default()
117 }
118
119 pub fn with_threads(mut self, threads: i32) -> Self {
121 self.thread_count = threads;
122 self
123 }
124
125 pub fn with_precision(mut self, precision: PrecisionMode) -> Self {
127 self.precision_mode = precision;
128 self
129 }
130
131 pub fn with_backend(mut self, backend: Backend) -> Self {
133 self.backend = backend;
134 self
135 }
136
137 pub fn with_data_format(mut self, format: DataFormat) -> Self {
139 self.data_format = format;
140 self
141 }
142}
143
/// Opaque handle to a runtime shared across engines.
///
/// In this docs.rs stub it can never actually be constructed
/// ([`SharedRuntime::new`] panics); the private unit field only prevents
/// construction via a struct literal.
pub struct SharedRuntime {
    _private: (),
}

impl SharedRuntime {
    /// Creates a shared runtime from `config`.
    ///
    /// # Panics
    /// Always panics in the docs.rs stub — the real implementation requires
    /// the native MNN library, which is unavailable during doc builds.
    pub fn new(_config: &InferenceConfig) -> Result<Self> {
        unimplemented!(
            "This feature is only available at runtime, not available during documentation build"
        )
    }
}

/// Handle to a loaded model ready for inference.
///
/// In this docs.rs stub it can never actually be constructed (all
/// constructors panic); the fields exist only so the shape accessors
/// type-check.
pub struct InferenceEngine {
    // Cached model input shape, returned by `input_shape`.
    _input_shape: Vec<usize>,
    // Cached model output shape, returned by `output_shape`.
    _output_shape: Vec<usize>,
}

168impl InferenceEngine {
169 pub fn from_file(
171 _model_path: impl AsRef<Path>,
172 _config: Option<InferenceConfig>,
173 ) -> Result<Self> {
174 unimplemented!(
175 "This feature is only available at runtime, not available during documentation build"
176 )
177 }
178
179 pub fn from_buffer(_data: &[u8], _config: Option<InferenceConfig>) -> Result<Self> {
181 unimplemented!(
182 "This feature is only available at runtime, not available during documentation build"
183 )
184 }
185
186 pub fn from_buffer_with_runtime(
188 _model_buffer: &[u8],
189 _runtime: &SharedRuntime,
190 ) -> Result<Self> {
191 unimplemented!(
192 "This feature is only available at runtime, not available during documentation build"
193 )
194 }
195
196 pub fn input_shape(&self) -> &[usize] {
198 &self._input_shape
199 }
200
201 pub fn output_shape(&self) -> &[usize] {
203 &self._output_shape
204 }
205
206 pub fn infer(&self, _input: ArrayViewD<f32>) -> Result<ArrayD<f32>> {
208 unimplemented!()
209 }
210
211 pub fn infer_dynamic(&self, _input: ArrayViewD<f32>) -> Result<ArrayD<f32>> {
213 unimplemented!()
214 }
215
216 pub fn run_dynamic(&self, _input: ArrayViewD<f32>) -> Result<ArrayD<f32>> {
218 unimplemented!()
219 }
220
221 pub fn run_dynamic_raw(
223 &self,
224 _input_data: &[f32],
225 _input_shape: &[usize],
226 _output_data: &mut [f32],
227 ) -> Result<Vec<usize>> {
228 unimplemented!()
229 }
230}
231
/// Returns the MNN library version string.
///
/// The docs.rs stub has no native library to query, so a fixed placeholder
/// is returned instead.
pub fn get_version() -> String {
    String::from("unknown (docs.rs build)")
}