1//! # MNN Rust Bindings
2//!
3//! Safe Rust bindings for Alibaba's MNN (Mobile Neural Network) inference engine.
4//!
5//! MNN is a highly efficient and lightweight deep learning inference framework.
6//! This crate provides idiomatic Rust bindings for running inference with MNN.
7//!
8//! ## Features
9//!
10//! - **Safe API**: All MNN operations are wrapped in safe Rust types
11//! - **Multiple Backends**: CPU, CUDA, OpenCL, Vulkan, Metal support
12//! - **Async Support**: Optional async API using tokio
13//! - **Cross-Platform**: Windows, Linux, macOS, Android, iOS support
14//!
15//! ## Quick Start
16//!
17//! ```no_run
18//! use mnn_rs::{Interpreter, ScheduleConfig, BackendType};
19//!
20//! // Load a model
21//! let interpreter = Interpreter::from_file("model.mnn")?;
22//!
23//! // Create a session
24//! let config = ScheduleConfig::new()
25//!     .backend(BackendType::CPU)
26//!     .num_threads(4);
27//!
28//! let mut session = interpreter.create_session(config)?;
29//!
30//! // Get input tensor
31//! let input = session.get_input(None)?;
32//!
33//! // Fill input with data (example)
34//! // input.write(&my_data)?;
35//!
36//! // Run inference
37//! session.run()?;
38//!
39//! // Get output
40//! let output = session.get_output(None)?;
41//!
42//! # Ok::<(), mnn_rs::MnnError>(())
43//! ```
44//!
45//! ## Backend Configuration
46//!
47//! ```no_run
48//! use mnn_rs::{ScheduleConfig, BackendType, MemoryMode, PrecisionMode};
49//!
50//! // CPU with custom settings
51//! let cpu_config = ScheduleConfig::new()
52//!     .backend(BackendType::CPU)
53//!     .num_threads(8)
54//!     .memory_mode(MemoryMode::Low);
55//!
56//! // GPU (auto-detect best backend)
57//! let gpu_config = ScheduleConfig::new()
58//!     .backend(BackendType::Auto)
59//!     .precision_mode(PrecisionMode::Low);
60//!
61//! # Ok::<(), mnn_rs::MnnError>(())
62//! ```
63//!
64//! ## Async API (requires "async" feature)
65//!
66//! ```ignore
67//! use mnn_rs::{AsyncInterpreter, ScheduleConfig};
68//!
69//! #[tokio::main]
70//! async fn main() -> Result<(), mnn_rs::MnnError> {
71//!     let interpreter = AsyncInterpreter::from_file("model.mnn").await?;
72//!     let mut session = interpreter.create_session(ScheduleConfig::default()).await?;
73//!
74//!     session.run_async().await?;
75//!
76//!     Ok(())
77//! }
78//! ```
79
#![deny(missing_docs)]
#![deny(unsafe_op_in_unsafe_fn)]
#![warn(missing_debug_implementations)]

// Re-export the raw FFI crate so advanced users can reach the unsafe C API
// directly without adding a separate dependency on `mnn_rs-sys`.
pub use mnn_rs_sys;

// Core modules: error handling first, since every other module's API
// returns `MnnResult`.
mod error;
pub use error::{MnnError, MnnResult};

// Backend discovery and capability queries (CPU/GPU selection, data types).
mod backend;
pub use backend::{
    available_backends, is_backend_available, version, BackendCapabilities, BackendConfig,
    BackendType, DataType,
};

// Session scheduling configuration (threads, precision, memory/power modes).
mod config;
pub use config::{DataFormat, MemoryMode, PowerMode, PrecisionMode, ScheduleConfig, ScheduleConfigBuilder, SessionMode};

// Tensor types for reading and writing model inputs/outputs.
mod tensor;
pub use tensor::{Tensor, TensorData, TensorInfo, TensorView};

// Optional image pre-processing helpers (resize, format conversion, I/O),
// gated behind the "image-process" cargo feature.
#[cfg(feature = "image-process")]
mod image_process;
#[cfg(feature = "image-process")]
pub use image_process::{
    imread, imwrite, resize,
    Filter, ImageConfig, ImageFormat, ImageProcess, ImreadFlags, Matrix, ResizeFilter, Wrap,
};

// Inference sessions created from an `Interpreter`.
mod session;
pub use session::{Session, SessionGuard};

// Model loading and session creation; `AsyncInterpreter` only exists with
// the "async" feature enabled.
mod interpreter;
pub use interpreter::Interpreter;
#[cfg(feature = "async")]
pub use interpreter::AsyncInterpreter;

// Small free-standing helpers for tensor size/format arithmetic.
mod utils;
pub use utils::{calculate_element_count, calculate_tensor_size, convert_format};

// Optional runtime-management extensions ("runtime" cargo feature).
#[cfg(feature = "runtime")]
mod runtime;
#[cfg(feature = "runtime")]
pub use runtime::{InterpreterRuntimeExt, RuntimeInfo};

// Async module ("async" cargo feature): tokio-based helpers for running
// sessions off the current thread and pooling them for batch inference.
#[cfg(feature = "async")]
mod async_mod;
#[cfg(feature = "async")]
pub use async_mod::{run_session_async, AsyncBatchInference, SessionPool};

// Re-export commonly used types at crate root.
// `Error` is an alias for `MnnError`, matching the common `crate::Error`
// convention so downstream code can write `mnn_rs::Error`.
pub use crate::error::MnnError as Error;
135/// Prelude for common MNN types.
136pub mod prelude {
137    //! Common types for MNN operations.
138    //!
139    //! This module re-exports the most commonly used types for convenience.
140    //!
141    //! ```
142    //! use mnn_rs::prelude::*;
143    //! ```
144
145    pub use crate::backend::{BackendType, DataType};
146    pub use crate::config::{DataFormat, MemoryMode, PowerMode, PrecisionMode, ScheduleConfig, ScheduleConfigBuilder};
147    pub use crate::error::{MnnError, MnnResult};
148    pub use crate::interpreter::Interpreter;
149    pub use crate::session::Session;
150    pub use crate::tensor::{Tensor, TensorData, TensorInfo};
151    #[cfg(feature = "image-process")]
152    pub use crate::image_process::{ImageConfig, ImageFormat, Filter};
153    #[cfg(feature = "runtime")]
154    pub use crate::runtime::RuntimeInfo;
155}
156
// Unit tests for the crate-level re-exports and backend queries.
#[cfg(test)]
mod tests {
    use super::*;

    /// `version()` must always return a usable, non-empty string.
    ///
    /// NOTE: the previous assertion `!v.is_empty() || v == "unknown"` had a
    /// dead second clause — the literal "unknown" is itself non-empty, so the
    /// condition was equivalent to `!v.is_empty()` alone.
    #[test]
    fn test_version() {
        let v = version();
        assert!(!v.is_empty(), "version string must not be empty");
    }

    /// The CPU backend is compiled in unconditionally, so it must always be
    /// reported as available regardless of enabled cargo features.
    #[test]
    fn test_available_backends() {
        let backends = available_backends();
        assert!(
            backends.contains(&BackendType::CPU),
            "CPU backend should always be available, got: {backends:?}"
        );
    }
}
175}