/// Raw FFI bindings to the RKNN runtime, exposed publicly.
///
/// With the `raw` feature enabled, downstream crates get direct access to
/// every generated symbol instead of only the curated re-exports below.
#[cfg(feature = "raw")]
pub mod bindings {
    // The generated bindings use C naming conventions; silence the lints
    // rather than rename thousands of machine-produced items.
    #![allow(unused_imports)]
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]
    // docs.rs builds cannot run the build-script generation step (no vendor
    // SDK available there), so a pre-generated snapshot is included instead.
    #[cfg(docsrs)]
    include!("bindings_docs.rs");
    // Normal builds pull in the bindings emitted by the build script.
    #[cfg(not(docsrs))]
    include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
}
13
/// Raw FFI bindings to the RKNN runtime, kept crate-private.
///
/// Without the `raw` feature, the generated symbols are only reachable
/// through the curated re-exports in the feature-gated modules below;
/// `dead_code` is allowed because most generated items then go unused.
#[cfg(not(feature = "raw"))]
mod bindings {
    // The generated bindings use C naming conventions; silence the lints
    // rather than rename thousands of machine-produced items.
    #![allow(unused_imports)]
    #![allow(dead_code)]
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]
    // docs.rs builds cannot run the build-script generation step (no vendor
    // SDK available there), so a pre-generated snapshot is included instead.
    #[cfg(docsrs)]
    include!("bindings_docs.rs");
    // Normal builds pull in the bindings emitted by the build script.
    #[cfg(not(docsrs))]
    include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
}
26
/// Curated surface over the raw RKNN runtime bindings.
///
/// Re-exports the core inference API (context, query, input/output, memory)
/// plus small helpers for turning tensor-type enums into display strings.
#[cfg(feature = "rknn")]
pub mod rknn {
    /// Context-creation and memory flags (`RKNN_FLAG_*` / `RKNN_MEM_FLAG_*`)
    /// passed to `rknn_init` and the memory-allocation APIs.
    pub mod flag {
        pub use crate::bindings::{
            RKNN_FLAG_PRIOR_HIGH,
            RKNN_FLAG_PRIOR_MEDIUM,
            RKNN_FLAG_PRIOR_LOW,
            RKNN_FLAG_ASYNC_MASK,
            RKNN_FLAG_COLLECT_PERF_MASK,
            RKNN_FLAG_MEM_ALLOC_OUTSIDE,
            RKNN_FLAG_SHARE_WEIGHT_MEM,
            RKNN_FLAG_FENCE_IN_OUTSIDE,
            RKNN_FLAG_FENCE_OUT_OUTSIDE,
            RKNN_FLAG_COLLECT_MODEL_INFO_ONLY,
            RKNN_FLAG_INTERNAL_ALLOC_OUTSIDE,
            RKNN_FLAG_EXECUTE_FALLBACK_PRIOR_DEVICE_GPU,
            RKNN_FLAG_ENABLE_SRAM,
            RKNN_FLAG_SHARE_SRAM,
            RKNN_FLAG_DISABLE_PROC_HIGH_PRIORITY,
            RKNN_FLAG_DISABLE_FLUSH_INPUT_MEM_CACHE,
            RKNN_FLAG_DISABLE_FLUSH_OUTPUT_MEM_CACHE,
            RKNN_FLAG_MODEL_BUFFER_ZERO_COPY,
            RKNN_MEM_FLAG_ALLOC_NO_CONTEXT,
        };
    }

    /// Status codes returned by the runtime (`RKNN_SUCC` on success,
    /// negative `RKNN_ERR_*` values on failure).
    pub mod error {
        pub use crate::bindings::{
            RKNN_SUCC,
            RKNN_ERR_FAIL,
            RKNN_ERR_TIMEOUT,
            RKNN_ERR_DEVICE_UNAVAILABLE,
            RKNN_ERR_MALLOC_FAIL,
            RKNN_ERR_PARAM_INVALID,
            RKNN_ERR_MODEL_INVALID,
            RKNN_ERR_CTX_INVALID,
            RKNN_ERR_INPUT_INVALID,
            RKNN_ERR_OUTPUT_INVALID,
            RKNN_ERR_DEVICE_UNMATCH,
            // NOTE(review): "INCOMPATILE" is the vendor SDK's own spelling;
            // it must stay as-is to match the generated bindings.
            RKNN_ERR_INCOMPATILE_PRE_COMPILE_MODEL,
            RKNN_ERR_INCOMPATILE_OPTIMIZATION_LEVEL_VERSION,
            RKNN_ERR_TARGET_PLATFORM_UNMATCH,
        };
    }

    /// Compile-time limits of the runtime (max dims, name length, etc.).
    pub mod limits {
        pub use crate::bindings::{
            RKNN_MAX_DIMS,
            RKNN_MAX_NUM_CHANNEL,
            RKNN_MAX_NAME_LEN,
            RKNN_MAX_DYNAMIC_SHAPE_NUM,
        };
    }

    // Core types: context handle, enums, and the structs exchanged with
    // the query / input / output / memory APIs.
    pub use crate::bindings::{
        rknn_context,
        rknn_query_cmd,
        rknn_tensor_type,
        rknn_tensor_qnt_type,
        rknn_tensor_format,
        rknn_core_mask,
        rknn_input_output_num,
        rknn_tensor_attr,
        rknn_input_range,
        rknn_perf_detail,
        rknn_perf_run,
        rknn_sdk_version,
        rknn_mem_size,
        rknn_custom_string,
        rknn_tensor_mem_flags,
        rknn_mem_alloc_flags,
        rknn_mem_sync_mode,
        rknn_tensor_mem,
        rknn_input,
        rknn_output,
        rknn_init_extend,
        rknn_run_extend,
        rknn_output_extend,
    };

    // Core functions covering the standard inference lifecycle:
    // init -> query -> inputs_set -> run -> outputs_get -> outputs_release
    // -> destroy, plus zero-copy memory management.
    pub use crate::bindings::{
        rknn_init,
        rknn_destroy,
        rknn_query,
        rknn_inputs_set,
        rknn_run,
        rknn_outputs_get,
        rknn_outputs_release,
        rknn_create_mem,
        rknn_destroy_mem,
        rknn_set_io_mem,
    };

    /// Returns a short display name for a tensor element type.
    ///
    /// Unrecognized variants yield `"UNKNOW"` (sic — this matches the
    /// spelling used by the vendor SDK's own example code).
    pub fn get_type_string(t: rknn_tensor_type) -> &'static str {
        match t {
            rknn_tensor_type::RKNN_TENSOR_FLOAT32 => "FP32",
            rknn_tensor_type::RKNN_TENSOR_FLOAT16 => "FP16",
            rknn_tensor_type::RKNN_TENSOR_INT8 => "INT8",
            rknn_tensor_type::RKNN_TENSOR_UINT8 => "UINT8",
            rknn_tensor_type::RKNN_TENSOR_INT16 => "INT16",
            rknn_tensor_type::RKNN_TENSOR_UINT16 => "UINT16",
            rknn_tensor_type::RKNN_TENSOR_INT32 => "INT32",
            rknn_tensor_type::RKNN_TENSOR_UINT32 => "UINT32",
            rknn_tensor_type::RKNN_TENSOR_INT64 => "INT64",
            rknn_tensor_type::RKNN_TENSOR_BOOL => "BOOL",
            rknn_tensor_type::RKNN_TENSOR_INT4 => "INT4",
            rknn_tensor_type::RKNN_TENSOR_BFLOAT16 => "BF16",
            // Match arms are expressions; the explicit `return` the original
            // used here was redundant. The catch-all stays because the
            // bindgen-generated enum may grow variants with new SDK versions.
            _ => "UNKNOW",
        }
    }

    /// Returns a short display name for a tensor quantization scheme.
    ///
    /// Unrecognized variants yield `"UNKNOW"` (sic — vendor SDK spelling).
    pub fn get_qnt_type_string(t: rknn_tensor_qnt_type) -> &'static str {
        match t {
            rknn_tensor_qnt_type::RKNN_TENSOR_QNT_NONE => "NONE",
            rknn_tensor_qnt_type::RKNN_TENSOR_QNT_DFP => "DFP",
            rknn_tensor_qnt_type::RKNN_TENSOR_QNT_AFFINE_ASYMMETRIC => "AFFINE",
            _ => "UNKNOW",
        }
    }
}
146
/// Matrix-multiplication API surface.
///
/// Currently empty — a placeholder for re-exports of the runtime's matmul
/// API, gated behind the `matmul` feature.
#[cfg(feature = "matmul")]
pub mod matmul {}
149
/// Custom-operator API surface.
///
/// Currently empty — a placeholder for re-exports of the runtime's custom-op
/// API, gated behind the `custom-op` feature.
#[cfg(feature = "custom-op")]
pub mod custom_op {}