use super::types::*;
use crate::link;
// NOTE(review): bindgen maps `wchar_t` to `c_char` (1 byte) here. On Linux
// `wchar_t` is normally 32-bit and on Windows 16-bit, so this alias is only
// correct if the bindings were generated for a target where wchar_t is one
// byte — TODO confirm against the generation target before using it in FFI.
type wchar_t = ::std::os::raw::c_char;
link! {
// --- Base API: error reporting and library-owned string memory ---
extern "C" {
#[doc = " @brief Print the error info.\n @ingroup ov_base_c_api\n @param ov_status_e a status code."]
pub fn ov_get_error_info(status: ov_status_e) -> *const ::std::os::raw::c_char;
}
extern "C" {
#[doc = " @brief free char\n @ingroup ov_base_c_api\n @param content The pointer to the char to free."]
// NOTE(review): takes *const, i.e. callers hand back library-owned strings;
// presumably this is the designated deallocator for strings returned by this
// C API — confirm against the OpenVINO C headers before freeing anything else.
pub fn ov_free(content: *const ::std::os::raw::c_char);
}
extern "C" {
#[doc = " @brief Get the last error msg.\n @ingroup ov_base_c_api"]
pub fn ov_get_last_err_msg() -> *const ::std::os::raw::c_char;
}
// --- Dimension, layout and rank helpers ---
extern "C" {
#[doc = " @brief Check this dimension whether is dynamic\n @ingroup ov_dimension_c_api\n @param dim The dimension pointer that will be checked.\n @return Boolean, true is dynamic and false is static."]
pub fn ov_dimension_is_dynamic(dim: ov_dimension_t) -> bool;
}
extern "C" {
#[doc = " @brief Create a layout object.\n @ingroup ov_layout_c_api\n @param layout The layout input pointer.\n @param layout_desc The description of layout.\n @return ov_status_e a status code, return OK if successful"]
// Out-parameter constructor: on success *layout receives a newly allocated
// ov_layout_t that must later be released with ov_layout_free.
pub fn ov_layout_create(
layout_desc: *const ::std::os::raw::c_char,
layout: *mut *mut ov_layout_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Free layout object.\n @ingroup ov_layout_c_api\n @param layout will be released."]
pub fn ov_layout_free(layout: *mut ov_layout_t);
}
extern "C" {
#[doc = " @brief Convert layout object to a readable string.\n @ingroup ov_layout_c_api\n @param layout will be converted.\n @return string that describes the layout content."]
// NOTE(review): returns a library-allocated string; presumably it should be
// released with ov_free — confirm against the C header.
pub fn ov_layout_to_string(layout: *const ov_layout_t) -> *const ::std::os::raw::c_char;
}
extern "C" {
#[doc = " @brief Check this rank whether is dynamic\n @ingroup ov_rank_c_api\n @param rank The rank pointer that will be checked.\n @return bool The return value."]
pub fn ov_rank_is_dynamic(rank: ov_rank_t) -> bool;
}
// --- Static shape construction / destruction ---
extern "C" {
#[doc = " @brief Initialize a fully shape object, allocate space for its dimensions and set its content id dims is not null.\n @ingroup ov_shape_c_api\n @param rank The rank value for this object, it should be more than 0(>0)\n @param dims The dimensions data for this shape object, it's size should be equal to rank.\n @param shape The input/output shape object pointer.\n @return ov_status_e The return status code."]
// `dims` must point to at least `rank` i64 values (per the doc above); the
// shape's internal storage is allocated here and released by ov_shape_free.
pub fn ov_shape_create(rank: i64, dims: *const i64, shape: *mut ov_shape_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Free a shape object's internal memory.\n @ingroup ov_shape_c_api\n @param shape The input shape object pointer.\n @return ov_status_e The return status code."]
pub fn ov_shape_free(shape: *mut ov_shape_t) -> ov_status_e;
}
// --- Partial shape: creation, conversion and queries ---
extern "C" {
#[doc = " @brief Initialze a partial shape with static rank and dynamic dimension.\n @ingroup ov_partial_shape_c_api\n @param rank support static rank.\n @param dims support dynamic and static dimension.\n Static rank, but dynamic dimensions on some or all axes.\n Examples: `{1,2,?,4}` or `{?,?,?}` or `{1,2,-1,4}`\n Static rank, and static dimensions on all axes.\n Examples: `{1,2,3,4}` or `{6}` or `{}`\n\n @return Status code of the operation: OK(0) for success."]
pub fn ov_partial_shape_create(
rank: i64,
dims: *const ov_dimension_t,
partial_shape_obj: *mut ov_partial_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Initialze a partial shape with dynamic rank and dynamic dimension.\n @ingroup ov_partial_shape_c_api\n @param rank support dynamic and static rank.\n @param dims support dynamic and static dimension.\n Dynamic rank:\n Example: `?`\n Static rank, but dynamic dimensions on some or all axes.\n Examples: `{1,2,?,4}` or `{?,?,?}` or `{1,2,-1,4}`\n Static rank, and static dimensions on all axes.\n Examples: `{1,2,3,4}` or `{6}` or `{}\"`\n\n @return Status code of the operation: OK(0) for success."]
// Variant of ov_partial_shape_create taking an ov_rank_t (which may itself be
// dynamic) instead of a plain i64 rank.
pub fn ov_partial_shape_create_dynamic(
rank: ov_rank_t,
dims: *const ov_dimension_t,
partial_shape_obj: *mut ov_partial_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Initialize a partial shape with static rank and static dimension.\n @ingroup ov_partial_shape_c_api\n @param rank support static rank.\n @param dims support static dimension.\n Static rank, and static dimensions on all axes.\n Examples: `{1,2,3,4}` or `{6}` or `{}`\n\n @return Status code of the operation: OK(0) for success."]
pub fn ov_partial_shape_create_static(
rank: i64,
dims: *const i64,
partial_shape_obj: *mut ov_partial_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release internal memory allocated in partial shape.\n @ingroup ov_partial_shape_c_api\n @param partial_shape The object's internal memory will be released.\n @return Status code of the operation: OK(0) for success."]
// NOTE(review): the doc text above mentions a return status, but this binding
// returns nothing — the generated Doxygen text appears stale relative to the
// actual C signature.
pub fn ov_partial_shape_free(partial_shape: *mut ov_partial_shape_t);
}
extern "C" {
#[doc = " @brief Convert partial shape without dynamic data to a static shape.\n @ingroup ov_partial_shape_c_api\n @param partial_shape The partial_shape pointer.\n @param shape The shape pointer.\n @return Status code of the operation: OK(0) for success."]
// Note: the partial shape is passed by value here, not by pointer.
pub fn ov_partial_shape_to_shape(
partial_shape: ov_partial_shape_t,
shape: *mut ov_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Convert shape to partial shape.\n @ingroup ov_partial_shape_c_api\n @param shape The shape pointer.\n @param partial_shape The partial_shape pointer.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_shape_to_partial_shape(
shape: ov_shape_t,
partial_shape: *mut ov_partial_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Check this partial_shape whether is dynamic\n @ingroup ov_partial_shape_c_api\n @param partial_shape The partial_shape pointer.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_partial_shape_is_dynamic(partial_shape: ov_partial_shape_t) -> bool;
}
extern "C" {
#[doc = " @brief Helper function, convert a partial shape to readable string.\n @ingroup ov_partial_shape_c_api\n @param partial_shape The partial_shape pointer.\n @return A string reprensts partial_shape's content."]
// NOTE(review): returns a library-allocated string; presumably release it with
// ov_free — confirm against the C header.
pub fn ov_partial_shape_to_string(
partial_shape: ov_partial_shape_t,
) -> *const ::std::os::raw::c_char;
}
// --- Node/port accessors: shape, name and element type of model ports ---
extern "C" {
#[doc = " @brief Get the shape of port object.\n @ingroup ov_node_c_api\n @param port A pointer to ov_output_const_port_t.\n @param tensor_shape tensor shape.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_const_port_get_shape(
port: *const ov_output_const_port_t,
tensor_shape: *mut ov_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the shape of port object.\n @ingroup ov_node_c_api\n @param port A pointer to ov_output_port_t.\n @param tensor_shape tensor shape.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_port_get_shape(
port: *const ov_output_port_t,
tensor_shape: *mut ov_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the tensor name of port.\n @ingroup ov_node_c_api\n @param port A pointer to the ov_output_const_port_t.\n @param tensor_name A pointer to the tensor name.\n @return Status code of the operation: OK(0) for success."]
// Outputs a library-allocated C string into *tensor_name; presumably released
// with ov_free — confirm against the C header.
pub fn ov_port_get_any_name(
port: *const ov_output_const_port_t,
tensor_name: *mut *mut ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the partial shape of port.\n @ingroup ov_node_c_api\n @param port A pointer to the ov_output_const_port_t.\n @param partial_shape Partial shape.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_port_get_partial_shape(
port: *const ov_output_const_port_t,
partial_shape: *mut ov_partial_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the tensor type of port.\n @ingroup ov_node_c_api\n @param port A pointer to the ov_output_const_port_t.\n @param tensor_type tensor type.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_port_get_element_type(
port: *const ov_output_const_port_t,
tensor_type: *mut ov_element_type_e,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief free port object\n @ingroup ov_node_c_api\n @param port The pointer to the instance of the ov_output_port_t to free."]
pub fn ov_output_port_free(port: *mut ov_output_port_t);
}
extern "C" {
#[doc = " @brief free const port\n @ingroup ov_node_c_api\n @param port The pointer to the instance of the ov_output_const_port_t to free."]
pub fn ov_output_const_port_free(port: *mut ov_output_const_port_t);
}
// --- Tensor: creation, shape/type/size queries and data access ---
extern "C" {
#[doc = " @brief Constructs Tensor using element type, shape and external host ptr.\n @ingroup ov_tensor_c_api\n @param type Tensor element type\n @param shape Tensor shape\n @param host_ptr Pointer to pre-allocated host memory\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
// The tensor wraps caller-owned memory; `host_ptr` must stay valid for the
// tensor's lifetime (the binding cannot enforce this — caller's contract).
pub fn ov_tensor_create_from_host_ptr(
type_: ov_element_type_e,
shape: ov_shape_t,
host_ptr: *mut ::std::os::raw::c_void,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Constructs Tensor using element type and shape. Allocate internal host storage using default allocator\n @ingroup ov_tensor_c_api\n @param type Tensor element type\n @param shape Tensor shape\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
pub fn ov_tensor_create(
type_: ov_element_type_e,
shape: ov_shape_t,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set new shape for tensor, deallocate/allocate if new total size is bigger than previous one.\n @ingroup ov_tensor_c_api\n @param shape Tensor shape\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
pub fn ov_tensor_set_shape(tensor: *mut ov_tensor_t, shape: ov_shape_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get shape for tensor.\n @ingroup ov_tensor_c_api\n @param shape Tensor shape\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
pub fn ov_tensor_get_shape(tensor: *const ov_tensor_t, shape: *mut ov_shape_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get type for tensor.\n @ingroup ov_tensor_c_api\n @param type Tensor element type\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
pub fn ov_tensor_get_element_type(
tensor: *const ov_tensor_t,
type_: *mut ov_element_type_e,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief the total number of elements (a product of all the dims or 1 for scalar).\n @ingroup ov_tensor_c_api\n @param elements_size number of elements\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
pub fn ov_tensor_get_size(tensor: *const ov_tensor_t, elements_size: *mut usize)
-> ov_status_e;
}
extern "C" {
#[doc = " @brief the size of the current Tensor in bytes.\n @ingroup ov_tensor_c_api\n @param byte_size the size of the current Tensor in bytes.\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
pub fn ov_tensor_get_byte_size(
tensor: *const ov_tensor_t,
byte_size: *mut usize,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Provides an access to the underlaying host memory.\n @ingroup ov_tensor_c_api\n @param data A point to host memory.\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
// *data receives a raw pointer into the tensor's host buffer; it is only
// valid while the tensor is alive and its shape/storage is unchanged.
pub fn ov_tensor_data(
tensor: *const ov_tensor_t,
data: *mut *mut ::std::os::raw::c_void,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Free ov_tensor_t.\n @ingroup ov_tensor_c_api\n @param tensor A point to ov_tensor_t"]
pub fn ov_tensor_free(tensor: *mut ov_tensor_t);
}
// --- Infer request: tensor binding, sync/async execution and profiling ---
extern "C" {
#[doc = " @brief Set an input/output tensor to infer on by the name of tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor_name Name of the input or output tensor.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_set_tensor(
infer_request: *mut ov_infer_request_t,
tensor_name: *const ::std::os::raw::c_char,
tensor: *const ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set an input/output tensor to infer request for the port.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param port Port of the input or output tensor, which can be got by calling ov_model_t/ov_compiled_model_t interface.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_set_tensor_by_port(
infer_request: *mut ov_infer_request_t,
port: *const ov_output_port_t,
tensor: *const ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set an input/output tensor to infer request for the port.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param port Const port of the input or output tensor, which can be got by call interface from\n ov_model_t/ov_compiled_model_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_set_tensor_by_const_port(
infer_request: *mut ov_infer_request_t,
port: *const ov_output_const_port_t,
tensor: *const ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set an input tensor to infer on by the index of tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param idx Index of the input port. If @p idx is greater than the number of model inputs, an error will return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_set_input_tensor_by_index(
infer_request: *mut ov_infer_request_t,
idx: usize,
tensor: *const ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set an input tensor for the model with single input to infer on.\n @note If model has several inputs, an error will return.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_set_input_tensor(
infer_request: *mut ov_infer_request_t,
tensor: *const ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set an output tensor to infer by the index of output tensor.\n @note Index of the output preserved accross ov_model_t, ov_compiled_model_t.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param idx Index of the output tensor.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_set_output_tensor_by_index(
infer_request: *mut ov_infer_request_t,
idx: usize,
tensor: *const ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set an output tensor to infer models with single output.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_set_output_tensor(
infer_request: *mut ov_infer_request_t,
tensor: *const ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an input/output tensor by the name of tensor.\n @note If model has several outputs, an error will return.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor_name Name of the input or output tensor to get.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
// The get_* family below outputs a *mut ov_tensor_t into *tensor; presumably
// the caller releases it with ov_tensor_free — confirm against the C header.
pub fn ov_infer_request_get_tensor(
infer_request: *const ov_infer_request_t,
tensor_name: *const ::std::os::raw::c_char,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an input/output tensor by const port.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param port Port of the tensor to get. @p port is not found, an error will return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_get_tensor_by_const_port(
infer_request: *const ov_infer_request_t,
port: *const ov_output_const_port_t,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an input/output tensor by port.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param port Port of the tensor to get. @p port is not found, an error will return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_get_tensor_by_port(
infer_request: *const ov_infer_request_t,
port: *const ov_output_port_t,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an input tensor by the index of input tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param idx Index of the tensor to get. @p idx. If the tensor with the specified @p idx is not found, an error will\n return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_get_input_tensor_by_index(
infer_request: *const ov_infer_request_t,
idx: usize,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an input tensor from the model with only one input tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_get_input_tensor(
infer_request: *const ov_infer_request_t,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an output tensor by the index of output tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param idx Index of the tensor to get. @p idx. If the tensor with the specified @p idx is not found, an error will\n return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_get_output_tensor_by_index(
infer_request: *const ov_infer_request_t,
idx: usize,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an output tensor from the model with only one output tensor.\n @note If model has several outputs, an error will return.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_get_output_tensor(
infer_request: *const ov_infer_request_t,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Infer specified input(s) in synchronous mode.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_infer(infer_request: *mut ov_infer_request_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Cancel inference request.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_cancel(infer_request: *mut ov_infer_request_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Start inference of specified input(s) in asynchronous mode.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_start_async(infer_request: *mut ov_infer_request_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Wait for the result to become available.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_wait(infer_request: *mut ov_infer_request_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Waits for the result to become available. Blocks until the specified timeout has elapsed or the result\n becomes available, whichever comes first.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param timeout Maximum duration, in milliseconds, to block for.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_infer_request_wait_for(
infer_request: *mut ov_infer_request_t,
timeout: i64,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set callback function, which will be called when inference is done.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param callback A function to be called.\n @return Status code of the operation: OK(0) for success."]
// NOTE(review): the callback (and any captured state inside ov_callback_t)
// must outlive the infer request / async run — the binding cannot enforce it.
pub fn ov_infer_request_set_callback(
infer_request: *mut ov_infer_request_t,
callback: *const ov_callback_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_infer_request_t.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t to free memory."]
pub fn ov_infer_request_free(infer_request: *mut ov_infer_request_t);
}
extern "C" {
#[doc = " @brief Query performance measures per layer to identify the most time consuming operation.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param profiling_infos Vector of profiling information for operations in a model.\n @return Status code of the operation: OK(0) for success."]
// The list's internal storage is released with ov_profiling_info_list_free.
pub fn ov_infer_request_get_profiling_info(
infer_request: *const ov_infer_request_t,
profiling_infos: *mut ov_profiling_info_list_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_profiling_info_list_t.\n @ingroup ov_infer_request_c_api\n @param profiling_infos A pointer to the ov_profiling_info_list_t to free memory."]
pub fn ov_profiling_info_list_free(profiling_infos: *mut ov_profiling_info_list_t);
}
// --- Model: port lookup, introspection and reshape ---
extern "C" {
#[doc = " @brief Release the memory allocated by ov_model_t.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t to free memory."]
pub fn ov_model_free(model: *mut ov_model_t);
}
extern "C" {
#[doc = " @brief Get a const input port of ov_model_t,which only support single input model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
// Port lookup functions below output a newly obtained port into the out
// parameter; presumably released via ov_output_(const_)port_free — confirm.
pub fn ov_model_const_input(
model: *const ov_model_t,
input_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a const input port of ov_model_t by name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name The name of input tensor.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_const_input_by_name(
model: *const ov_model_t,
tensor_name: *const ::std::os::raw::c_char,
input_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a const input port of ov_model_t by port index.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param index input tensor index.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_const_input_by_index(
model: *const ov_model_t,
index: usize,
input_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get single input port of ov_model_t, which only support single input model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param input_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_input(
model: *const ov_model_t,
input_port: *mut *mut ov_output_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an input port of ov_model_t by name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name input tensor name (char *).\n @param input_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_input_by_name(
model: *const ov_model_t,
tensor_name: *const ::std::os::raw::c_char,
input_port: *mut *mut ov_output_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an input port of ov_model_t by port index.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param index input tensor index.\n @param input_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_input_by_index(
model: *const ov_model_t,
index: usize,
input_port: *mut *mut ov_output_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a single const output port of ov_model_t, which only support single output model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_const_output(
model: *const ov_model_t,
output_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a const output port of ov_model_t by port index.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param index input tensor index.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_const_output_by_index(
model: *const ov_model_t,
index: usize,
output_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a const output port of ov_model_t by name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name input tensor name (char *).\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_const_output_by_name(
model: *const ov_model_t,
tensor_name: *const ::std::os::raw::c_char,
output_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a single output port of ov_model_t, which only support single output model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_output(
model: *const ov_model_t,
output_port: *mut *mut ov_output_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an output port of ov_model_t by port index.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param index input tensor index.\n @param output_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_output_by_index(
model: *const ov_model_t,
index: usize,
output_port: *mut *mut ov_output_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get an output port of ov_model_t by name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name output tensor name (char *).\n @param output_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_output_by_name(
model: *const ov_model_t,
tensor_name: *const ::std::os::raw::c_char,
output_port: *mut *mut ov_output_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the input size of ov_model_t.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param input_size the model's input size.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_inputs_size(model: *const ov_model_t, input_size: *mut usize) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the output size of ov_model_t.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param output_size the model's output size.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_outputs_size(model: *const ov_model_t, output_size: *mut usize) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Returns true if any of the ops defined in the model is dynamic shape.\n @param model A pointer to the ov_model_t.\n @return true if model contains dynamic shapes"]
pub fn ov_model_is_dynamic(model: *const ov_model_t) -> bool;
}
extern "C" {
#[doc = " @brief Do reshape in model with a list of <name, partial shape>.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_names The list of input tensor names.\n @param partialShape A PartialShape list.\n @param size The item count in the list.\n @return Status code of the operation: OK(0) for success."]
// `tensor_names` is an array of C strings and `partial_shapes` an array of
// partial shapes; both must hold at least `size` entries (per the doc above).
pub fn ov_model_reshape(
model: *const ov_model_t,
tensor_names: *mut *const ::std::os::raw::c_char,
partial_shapes: *const ov_partial_shape_t,
size: usize,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Do reshape in model with partial shape for a specified name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name The tensor name of input tensor.\n @param partialShape A PartialShape.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_reshape_input_by_name(
model: *const ov_model_t,
tensor_name: *const ::std::os::raw::c_char,
partial_shape: ov_partial_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Do reshape in model for one node(port 0).\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param partialShape A PartialShape.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_reshape_single_input(
model: *const ov_model_t,
partial_shape: ov_partial_shape_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Do reshape in model with a list of <port id, partial shape>.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param port_indexes The array of port indexes.\n @param partialShape A PartialShape list.\n @param size The item count in the list.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_reshape_by_port_indexes(
model: *const ov_model_t,
port_indexes: *const usize,
partial_shape: *const ov_partial_shape_t,
size: usize,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Do reshape in model with a list of <ov_output_port_t, partial shape>.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param output_ports The ov_output_port_t list.\n @param partialShape A PartialShape list.\n @param size The item count in the list.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_model_reshape_by_ports(
model: *const ov_model_t,
output_ports: *mut *const ov_output_port_t,
partial_shapes: *const ov_partial_shape_t,
size: usize,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Gets the friendly name for a model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param friendly_name the model's friendly name.\n @return Status code of the operation: OK(0) for success."]
// Outputs a library-allocated C string; presumably released with ov_free —
// confirm against the C header.
pub fn ov_model_get_friendly_name(
model: *const ov_model_t,
friendly_name: *mut *mut ::std::os::raw::c_char,
) -> ov_status_e;
}
// --- Remote context: device-side tensor allocation and introspection ---
extern "C" {
#[doc = " @brief Allocates memory tensor in device memory or wraps user-supplied memory handle\n using the specified tensor description and low-level device-specific parameters.\n Returns a pointer to the object that implements the RemoteTensor interface.\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t instance.\n @param type Defines the element type of the tensor.\n @param shape Defines the shape of the tensor.\n @param object_args_size Size of the low-level tensor object parameters.\n @param remote_tensor Pointer to returned ov_tensor_t that contains remote tensor instance.\n @param ... variadic params Contains low-level tensor object parameters.\n @return Status code of the operation: OK(0) for success."]
// Variadic C function: the trailing `...` carries `object_args_size` worth of
// device-specific parameters; calling it from Rust requires matching the C
// varargs ABI exactly (per the Doxygen text above).
pub fn ov_remote_context_create_tensor(
context: *const ov_remote_context_t,
type_: ov_element_type_e,
shape: ov_shape_t,
object_args_size: usize,
remote_tensor: *mut *mut ov_tensor_t,
...
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Returns name of a device on which underlying object is allocated.\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t instance.\n @param device_name Device name will be returned.\n @return Status code of the operation: OK(0) for success."]
// Outputs a library-allocated C string; presumably released with ov_free —
// confirm against the C header.
pub fn ov_remote_context_get_device_name(
context: *const ov_remote_context_t,
device_name: *mut *mut ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Returns a string contains device-specific parameters required for low-level\n operations with the underlying object.\n Parameters include device/context handles, access flags,\n etc. Content of the returned map depends on a remote execution context that is\n currently set on the device (working scenario).\n One actaul example: \"CONTEXT_TYPE OCL OCL_CONTEXT 0x5583b2ec7b40 OCL_QUEUE 0x5583b2e98ff0\"\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t instance.\n @param size The size of param pairs.\n @param params Param name:value list.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_remote_context_get_params(
context: *const ov_remote_context_t,
size: *mut usize,
params: *mut *mut ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief This method is used to create a host tensor object friendly for the device in current context.\n For example, GPU context may allocate USM host memory (if corresponding extension is available),\n which could be more efficient than regular host memory.\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t instance.\n @param type Defines the element type of the tensor.\n @param shape Defines the shape of the tensor.\n @param tensor Pointer to ov_tensor_t that contains host tensor.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_remote_context_create_host_tensor(
context: *const ov_remote_context_t,
type_: ov_element_type_e,
shape: ov_shape_t,
tensor: *mut *mut ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_remote_context_t.\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t to free memory.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_remote_context_free(context: *mut ov_remote_context_t);
}
extern "C" {
#[doc = " @brief Returns a string contains device-specific parameters required for low-level\n operations with underlying object.\n Parameters include device/context/surface/buffer handles, access flags,\n etc. Content of the returned map depends on remote execution context that is\n currently set on the device (working scenario).\n One example: \"MEM_HANDLE:0x559ff6904b00;OCL_CONTEXT:0x559ff71d62f0;SHARED_MEM_TYPE:OCL_BUFFER;\"\n @ingroup ov_remote_context_c_api\n @param tensor Pointer to ov_tensor_t that contains host tensor.\n @param size The size of param pairs.\n @param params Param name:value list.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_remote_tensor_get_params(
tensor: *mut ov_tensor_t,
size: *mut usize,
params: *mut *mut ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Returns name of a device on which underlying object is allocated.\n @ingroup ov_remote_context_c_api\n @param remote_tensor A pointer to the remote tensor instance.\n @param device_name Device name will be return.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_remote_tensor_get_device_name(
remote_tensor: *mut ov_tensor_t,
device_name: *mut *mut ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the input size of ov_compiled_model_t.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param size The compiled_model's input size.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_inputs_size(
compiled_model: *const ov_compiled_model_t,
size: *mut usize,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the single const input port of ov_compiled_model_t, which only support single input model.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_input(
compiled_model: *const ov_compiled_model_t,
input_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a const input port of ov_compiled_model_t by port index.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param index Input index.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_input_by_index(
compiled_model: *const ov_compiled_model_t,
index: usize,
input_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a const input port of ov_compiled_model_t by name.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param name Input tensor name (char *).\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_input_by_name(
compiled_model: *const ov_compiled_model_t,
name: *const ::std::os::raw::c_char,
input_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the output size of ov_compiled_model_t.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param size The compiled_model's output size.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_outputs_size(
compiled_model: *const ov_compiled_model_t,
size: *mut usize,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the single const output port of ov_compiled_model_t, which only support single output model.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_output(
compiled_model: *const ov_compiled_model_t,
output_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a const output port of ov_compiled_model_t by port index.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param index Output index.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_output_by_index(
compiled_model: *const ov_compiled_model_t,
index: usize,
output_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get a const output port of ov_compiled_model_t by name.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param name Output tensor name (char *).\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_output_by_name(
compiled_model: *const ov_compiled_model_t,
name: *const ::std::os::raw::c_char,
output_port: *mut *mut ov_output_const_port_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Gets runtime model information from a device.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param model A pointer to the ov_model_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_get_runtime_model(
compiled_model: *const ov_compiled_model_t,
model: *mut *mut ov_model_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Creates an inference request object used to infer the compiled model.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_create_infer_request(
compiled_model: *const ov_compiled_model_t,
infer_request: *mut *mut ov_infer_request_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Sets a property for a compiled model, acceptable keys can be found in ov_property_key_xxx.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param property_key Property key (char *). Supported property keys please see ov_property.h.\n @param property_value Property value (char *).\n NOTE(review): the upstream C header declares this function variadic (<char *key, char *value> pairs);\n this binding accepts exactly one key/value pair — confirm against the linked library version.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_set_property(
compiled_model: *const ov_compiled_model_t,
property_key: *const ::std::os::raw::c_char,
property_value: *const ::std::os::raw::c_char
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Gets properties for current compiled model.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param property_key Property key.\n @param property_value A pointer to property value; allocated by the library, release with ov_free.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_get_property(
compiled_model: *const ov_compiled_model_t,
property_key: *const ::std::os::raw::c_char,
property_value: *mut *mut ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Exports the current compiled model to an output stream `std::ostream`.\n The exported model can also be imported via the ov::Core::import_model method.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param export_model_path Path to the file.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_compiled_model_export_model(
compiled_model: *const ov_compiled_model_t,
export_model_path: *const ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_compiled_model_t.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t to free memory."]
pub fn ov_compiled_model_free(compiled_model: *mut ov_compiled_model_t);
}
extern "C" {
#[doc = " @brief Returns pointer to device-specific shared context\n on a remote accelerator device that was used to create this CompiledModel.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param context Return context.\n @return Status code of the operation: OK(0) for success.\n"]
pub fn ov_compiled_model_get_context(
compiled_model: *const ov_compiled_model_t,
context: *mut *mut ov_remote_context_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get version of OpenVINO.\n @ingroup ov_core_c_api\n @param version A pointer to the ov_version_t that receives the version; release with ov_version_free.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_get_openvino_version(version: *mut ov_version_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_version_t.\n @ingroup ov_core_c_api\n @param version A pointer to the ov_version_t to free memory."]
pub fn ov_version_free(version: *mut ov_version_t);
}
extern "C" {
#[doc = " @brief Constructs OpenVINO Core instance by default.\n See RegisterPlugins for more details.\n @ingroup ov_core_c_api\n @param core A pointer to the newly created ov_core_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_create(core: *mut *mut ov_core_t) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Constructs OpenVINO Core instance using XML configuration file with devices description.\n See RegisterPlugins for more details.\n @ingroup ov_core_c_api\n @param xml_config_file A path to .xml file with devices to load from. If XML configuration file is not specified,\n then default plugin.xml file will be used.\n @param core A pointer to the newly created ov_core_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_create_with_config(
xml_config_file: *const ::std::os::raw::c_char,
core: *mut *mut ov_core_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Constructs OpenVINO Core instance.\n See RegisterPlugins for more details.\n @ingroup ov_core_c_api\n @param xml_config_file_ws A path to the .xml configuration file, as a wide (unicode) string.\n NOTE(review): this crate aliases wchar_t to c_char, which does not match the platform wchar_t width — verify before use.\n @param core A pointer to the newly created ov_core_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_create_with_config_unicode(
xml_config_file_ws: *const wchar_t,
core: *mut *mut ov_core_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_core_t.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t to free memory."]
pub fn ov_core_free(core: *mut ov_core_t);
}
extern "C" {
#[doc = " @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_path Path to a model.\n @param bin_path Path to a data file.\n For IR format (*.bin):\n * if `bin_path` is empty, will try to read a bin file with the same name as xml and\n * if the bin file with the same name is not found, will load IR without weights.\n For the following file formats the `bin_path` parameter is not used:\n * ONNX format (*.onnx)\n * PDPD (*.pdmodel)\n * TF (*.pb)\n * TFLite (*.tflite)\n @param model A pointer to the newly created model.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_read_model(
core: *const ov_core_t,
model_path: *const ::std::os::raw::c_char,
bin_path: *const ::std::os::raw::c_char,
model: *mut *mut ov_model_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats, path is unicode.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_path Path to a model (wide string).\n @param bin_path Path to a data file (wide string).\n For IR format (*.bin):\n * if `bin_path` is empty, will try to read a bin file with the same name as xml and\n * if the bin file with the same name is not found, will load IR without weights.\n For the following file formats the `bin_path` parameter is not used:\n * ONNX format (*.onnx)\n * PDPD (*.pdmodel)\n * TF (*.pb)\n * TFLite (*.tflite)\n @param model A pointer to the newly created model.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_read_model_unicode(
core: *const ov_core_t,
model_path: *const wchar_t,
bin_path: *const wchar_t,
model: *mut *mut ov_model_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats with models string size.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_str String with a model in IR / ONNX / PDPD / TF / TFLite format, support model string containing\n several null chars.\n @param str_len The length of model string.\n @param weights Shared pointer to a constant tensor with weights.\n @param model A pointer to the newly created model.\n Reading ONNX / PDPD / TF / TFLite models does not support loading weights from the @p weights tensors.\n @note Created model object shares the weights with the @p weights object.\n Thus, do not create @p weights on temporary data that can be freed later, since the model\n constant data will point to an invalid memory.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_read_model_from_memory_buffer(
core: *const ov_core_t,
model_str: *const ::std::os::raw::c_char,
str_len: usize,
weights: *const ov_tensor_t,
model: *mut *mut ov_model_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Creates a compiled model from a source model object.\n Users can create as many compiled models as they need and use\n them simultaneously (up to the limitation of the hardware resources).\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model Model object acquired from Core::read_model.\n @param device_name Name of a device to load a model to.\n @param property_args_size How many properties args will be passed, each property contains 2 args: key and value.\n @param compiled_model A pointer to the newly created compiled_model.\n @param ... property parameter: Optional pack of pairs: <char* property_key, char* property_value> relevant only\n for this load operation. Supported property key please see ov_property.h.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_compile_model(
core: *const ov_core_t,
model: *const ov_model_t,
device_name: *const ::std::os::raw::c_char,
property_args_size: usize,
compiled_model: *mut *mut ov_compiled_model_t,
...
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Reads a model and creates a compiled model from the IR/ONNX/PDPD file.\n This can be more efficient than using the ov_core_read_model_from_XXX + ov_core_compile_model flow,\n especially for cases when caching is enabled and a cached model is available.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_path Path to a model.\n @param device_name Name of a device to load a model to.\n @param property_args_size How many properties args will be passed, each property contains 2 args: key and value.\n @param compiled_model A pointer to the newly created compiled_model.\n @param ... Optional pack of pairs: <char* property_key, char* property_value> relevant only\n for this load operation. Supported property key please see ov_property.h.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_compile_model_from_file(
core: *const ov_core_t,
model_path: *const ::std::os::raw::c_char,
device_name: *const ::std::os::raw::c_char,
property_args_size: usize,
compiled_model: *mut *mut ov_compiled_model_t,
...
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Reads a model and creates a compiled model from the IR/ONNX/PDPD file, path is unicode.\n This can be more efficient than using the ov_core_read_model_from_XXX + ov_core_compile_model flow,\n especially for cases when caching is enabled and a cached model is available.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_path Path to a model (wide string).\n @param device_name Name of a device to load a model to.\n @param property_args_size How many properties args will be passed, each property contains 2 args: key and value.\n @param compiled_model A pointer to the newly created compiled_model.\n @param ... Optional pack of pairs: <char* property_key, char* property_value> relevant only\n for this load operation. Supported property key please see ov_property.h.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_compile_model_from_file_unicode(
core: *const ov_core_t,
model_path: *const wchar_t,
device_name: *const ::std::os::raw::c_char,
property_args_size: usize,
compiled_model: *mut *mut ov_compiled_model_t,
...
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Sets a property for a device, acceptable keys can be found in ov_property_key_xxx.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Name of a device.\n @param property_key Property key (char *). Supported property keys please see ov_property.h.\n @param property_value Property value (char *).\n NOTE(review): the upstream C header declares this function variadic (<char *key, char *value> pairs);\n this binding accepts exactly one key/value pair — confirm against the linked library version.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_set_property(
core: *const ov_core_t,
device_name: *const ::std::os::raw::c_char,
property_key: *const ::std::os::raw::c_char,
property_value: *const ::std::os::raw::c_char
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Gets properties related to device behaviour.\n The method extracts information that can be set via the set_property method.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Name of a device to get a property value.\n @param property_key Property key.\n @param property_value A pointer to property value with string format; allocated by the library, release with ov_free.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_get_property(
core: *const ov_core_t,
device_name: *const ::std::os::raw::c_char,
property_key: *const ::std::os::raw::c_char,
property_value: *mut *mut ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Returns devices available for inference.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param devices A pointer to the ov_available_devices_t instance; release with ov_available_devices_free.\n Core objects go over all registered plugins and ask about available devices.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_get_available_devices(
core: *const ov_core_t,
devices: *mut ov_available_devices_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Releases memory occupied by ov_available_devices_t.\n @ingroup ov_core_c_api\n @param devices A pointer to the ov_available_devices_t instance."]
pub fn ov_available_devices_free(devices: *mut ov_available_devices_t);
}
extern "C" {
#[doc = " @brief Imports a compiled model from the previously exported one.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param content A pointer to content of the exported model.\n @param content_size Number of bytes in the exported network.\n @param device_name Name of a device to import a compiled model for.\n @param compiled_model A pointer to the newly created compiled_model.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_import_model(
core: *const ov_core_t,
content: *const ::std::os::raw::c_char,
content_size: usize,
device_name: *const ::std::os::raw::c_char,
compiled_model: *mut *mut ov_compiled_model_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Returns device plugins version information.\n Device name can be complex and identify multiple devices at once like `HETERO:CPU,GPU`;\n in this case, std::map contains multiple entries, each per device.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Device name to identify a plugin.\n @param versions A pointer to versions corresponding to device_name; release with ov_core_versions_free.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_get_versions_by_device_name(
core: *const ov_core_t,
device_name: *const ::std::os::raw::c_char,
versions: *mut ov_core_version_list_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Releases memory occupied by ov_core_version_list_t.\n @ingroup ov_core_c_api\n @param versions A pointer to the ov_core_version_list_t to free memory."]
pub fn ov_core_versions_free(versions: *mut ov_core_version_list_t);
}
extern "C" {
#[doc = " @brief Creates a new remote shared context object on the specified accelerator device\n using specified plugin-specific low-level device API parameters (device handle, pointer, context, etc.).\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Device name to identify a plugin.\n @param context_args_size How many property args will be for this remote context creation.\n @param context A pointer to the newly created remote context.\n @param ... variadic parameters Actual context property parameter for remote context\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_create_context(
core: *const ov_core_t,
device_name: *const ::std::os::raw::c_char,
context_args_size: usize,
context: *mut *mut ov_remote_context_t,
...
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Creates a compiled model from a source model within a specified remote context.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model Model object acquired from ov_core_read_model.\n @param context A pointer to the newly created remote context.\n @param property_args_size How many args will be for this compiled model.\n @param compiled_model A pointer to the newly created compiled_model.\n @param ... variadic parameters Actual property parameter for remote context\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_compile_model_with_context(
core: *const ov_core_t,
model: *const ov_model_t,
context: *const ov_remote_context_t,
property_args_size: usize,
compiled_model: *mut *mut ov_compiled_model_t,
...
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Gets a pointer to default (plugin-supplied) shared context object for the specified accelerator device.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Name of a device to get a default shared context from.\n @param context A pointer to the referenced remote context.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_core_get_default_context(
core: *const ov_core_t,
device_name: *const ::std::os::raw::c_char,
context: *mut *mut ov_remote_context_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Shut down OpenVINO by deleting all static-duration objects allocated by the library and releasing\n dependent resources.\n @ingroup ov_c_api\n @note This function should be used by advanced user to control unload the resources.\n\n You might want to use this function if you are developing a dynamically-loaded library which should clean up all\n resources after itself when the library is unloaded."]
pub fn ov_shutdown();
}
extern "C" {
#[doc = " @brief Create a ov_preprocess_prepostprocessor_t instance.\n @ingroup ov_prepostprocess_c_api\n @param model A pointer to the ov_model_t.\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_prepostprocessor_create(
model: *const ov_model_t,
preprocess: *mut *mut ov_preprocess_prepostprocessor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_preprocess_prepostprocessor_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t to free memory."]
pub fn ov_preprocess_prepostprocessor_free(preprocess: *mut ov_preprocess_prepostprocessor_t);
}
extern "C" {
#[doc = " @brief Get the input info of ov_preprocess_prepostprocessor_t instance.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_prepostprocessor_get_input_info(
preprocess: *const ov_preprocess_prepostprocessor_t,
preprocess_input_info: *mut *mut ov_preprocess_input_info_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the input info of ov_preprocess_prepostprocessor_t instance by tensor name.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param tensor_name The name of input.\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_prepostprocessor_get_input_info_by_name(
preprocess: *const ov_preprocess_prepostprocessor_t,
tensor_name: *const ::std::os::raw::c_char,
preprocess_input_info: *mut *mut ov_preprocess_input_info_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Get the input info of ov_preprocess_prepostprocessor_t instance by tensor order.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param tensor_index The order of input.\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_prepostprocessor_get_input_info_by_index(
preprocess: *const ov_preprocess_prepostprocessor_t,
tensor_index: usize,
preprocess_input_info: *mut *mut ov_preprocess_input_info_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_preprocess_input_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t to free memory."]
pub fn ov_preprocess_input_info_free(preprocess_input_info: *mut ov_preprocess_input_info_t);
}
extern "C" {
#[doc = " @brief Get a ov_preprocess_input_tensor_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @param preprocess_input_tensor_info A pointer to ov_preprocess_input_tensor_info_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_info_get_tensor_info(
preprocess_input_info: *const ov_preprocess_input_info_t,
preprocess_input_tensor_info: *mut *mut ov_preprocess_input_tensor_info_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_preprocess_input_tensor_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t to free memory."]
pub fn ov_preprocess_input_tensor_info_free(
preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
);
}
extern "C" {
#[doc = " @brief Get a ov_preprocess_preprocess_steps_t.\n @ingroup ov_prepostprocess_c_api\n @param ov_preprocess_input_info_t A pointer to the ov_preprocess_input_info_t.\n @param preprocess_input_steps A pointer to ov_preprocess_preprocess_steps_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_info_get_preprocess_steps(
preprocess_input_info: *const ov_preprocess_input_info_t,
preprocess_input_steps: *mut *mut ov_preprocess_preprocess_steps_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Release the memory allocated by ov_preprocess_preprocess_steps_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_steps A pointer to the ov_preprocess_preprocess_steps_t to free memory."]
pub fn ov_preprocess_preprocess_steps_free(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
);
}
extern "C" {
#[doc = " @brief Add resize operation to model's dimensions.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param resize_algorithm A ov_preprocess_resizeAlgorithm instance\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_preprocess_steps_resize(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
resize_algorithm: ov_preprocess_resize_algorithm_e,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Add scale preprocess operation. Divide each element of input by specified value.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param value Scaling value\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_preprocess_steps_scale(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
value: f32,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Add mean preprocess operation. Subtract specified value from each element of input.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param value Value to subtract from each element.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_preprocess_steps_mean(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
value: f32,
) -> ov_status_e;
}
extern "C" {
// `begin` and `end` are caller-owned arrays of `begin_size` / `end_size` i32 coordinates;
// they are read by the C side, so they must stay valid for the duration of the call.
#[doc = " @brief Crop input tensor between begin and end coordinates.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param begin Pointer to begin indexes for input tensor cropping.\n Negative values represent counting elements from the end of input tensor\n @param begin_size The size of begin array\n @param end Pointer to end indexes for input tensor cropping.\n End indexes are exclusive, which means values including end edge are not included in the output slice.\n Negative values represent counting elements from the end of input tensor\n @param end_size The size of end array\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_preprocess_steps_crop(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
begin: *mut i32,
begin_size: i32,
end: *mut i32,
end_size: i32,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Add 'convert layout' operation to specified layout.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param layout A pointer to ov_layout_t describing the target layout.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_preprocess_steps_convert_layout(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
layout: *mut ov_layout_t,
) -> ov_status_e;
}
extern "C" {
// Takes no extra arguments: the operation reverses the channel order of the input
// (e.g. RGB<->BGR per the OpenVINO C API docs — confirm exact semantics upstream).
#[doc = " @brief Reverse channels operation.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_preprocess_steps_reverse_channels(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set ov_preprocess_input_tensor_info_t precision.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param element_type The element type to set (passed by value, not by pointer).\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_tensor_info_set_element_type(
preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
element_type: ov_element_type_e,
) -> ov_status_e;
}
extern "C" {
// `colorFormat` keeps the C header's camelCase name so the binding stays in sync with upstream.
#[doc = " @brief Set ov_preprocess_input_tensor_info_t color format.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param colorFormat The enumerate of colorFormat\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_tensor_info_set_color_format(
preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
colorFormat: ov_color_format_e,
) -> ov_status_e;
}
extern "C" {
// Variadic C function: per the doc text, callers pass `sub_names_size` trailing
// plane-name arguments (e.g. "Y", "UV") — calling it from Rust requires C-compatible varargs.
#[doc = " @brief Set ov_preprocess_input_tensor_info_t color format with subname.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param colorFormat The enumerate of colorFormat\n @param sub_names_size The size of sub_names\n @param ... variadic params sub_names Optional list of sub-names assigned for each plane (e.g. \"Y\", \"UV\").\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_tensor_info_set_color_format_with_subname(
preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
colorFormat: ov_color_format_e,
sub_names_size: usize,
...
) -> ov_status_e;
}
extern "C" {
// Height and width are plain usize values; the handle pointer must be valid and non-null.
#[doc = " @brief Set ov_preprocess_input_tensor_info_t spatial_static_shape.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param input_height The height of input\n @param input_width The width of input\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_tensor_info_set_spatial_static_shape(
preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
input_height: usize,
input_width: usize,
) -> ov_status_e;
}
extern "C" {
// `mem_type` is a NUL-terminated C string; it is read, not retained ownership of, by the callee
// (presumably — confirm against the OpenVINO C API ownership rules).
#[doc = " @brief Set ov_preprocess_input_tensor_info_t memory type.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param mem_type Memory type. Refer to ov_remote_context.h to get memory type string info.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_tensor_info_set_memory_type(
preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
mem_type: *const ::std::os::raw::c_char,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Convert ov_preprocess_preprocess_steps_t element type.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to the ov_preprocess_preprocess_steps_t.\n @param element_type preprocess input element type.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_preprocess_steps_convert_element_type(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
element_type: ov_element_type_e,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Convert ov_preprocess_preprocess_steps_t color.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to the ov_preprocess_preprocess_steps_t.\n @param colorFormat The enumerate of colorFormat.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_preprocess_steps_convert_color(
preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
colorFormat: ov_color_format_e,
) -> ov_status_e;
}
extern "C" {
// The tensor is only read (`*const`); the caller keeps ownership of it.
#[doc = " @brief Helper function to reuse element type and shape from user's created tensor.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param tensor A pointer to ov_tensor_t whose element type and shape are copied.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_tensor_info_set_from(
preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
tensor: *const ov_tensor_t,
) -> ov_status_e;
}
extern "C" {
#[doc = " @brief Set ov_preprocess_input_tensor_info_t layout.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param layout A pointer to ov_layout_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_tensor_info_set_layout(
preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
layout: *mut ov_layout_t,
) -> ov_status_e;
}
extern "C" {
// Out-param (*mut *mut): on success *preprocess_output_info receives a handle;
// release it with ov_preprocess_output_info_free (presumably — confirm ownership upstream).
#[doc = " @brief Get the output info of ov_preprocess_output_info_t instance.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_prepostprocessor_get_output_info(
preprocess: *const ov_preprocess_prepostprocessor_t,
preprocess_output_info: *mut *mut ov_preprocess_output_info_t,
) -> ov_status_e;
}
extern "C" {
// Indexed variant of ov_preprocess_prepostprocessor_get_output_info.
#[doc = " @brief Get the output info of ov_preprocess_output_info_t instance.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param tensor_index The tensor index\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_prepostprocessor_get_output_info_by_index(
preprocess: *const ov_preprocess_prepostprocessor_t,
tensor_index: usize,
preprocess_output_info: *mut *mut ov_preprocess_output_info_t,
) -> ov_status_e;
}
extern "C" {
// Name-keyed variant; `tensor_name` is a NUL-terminated C string.
#[doc = " @brief Get the output info of ov_preprocess_output_info_t instance.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param tensor_name The name of the output tensor to look up.\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_prepostprocessor_get_output_info_by_name(
preprocess: *const ov_preprocess_prepostprocessor_t,
tensor_name: *const ::std::os::raw::c_char,
preprocess_output_info: *mut *mut ov_preprocess_output_info_t,
) -> ov_status_e;
}
extern "C" {
// Destructor for handles produced by the ov_preprocess_prepostprocessor_get_output_info* functions.
#[doc = " @brief Release the memory allocated by ov_preprocess_output_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t to free memory."]
pub fn ov_preprocess_output_info_free(preprocess_output_info: *mut ov_preprocess_output_info_t);
}
extern "C" {
// Signature returns an *output* tensor info handle (original doc said "input" — corrected below).
#[doc = " @brief Get a ov_preprocess_output_tensor_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t.\n @param preprocess_output_tensor_info A pointer to the ov_preprocess_output_tensor_info_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_output_info_get_tensor_info(
preprocess_output_info: *const ov_preprocess_output_info_t,
preprocess_output_tensor_info: *mut *mut ov_preprocess_output_tensor_info_t,
) -> ov_status_e;
}
extern "C" {
// Destructor for handles produced by ov_preprocess_output_info_get_tensor_info.
#[doc = " @brief Release the memory allocated by ov_preprocess_output_tensor_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_output_tensor_info A pointer to the ov_preprocess_output_tensor_info_t to free memory."]
pub fn ov_preprocess_output_tensor_info_free(
preprocess_output_tensor_info: *mut ov_preprocess_output_tensor_info_t,
);
}
extern "C" {
// Operates on *output* tensor info (original doc said "input_tensor_info precesion" — corrected below).
#[doc = " @brief Set ov_preprocess_output_tensor_info_t precision.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_output_tensor_info A pointer to the ov_preprocess_output_tensor_info_t.\n @param element_type The element type to set (passed by value).\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_output_set_element_type(
preprocess_output_tensor_info: *mut ov_preprocess_output_tensor_info_t,
element_type: ov_element_type_e,
) -> ov_status_e;
}
extern "C" {
// Out-param (*mut *mut): on success *preprocess_input_model_info receives a handle;
// release it with ov_preprocess_input_model_info_free (presumably — confirm ownership upstream).
#[doc = " @brief Get current input model information.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @param preprocess_input_model_info A pointer to the ov_preprocess_input_model_info_t\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_info_get_model_info(
preprocess_input_info: *const ov_preprocess_input_info_t,
preprocess_input_model_info: *mut *mut ov_preprocess_input_model_info_t,
) -> ov_status_e;
}
extern "C" {
// Destructor for handles produced by ov_preprocess_input_info_get_model_info.
#[doc = " @brief Release the memory allocated by ov_preprocess_input_model_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_model_info A pointer to the ov_preprocess_input_model_info_t to free memory."]
pub fn ov_preprocess_input_model_info_free(
preprocess_input_model_info: *mut ov_preprocess_input_model_info_t,
);
}
extern "C" {
#[doc = " @brief Set layout for model's input tensor.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_model_info A pointer to the ov_preprocess_input_model_info_t\n @param layout A pointer to ov_layout_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_input_model_info_set_layout(
preprocess_input_model_info: *mut ov_preprocess_input_model_info_t,
layout: *mut ov_layout_t,
) -> ov_status_e;
}
extern "C" {
// Out-param (*mut *mut): on success *model receives the built ov_model_t handle.
#[doc = " @brief Adds pre/post-processing operations to function passed in constructor.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param model A pointer to the ov_model_t.\n @return Status code of the operation: OK(0) for success."]
pub fn ov_preprocess_prepostprocessor_build(
preprocess: *const ov_preprocess_prepostprocessor_t,
model: *mut *mut ov_model_t,
) -> ov_status_e;
}
}