use super::types::*;
use crate::link;
type wchar_t = ::std::os::raw::c_char;
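// NOTE: `wchar_t` is aliased to `c_char` in this binding, so the `*_unicode`
// functions declared below take narrow, NUL-terminated byte strings on the Rust side.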
link! {

/* automatically generated by rust-bindgen 0.68.1 */

extern "C" {
    #[doc = " @brief Print the error info.\n @ingroup ov_base_c_api\n @param ov_status_e a status code."]
    pub fn ov_get_error_info(status: ov_status_e) -> *const ::std::os::raw::c_char;
}
extern "C" {
    #[doc = " @brief free char\n @ingroup ov_base_c_api\n @param content The pointer to the char to free."]
    pub fn ov_free(content: *const ::std::os::raw::c_char);
}
extern "C" {
    #[doc = " @brief Get the last error msg.\n @ingroup ov_base_c_api"]
    pub fn ov_get_last_err_msg() -> *const ::std::os::raw::c_char;
}
extern "C" {
    #[doc = " @brief Check this dimension whether is dynamic\n @ingroup ov_dimension_c_api\n @param dim The dimension pointer that will be checked.\n @return Boolean, true is dynamic and false is static."]
    pub fn ov_dimension_is_dynamic(dim: ov_dimension_t) -> bool;
}
extern "C" {
    #[doc = " @brief Create a layout object.\n @ingroup ov_layout_c_api\n @param layout The layout input pointer.\n @param layout_desc The description of layout.\n @return ov_status_e a status code, return OK if successful"]
    pub fn ov_layout_create(
        layout_desc: *const ::std::os::raw::c_char,
        layout: *mut *mut ov_layout_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Free layout object.\n @ingroup ov_layout_c_api\n @param layout will be released."]
    pub fn ov_layout_free(layout: *mut ov_layout_t);
}
extern "C" {
    #[doc = " @brief Convert layout object to a readable string.\n @ingroup ov_layout_c_api\n @param layout will be converted.\n @return string that describes the layout content."]
    pub fn ov_layout_to_string(layout: *const ov_layout_t) -> *const ::std::os::raw::c_char;
}
extern "C" {
    #[doc = " @brief Check this rank whether is dynamic\n @ingroup ov_rank_c_api\n @param rank The rank pointer that will be checked.\n @return bool The return value."]
    pub fn ov_rank_is_dynamic(rank: ov_rank_t) -> bool;
}
extern "C" {
    #[doc = " @brief Initialize a fully shape object, allocate space for its dimensions and set its content id dims is not null.\n @ingroup ov_shape_c_api\n @param rank The rank value for this object, it should be more than 0(>0)\n @param dims The dimensions data for this shape object, it's size should be equal to rank.\n @param shape The input/output shape object pointer.\n @return ov_status_e The return status code."]
    pub fn ov_shape_create(rank: i64, dims: *const i64, shape: *mut ov_shape_t) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Free a shape object's internal memory.\n @ingroup ov_shape_c_api\n @param shape The input shape object pointer.\n @return ov_status_e The return status code."]
    pub fn ov_shape_free(shape: *mut ov_shape_t) -> ov_status_e;
}
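// Usage sketch (illustrative only, not compiled as part of this file): creating a
// static shape and releasing it. Error handling is reduced to comparing the status
// against 0 (OK); the exact `ov_status_e` constants live in `super::types` and are
// assumed here rather than shown.
//
//     let dims: [i64; 4] = [1, 3, 224, 224];
//     let mut shape = std::mem::MaybeUninit::<ov_shape_t>::uninit();
//     unsafe {
//         let status = ov_shape_create(dims.len() as i64, dims.as_ptr(), shape.as_mut_ptr());
//         assert_eq!(status, 0); // OK
//         let mut shape = shape.assume_init();
//         // ... pass `shape` to e.g. ov_tensor_create ...
//         ov_shape_free(&mut shape);
//     }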
extern "C" {
    #[doc = " @brief Initialze a partial shape with static rank and dynamic dimension.\n @ingroup ov_partial_shape_c_api\n @param rank support static rank.\n @param dims support dynamic and static dimension.\n  Static rank, but dynamic dimensions on some or all axes.\n     Examples: `{1,2,?,4}` or `{?,?,?}` or `{1,2,-1,4}`\n  Static rank, and static dimensions on all axes.\n     Examples: `{1,2,3,4}` or `{6}` or `{}`\n\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_partial_shape_create(
        rank: i64,
        dims: *const ov_dimension_t,
        partial_shape_obj: *mut ov_partial_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Initialze a partial shape with dynamic rank and dynamic dimension.\n @ingroup ov_partial_shape_c_api\n @param rank support dynamic and static rank.\n @param dims support dynamic and static dimension.\n  Dynamic rank:\n     Example: `?`\n  Static rank, but dynamic dimensions on some or all axes.\n     Examples: `{1,2,?,4}` or `{?,?,?}` or `{1,2,-1,4}`\n  Static rank, and static dimensions on all axes.\n     Examples: `{1,2,3,4}` or `{6}` or `{}\"`\n\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_partial_shape_create_dynamic(
        rank: ov_rank_t,
        dims: *const ov_dimension_t,
        partial_shape_obj: *mut ov_partial_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Initialize a partial shape with static rank and static dimension.\n @ingroup ov_partial_shape_c_api\n @param rank support static rank.\n @param dims support static dimension.\n  Static rank, and static dimensions on all axes.\n     Examples: `{1,2,3,4}` or `{6}` or `{}`\n\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_partial_shape_create_static(
        rank: i64,
        dims: *const i64,
        partial_shape_obj: *mut ov_partial_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release internal memory allocated in partial shape.\n @ingroup ov_partial_shape_c_api\n @param partial_shape The object's internal memory will be released.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_partial_shape_free(partial_shape: *mut ov_partial_shape_t);
}
extern "C" {
    #[doc = " @brief Convert partial shape without dynamic data to a static shape.\n @ingroup ov_partial_shape_c_api\n @param partial_shape The partial_shape pointer.\n @param shape The shape pointer.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_partial_shape_to_shape(
        partial_shape: ov_partial_shape_t,
        shape: *mut ov_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Convert shape to partial shape.\n @ingroup ov_partial_shape_c_api\n @param shape The shape pointer.\n @param partial_shape The partial_shape pointer.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_shape_to_partial_shape(
        shape: ov_shape_t,
        partial_shape: *mut ov_partial_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Check this partial_shape whether is dynamic\n @ingroup ov_partial_shape_c_api\n @param partial_shape The partial_shape pointer.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_partial_shape_is_dynamic(partial_shape: ov_partial_shape_t) -> bool;
}
extern "C" {
    #[doc = " @brief Helper function, convert a partial shape to readable string.\n @ingroup ov_partial_shape_c_api\n @param partial_shape The partial_shape pointer.\n @return A string reprensts partial_shape's content."]
    pub fn ov_partial_shape_to_string(
        partial_shape: ov_partial_shape_t,
    ) -> *const ::std::os::raw::c_char;
}
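// Usage sketch (illustrative only): building a partial shape with a static rank and
// one dynamic dimension, then printing it. This assumes `ov_dimension_t` exposes the
// `min`/`max` fields of the OpenVINO C API, where a fully dynamic dimension is
// commonly written as {-1, -1}.
//
//     let dims = [
//         ov_dimension_t { min: 1, max: 1 },   // batch fixed to 1
//         ov_dimension_t { min: -1, max: -1 }, // dynamic dimension
//     ];
//     let mut ps = std::mem::MaybeUninit::<ov_partial_shape_t>::uninit();
//     unsafe {
//         let status = ov_partial_shape_create(dims.len() as i64, dims.as_ptr(), ps.as_mut_ptr());
//         assert_eq!(status, 0);
//         let mut ps = ps.assume_init();
//         let s = ov_partial_shape_to_string(ps);
//         println!("{}", std::ffi::CStr::from_ptr(s).to_string_lossy());
//         ov_free(s);
//         ov_partial_shape_free(&mut ps);
//     }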
extern "C" {
    #[doc = " @brief Get the shape of port object.\n @ingroup ov_node_c_api\n @param port A pointer to ov_output_const_port_t.\n @param tensor_shape tensor shape.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_const_port_get_shape(
        port: *const ov_output_const_port_t,
        tensor_shape: *mut ov_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the shape of port object.\n @ingroup ov_node_c_api\n @param port A pointer to ov_output_port_t.\n @param tensor_shape tensor shape.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_port_get_shape(
        port: *const ov_output_port_t,
        tensor_shape: *mut ov_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the tensor name of port.\n @ingroup ov_node_c_api\n @param port A pointer to the ov_output_const_port_t.\n @param tensor_name A pointer to the tensor name.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_port_get_any_name(
        port: *const ov_output_const_port_t,
        tensor_name: *mut *mut ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the partial shape of port.\n @ingroup ov_node_c_api\n @param port A pointer to the ov_output_const_port_t.\n @param partial_shape Partial shape.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_port_get_partial_shape(
        port: *const ov_output_const_port_t,
        partial_shape: *mut ov_partial_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the tensor type of port.\n @ingroup ov_node_c_api\n @param port A pointer to the ov_output_const_port_t.\n @param tensor_type tensor type.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_port_get_element_type(
        port: *const ov_output_const_port_t,
        tensor_type: *mut ov_element_type_e,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief free port object\n @ingroup ov_node_c_api\n @param port The pointer to the instance of the ov_output_port_t to free."]
    pub fn ov_output_port_free(port: *mut ov_output_port_t);
}
extern "C" {
    #[doc = " @brief free const port\n @ingroup ov_node_c_api\n @param port The pointer to the instance of the ov_output_const_port_t to free."]
    pub fn ov_output_const_port_free(port: *mut ov_output_const_port_t);
}
extern "C" {
    #[doc = " @brief Constructs Tensor using element type, shape and external host ptr.\n @ingroup ov_tensor_c_api\n @param type Tensor element type\n @param shape Tensor shape\n @param host_ptr Pointer to pre-allocated host memory\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_tensor_create_from_host_ptr(
        type_: ov_element_type_e,
        shape: ov_shape_t,
        host_ptr: *mut ::std::os::raw::c_void,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Constructs Tensor using element type and shape. Allocate internal host storage using default allocator\n @ingroup ov_tensor_c_api\n @param type Tensor element type\n @param shape Tensor shape\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_tensor_create(
        type_: ov_element_type_e,
        shape: ov_shape_t,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set new shape for tensor, deallocate/allocate if new total size is bigger than previous one.\n @ingroup ov_tensor_c_api\n @param shape Tensor shape\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_tensor_set_shape(tensor: *mut ov_tensor_t, shape: ov_shape_t) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get shape for tensor.\n @ingroup ov_tensor_c_api\n @param shape Tensor shape\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_tensor_get_shape(tensor: *const ov_tensor_t, shape: *mut ov_shape_t) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get type for tensor.\n @ingroup ov_tensor_c_api\n @param type Tensor element type\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_tensor_get_element_type(
        tensor: *const ov_tensor_t,
        type_: *mut ov_element_type_e,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief the total number of elements (a product of all the dims or 1 for scalar).\n @ingroup ov_tensor_c_api\n @param elements_size number of elements\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_tensor_get_size(tensor: *const ov_tensor_t, elements_size: *mut usize)
        -> ov_status_e;
}
extern "C" {
    #[doc = " @brief the size of the current Tensor in bytes.\n @ingroup ov_tensor_c_api\n @param byte_size the size of the current Tensor in bytes.\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_tensor_get_byte_size(
        tensor: *const ov_tensor_t,
        byte_size: *mut usize,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Provides an access to the underlaying host memory.\n @ingroup ov_tensor_c_api\n @param data A point to host memory.\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_tensor_data(
        tensor: *const ov_tensor_t,
        data: *mut *mut ::std::os::raw::c_void,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Free ov_tensor_t.\n @ingroup ov_tensor_c_api\n @param tensor A point to ov_tensor_t"]
    pub fn ov_tensor_free(tensor: *mut ov_tensor_t);
}
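// Usage sketch (illustrative only): wrapping caller-owned memory in a tensor and
// querying its metadata. `ov_element_type_e_F32` is the constant name bindgen
// usually emits for the C enum; the real name lives in `super::types` and is an
// assumption here.
//
//     let mut data = vec![0.0f32; 3 * 224 * 224];
//     let mut tensor: *mut ov_tensor_t = std::ptr::null_mut();
//     unsafe {
//         let status = ov_tensor_create_from_host_ptr(
//             ov_element_type_e_F32,
//             shape, // an ov_shape_t created with ov_shape_create
//             data.as_mut_ptr().cast(),
//             &mut tensor,
//         );
//         assert_eq!(status, 0);
//         let mut byte_size = 0usize;
//         ov_tensor_get_byte_size(tensor, &mut byte_size);
//         // `data` must outlive `tensor`: the tensor only wraps the host pointer.
//         ov_tensor_free(tensor);
//     }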
extern "C" {
    #[doc = " @brief Set an input/output tensor to infer on by the name of tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor_name  Name of the input or output tensor.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_set_tensor(
        infer_request: *mut ov_infer_request_t,
        tensor_name: *const ::std::os::raw::c_char,
        tensor: *const ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set an input/output tensor to infer request for the port.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param port Port of the input or output tensor, which can be got by calling ov_model_t/ov_compiled_model_t interface.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_set_tensor_by_port(
        infer_request: *mut ov_infer_request_t,
        port: *const ov_output_port_t,
        tensor: *const ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set an input/output tensor to infer request for the port.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param port Const port of the input or output tensor, which can be got by call interface from\n ov_model_t/ov_compiled_model_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_set_tensor_by_const_port(
        infer_request: *mut ov_infer_request_t,
        port: *const ov_output_const_port_t,
        tensor: *const ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set an input tensor to infer on by the index of tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param idx Index of the input port. If @p idx is greater than the number of model inputs, an error will return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_set_input_tensor_by_index(
        infer_request: *mut ov_infer_request_t,
        idx: usize,
        tensor: *const ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set an input tensor for the model with single input to infer on.\n @note If model has several inputs, an error will return.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_set_input_tensor(
        infer_request: *mut ov_infer_request_t,
        tensor: *const ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set an output tensor to infer by the index of output tensor.\n @note Index of the output preserved accross ov_model_t, ov_compiled_model_t.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param idx Index of the output tensor.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_set_output_tensor_by_index(
        infer_request: *mut ov_infer_request_t,
        idx: usize,
        tensor: *const ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set an output tensor to infer models with single output.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_set_output_tensor(
        infer_request: *mut ov_infer_request_t,
        tensor: *const ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an input/output tensor by the name of tensor.\n @note If model has several outputs, an error will return.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor_name Name of the input or output tensor to get.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_get_tensor(
        infer_request: *const ov_infer_request_t,
        tensor_name: *const ::std::os::raw::c_char,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an input/output tensor by const port.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param port Port of the tensor to get. @p port is not found, an error will return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_get_tensor_by_const_port(
        infer_request: *const ov_infer_request_t,
        port: *const ov_output_const_port_t,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an input/output tensor by port.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param port Port of the tensor to get. @p port is not found, an error will return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_get_tensor_by_port(
        infer_request: *const ov_infer_request_t,
        port: *const ov_output_port_t,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an input tensor by the index of input tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param idx Index of the tensor to get. @p idx. If the tensor with the specified @p idx is not found, an error will\n return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_get_input_tensor_by_index(
        infer_request: *const ov_infer_request_t,
        idx: usize,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an input tensor from the model with only one input tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_get_input_tensor(
        infer_request: *const ov_infer_request_t,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an output tensor by the index of output tensor.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param idx Index of the tensor to get. @p idx. If the tensor with the specified @p idx is not found, an error will\n return.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_get_output_tensor_by_index(
        infer_request: *const ov_infer_request_t,
        idx: usize,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an output tensor from the model with only one output tensor.\n @note If model has several outputs, an error will return.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param tensor Reference to the tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_get_output_tensor(
        infer_request: *const ov_infer_request_t,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Infer specified input(s) in synchronous mode.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_infer(infer_request: *mut ov_infer_request_t) -> ov_status_e;
}
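// Usage sketch (illustrative only): the common synchronous flow over an infer
// request obtained from ov_compiled_model_create_infer_request. For the
// asynchronous path, replace ov_infer_request_infer with ov_infer_request_start_async
// followed by ov_infer_request_wait (or ov_infer_request_wait_for with a timeout in
// milliseconds).
//
//     unsafe {
//         ov_infer_request_set_input_tensor_by_index(request, 0, input_tensor);
//         let status = ov_infer_request_infer(request);
//         assert_eq!(status, 0);
//         let mut output: *mut ov_tensor_t = std::ptr::null_mut();
//         ov_infer_request_get_output_tensor_by_index(request, 0, &mut output);
//         let mut out_data: *mut std::os::raw::c_void = std::ptr::null_mut();
//         ov_tensor_data(output, &mut out_data);
//         // interpret `out_data` according to the output element type and shape
//         ov_tensor_free(output);
//     }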
extern "C" {
    #[doc = " @brief Cancel inference request.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_cancel(infer_request: *mut ov_infer_request_t) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Start inference of specified input(s) in asynchronous mode.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_start_async(infer_request: *mut ov_infer_request_t) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Wait for the result to become available.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_wait(infer_request: *mut ov_infer_request_t) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Waits for the result to become available. Blocks until the specified timeout has elapsed or the result\n becomes available, whichever comes first.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param timeout Maximum duration, in milliseconds, to block for.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_wait_for(
        infer_request: *mut ov_infer_request_t,
        timeout: i64,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set callback function, which will be called when inference is done.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param callback  A function to be called.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_set_callback(
        infer_request: *mut ov_infer_request_t,
        callback: *const ov_callback_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_infer_request_t.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t to free memory."]
    pub fn ov_infer_request_free(infer_request: *mut ov_infer_request_t);
}
extern "C" {
    #[doc = " @brief Query performance measures per layer to identify the most time consuming operation.\n @ingroup ov_infer_request_c_api\n @param infer_request A pointer to the ov_infer_request_t.\n @param profiling_infos  Vector of profiling information for operations in a model.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_infer_request_get_profiling_info(
        infer_request: *const ov_infer_request_t,
        profiling_infos: *mut ov_profiling_info_list_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_profiling_info_list_t.\n @ingroup ov_infer_request_c_api\n @param profiling_infos A pointer to the ov_profiling_info_list_t to free memory."]
    pub fn ov_profiling_info_list_free(profiling_infos: *mut ov_profiling_info_list_t);
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_model_t.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t to free memory."]
    pub fn ov_model_free(model: *mut ov_model_t);
}
extern "C" {
    #[doc = " @brief Get a const input port of ov_model_t,which only support single input model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_const_input(
        model: *const ov_model_t,
        input_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a const input port of ov_model_t by name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name The name of input tensor.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_const_input_by_name(
        model: *const ov_model_t,
        tensor_name: *const ::std::os::raw::c_char,
        input_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a const input port of ov_model_t by port index.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param index input tensor index.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_const_input_by_index(
        model: *const ov_model_t,
        index: usize,
        input_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get single input port of ov_model_t, which only support single input model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param input_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_input(
        model: *const ov_model_t,
        input_port: *mut *mut ov_output_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an input port of ov_model_t by name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name input tensor name (char *).\n @param input_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_input_by_name(
        model: *const ov_model_t,
        tensor_name: *const ::std::os::raw::c_char,
        input_port: *mut *mut ov_output_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an input port of ov_model_t by port index.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param index input tensor index.\n @param input_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_input_by_index(
        model: *const ov_model_t,
        index: usize,
        input_port: *mut *mut ov_output_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a single const output port of ov_model_t, which only support single output model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_const_output(
        model: *const ov_model_t,
        output_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a const output port of ov_model_t by port index.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param index input tensor index.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_const_output_by_index(
        model: *const ov_model_t,
        index: usize,
        output_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a const output port of ov_model_t by name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name input tensor name (char *).\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_const_output_by_name(
        model: *const ov_model_t,
        tensor_name: *const ::std::os::raw::c_char,
        output_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a single output port of ov_model_t, which only support single output model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_output(
        model: *const ov_model_t,
        output_port: *mut *mut ov_output_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an output port of ov_model_t by port index.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param index input tensor index.\n @param output_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_output_by_index(
        model: *const ov_model_t,
        index: usize,
        output_port: *mut *mut ov_output_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get an output port of ov_model_t by name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name output tensor name (char *).\n @param output_port A pointer to the ov_output_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_output_by_name(
        model: *const ov_model_t,
        tensor_name: *const ::std::os::raw::c_char,
        output_port: *mut *mut ov_output_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the input size of ov_model_t.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param input_size the model's input size.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_inputs_size(model: *const ov_model_t, input_size: *mut usize) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the output size of ov_model_t.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param output_size the model's output size.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_outputs_size(model: *const ov_model_t, output_size: *mut usize) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Returns true if any of the ops defined in the model is dynamic shape.\n @param model A pointer to the ov_model_t.\n @return true if model contains dynamic shapes"]
    pub fn ov_model_is_dynamic(model: *const ov_model_t) -> bool;
}
extern "C" {
    #[doc = " @brief Do reshape in model with a list of <name, partial shape>.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_names The list of input tensor names.\n @param partialShape A PartialShape list.\n @param size The item count in the list.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_reshape(
        model: *const ov_model_t,
        tensor_names: *mut *const ::std::os::raw::c_char,
        partial_shapes: *const ov_partial_shape_t,
        size: usize,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Do reshape in model with partial shape for a specified name.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param tensor_name The tensor name of input tensor.\n @param partialShape A PartialShape.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_reshape_input_by_name(
        model: *const ov_model_t,
        tensor_name: *const ::std::os::raw::c_char,
        partial_shape: ov_partial_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Do reshape in model for one node(port 0).\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param partialShape A PartialShape.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_reshape_single_input(
        model: *const ov_model_t,
        partial_shape: ov_partial_shape_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Do reshape in model with a list of <port id, partial shape>.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param port_indexes The array of port indexes.\n @param partialShape A PartialShape list.\n @param size The item count in the list.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_reshape_by_port_indexes(
        model: *const ov_model_t,
        port_indexes: *const usize,
        partial_shape: *const ov_partial_shape_t,
        size: usize,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Do reshape in model with a list of <ov_output_port_t, partial shape>.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param output_ports The ov_output_port_t list.\n @param partialShape A PartialShape list.\n @param size The item count in the list.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_reshape_by_ports(
        model: *const ov_model_t,
        output_ports: *mut *const ov_output_port_t,
        partial_shapes: *const ov_partial_shape_t,
        size: usize,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Gets the friendly name for a model.\n @ingroup ov_model_c_api\n @param model A pointer to the ov_model_t.\n @param friendly_name the model's friendly name.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_model_get_friendly_name(
        model: *const ov_model_t,
        friendly_name: *mut *mut ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Allocates memory tensor in device memory or wraps user-supplied memory handle\n using the specified tensor description and low-level device-specific parameters.\n Returns a pointer to the object that implements the RemoteTensor interface.\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t instance.\n @param type Defines the element type of the tensor.\n @param shape Defines the shape of the tensor.\n @param object_args_size Size of the low-level tensor object parameters.\n @param remote_tensor Pointer to returned ov_tensor_t that contains remote tensor instance.\n @param ... variadic params Contains low-level tensor object parameters.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_remote_context_create_tensor(
        context: *const ov_remote_context_t,
        type_: ov_element_type_e,
        shape: ov_shape_t,
        object_args_size: usize,
        remote_tensor: *mut *mut ov_tensor_t,
        ...
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Returns name of a device on which underlying object is allocated.\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t instance.\n @param device_name Device name will be returned.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_remote_context_get_device_name(
        context: *const ov_remote_context_t,
        device_name: *mut *mut ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Returns a string contains device-specific parameters required for low-level\n operations with the underlying object.\n Parameters include device/context handles, access flags,\n etc. Content of the returned map depends on a remote execution context that is\n currently set on the device (working scenario).\n One actaul example: \"CONTEXT_TYPE OCL OCL_CONTEXT 0x5583b2ec7b40 OCL_QUEUE 0x5583b2e98ff0\"\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t instance.\n @param size The size of param pairs.\n @param params Param name:value list.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_remote_context_get_params(
        context: *const ov_remote_context_t,
        size: *mut usize,
        params: *mut *mut ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief This method is used to create a host tensor object friendly for the device in current context.\n For example, GPU context may allocate USM host memory (if corresponding extension is available),\n which could be more efficient than regular host memory.\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t instance.\n @param type Defines the element type of the tensor.\n @param shape Defines the shape of the tensor.\n @param tensor Pointer to ov_tensor_t that contains host tensor.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_remote_context_create_host_tensor(
        context: *const ov_remote_context_t,
        type_: ov_element_type_e,
        shape: ov_shape_t,
        tensor: *mut *mut ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_remote_context_t.\n @ingroup ov_remote_context_c_api\n @param context A pointer to the ov_remote_context_t to free memory.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_remote_context_free(context: *mut ov_remote_context_t);
}
extern "C" {
    #[doc = " @brief Returns a string contains device-specific parameters required for low-level\n operations with underlying object.\n Parameters include device/context/surface/buffer handles, access flags,\n etc. Content of the returned map depends on remote execution context that is\n currently set on the device (working scenario).\n One example: \"MEM_HANDLE:0x559ff6904b00;OCL_CONTEXT:0x559ff71d62f0;SHARED_MEM_TYPE:OCL_BUFFER;\"\n @ingroup ov_remote_context_c_api\n @param tensor Pointer to ov_tensor_t that contains host tensor.\n @param size The size of param pairs.\n @param params Param name:value list.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_remote_tensor_get_params(
        tensor: *mut ov_tensor_t,
        size: *mut usize,
        params: *mut *mut ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Returns name of a device on which underlying object is allocated.\n @ingroup ov_remote_context_c_api\n @param remote_tensor A pointer to the remote tensor instance.\n @param device_name Device name will be return.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_remote_tensor_get_device_name(
        remote_tensor: *mut ov_tensor_t,
        device_name: *mut *mut ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the input size of ov_compiled_model_t.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param input_size the compiled_model's input size.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_inputs_size(
        compiled_model: *const ov_compiled_model_t,
        size: *mut usize,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the single const input port of ov_compiled_model_t, which only support single input model.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_input(
        compiled_model: *const ov_compiled_model_t,
        input_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a const input port of ov_compiled_model_t by port index.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param index input index.\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_input_by_index(
        compiled_model: *const ov_compiled_model_t,
        index: usize,
        input_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a const input port of ov_compiled_model_t by name.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param name input tensor name (char *).\n @param input_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_input_by_name(
        compiled_model: *const ov_compiled_model_t,
        name: *const ::std::os::raw::c_char,
        input_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the output size of ov_compiled_model_t.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param size the compiled_model's output size.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_outputs_size(
        compiled_model: *const ov_compiled_model_t,
        size: *mut usize,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the single const output port of ov_compiled_model_t, which only support single output model.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_output(
        compiled_model: *const ov_compiled_model_t,
        output_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a const output port of ov_compiled_model_t by port index.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param index input index.\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_output_by_index(
        compiled_model: *const ov_compiled_model_t,
        index: usize,
        output_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get a const output port of ov_compiled_model_t by name.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param name input tensor name (char *).\n @param output_port A pointer to the ov_output_const_port_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_output_by_name(
        compiled_model: *const ov_compiled_model_t,
        name: *const ::std::os::raw::c_char,
        output_port: *mut *mut ov_output_const_port_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Gets runtime model information from a device.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param model A pointer to the ov_model_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_get_runtime_model(
        compiled_model: *const ov_compiled_model_t,
        model: *mut *mut ov_model_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Creates an inference request object used to infer the compiled model.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param infer_request A pointer to the ov_infer_request_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_create_infer_request(
        compiled_model: *const ov_compiled_model_t,
        infer_request: *mut *mut ov_infer_request_t,
    ) -> ov_status_e;
}
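// Usage sketch (illustrative only): turning a compiled model into an infer request
// and releasing both once inference is finished.
//
//     let mut request: *mut ov_infer_request_t = std::ptr::null_mut();
//     unsafe {
//         let status = ov_compiled_model_create_infer_request(compiled_model, &mut request);
//         assert_eq!(status, 0);
//         // ... set tensors and run inference (see the infer-request sketch above) ...
//         ov_infer_request_free(request);
//         ov_compiled_model_free(compiled_model);
//     }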
extern "C" {
    #[doc = " @brief Sets properties for a device, acceptable keys can be found in ov_property_key_xxx.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param ... variadic paramaters The format is <char *property_key, char* property_value>.\n Supported property key please see ov_property.h.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_set_property(
        compiled_model: *const ov_compiled_model_t,
        property_key: *const ::std::os::raw::c_char,
        property_value: *const ::std::os::raw::c_char
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Gets properties for current compiled model.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param property_key Property key.\n @param property_value A pointer to property value.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_get_property(
        compiled_model: *const ov_compiled_model_t,
        property_key: *const ::std::os::raw::c_char,
        property_value: *mut *mut ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Exports the current compiled model to an output stream `std::ostream`.\n The exported model can also be imported via the ov::Core::import_model method.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param export_model_path Path to the file.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_compiled_model_export_model(
        compiled_model: *const ov_compiled_model_t,
        export_model_path: *const ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_compiled_model_t.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t to free memory."]
    pub fn ov_compiled_model_free(compiled_model: *mut ov_compiled_model_t);
}
extern "C" {
    #[doc = " @brief Returns pointer to device-specific shared context\n on a remote accelerator device that was used to create this CompiledModel.\n @ingroup ov_compiled_model_c_api\n @param compiled_model A pointer to the ov_compiled_model_t.\n @param context Return context.\n @return Status code of the operation: OK(0) for success.\n"]
    pub fn ov_compiled_model_get_context(
        compiled_model: *const ov_compiled_model_t,
        context: *mut *mut ov_remote_context_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get version of OpenVINO.\n @ingroup ov_core_c_api\n @param ov_version_t a pointer to the version\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_get_openvino_version(version: *mut ov_version_t) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_version_t.\n @ingroup ov_core_c_api\n @param version A pointer to the ov_version_t to free memory."]
    pub fn ov_version_free(version: *mut ov_version_t);
}
extern "C" {
    #[doc = " @brief Constructs OpenVINO Core instance by default.\n See RegisterPlugins for more details.\n @ingroup ov_core_c_api\n @param core A pointer to the newly created ov_core_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_create(core: *mut *mut ov_core_t) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Constructs OpenVINO Core instance using XML configuration file with devices description.\n See RegisterPlugins for more details.\n @ingroup ov_core_c_api\n @param xml_config_file A path to .xml file with devices to load from. If XML configuration file is not specified,\n then default plugin.xml file will be used.\n @param core A pointer to the newly created ov_core_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_create_with_config(
        xml_config_file: *const ::std::os::raw::c_char,
        core: *mut *mut ov_core_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Constructs OpenVINO Core instance.\n See RegisterPlugins for more details.\n @ingroup ov_core_c_api\n @param xml_config_file_ws A path to model file with unicode.\n @param core A pointer to the newly created ov_core_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_create_with_config_unicode(
        xml_config_file_ws: *const wchar_t,
        core: *mut *mut ov_core_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_core_t.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t to free memory."]
    pub fn ov_core_free(core: *mut ov_core_t);
}
extern "C" {
    #[doc = " @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_path Path to a model.\n @param bin_path Path to a data file.\n For IR format (*.bin):\n  * if `bin_path` is empty, will try to read a bin file with the same name as xml and\n  * if the bin file with the same name is not found, will load IR without weights.\n For the following file formats the `bin_path` parameter is not used:\n  * ONNX format (*.onnx)\n  * PDPD (*.pdmodel)\n  * TF (*.pb)\n  * TFLite (*.tflite)\n @param model A pointer to the newly created model.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_read_model(
        core: *const ov_core_t,
        model_path: *const ::std::os::raw::c_char,
        bin_path: *const ::std::os::raw::c_char,
        model: *mut *mut ov_model_t,
    ) -> ov_status_e;
}
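// Usage sketch (illustrative only): reading an IR model from disk. Passing an empty
// `bin_path` lets OpenVINO look for a .bin file next to the .xml file.
//
//     let model_path = std::ffi::CString::new("model.xml").unwrap();
//     let bin_path = std::ffi::CString::new("").unwrap();
//     let mut core: *mut ov_core_t = std::ptr::null_mut();
//     let mut model: *mut ov_model_t = std::ptr::null_mut();
//     unsafe {
//         assert_eq!(ov_core_create(&mut core), 0);
//         let status = ov_core_read_model(core, model_path.as_ptr(), bin_path.as_ptr(), &mut model);
//         assert_eq!(status, 0);
//         // ... compile the model and infer, then clean up ...
//         ov_model_free(model);
//         ov_core_free(core);
//     }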
extern "C" {
    #[doc = " @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats, path is unicode.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_path Path to a model.\n @param bin_path Path to a data file.\n For IR format (*.bin):\n  * if `bin_path` is empty, will try to read a bin file with the same name as xml and\n  * if the bin file with the same name is not found, will load IR without weights.\n For the following file formats the `bin_path` parameter is not used:\n  * ONNX format (*.onnx)\n  * PDPD (*.pdmodel)\n  * TF (*.pb)\n  * TFLite (*.tflite)\n @param model A pointer to the newly created model.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_read_model_unicode(
        core: *const ov_core_t,
        model_path: *const wchar_t,
        bin_path: *const wchar_t,
        model: *mut *mut ov_model_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats with models string size.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_str String with a model in IR / ONNX / PDPD / TF / TFLite format, support model string containing\n several null chars.\n @param str_len The length of model string.\n @param weights Shared pointer to a constant tensor with weights.\n @param model A pointer to the newly created model.\n Reading ONNX / PDPD / TF / TFLite models does not support loading weights from the @p weights tensors.\n @note Created model object shares the weights with the @p weights object.\n Thus, do not create @p weights on temporary data that can be freed later, since the model\n constant data will point to an invalid memory.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_read_model_from_memory_buffer(
        core: *const ov_core_t,
        model_str: *const ::std::os::raw::c_char,
        str_len: usize,
        weights: *const ov_tensor_t,
        model: *mut *mut ov_model_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Creates a compiled model from a source model object.\n Users can create as many compiled models as they need and use\n them simultaneously (up to the limitation of the hardware resources).\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model Model object acquired from Core::read_model.\n @param device_name Name of a device to load a model to.\n @param property_args_size How many properties args will be passed, each property contains 2 args: key and value.\n @param compiled_model A pointer to the newly created compiled_model.\n @param ... property paramater: Optional pack of pairs: <char* property_key, char* property_value> relevant only\n for this load operation operation. Supported property key please see ov_property.h.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_compile_model(
        core: *const ov_core_t,
        model: *const ov_model_t,
        device_name: *const ::std::os::raw::c_char,
        property_args_size: usize,
        compiled_model: *mut *mut ov_compiled_model_t,
        ...
    ) -> ov_status_e;
}
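// Usage sketch (illustrative only): compiling a model for a device. With no extra
// properties the variadic tail is left empty and property_args_size is 0; with one
// key/value pair it is 2, since property_args_size counts the individual variadic
// arguments. Property keys come from ov_property.h and are not shown here.
//
//     let device = std::ffi::CString::new("CPU").unwrap();
//     let mut compiled: *mut ov_compiled_model_t = std::ptr::null_mut();
//     unsafe {
//         let status = ov_core_compile_model(core, model, device.as_ptr(), 0, &mut compiled);
//         assert_eq!(status, 0);
//         // With one property pair:
//         //     ov_core_compile_model(core, model, device.as_ptr(), 2, &mut compiled,
//         //                           key.as_ptr(), value.as_ptr());
//     }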
extern "C" {
    #[doc = " @brief Reads a model and creates a compiled model from the IR/ONNX/PDPD file.\n This can be more efficient than using the ov_core_read_model_from_XXX + ov_core_compile_model flow,\n especially for cases when caching is enabled and a cached model is available.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_path Path to a model.\n @param device_name Name of a device to load a model to.\n @param property_args_size How many properties args will be passed, each property contains 2 args: key and value.\n @param compiled_model A pointer to the newly created compiled_model.\n @param ... Optional pack of pairs: <char* property_key, char* property_value> relevant only\n for this load operation operation. Supported property key please see ov_property.h.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_compile_model_from_file(
        core: *const ov_core_t,
        model_path: *const ::std::os::raw::c_char,
        device_name: *const ::std::os::raw::c_char,
        property_args_size: usize,
        compiled_model: *mut *mut ov_compiled_model_t,
        ...
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Reads a model and creates a compiled model from the IR/ONNX/PDPD file.\n This can be more efficient than using the ov_core_read_model_from_XXX + ov_core_compile_model flow,\n especially for cases when caching is enabled and a cached model is available.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model_path Path to a model.\n @param device_name Name of a device to load a model to.\n @param property_args_size How many properties args will be passed, each property contains 2 args: key and value.\n @param compiled_model A pointer to the newly created compiled_model.\n @param ... Optional pack of pairs: <char* property_key, char* property_value> relevant only\n for this load operation operation. Supported property key please see ov_property.h.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_compile_model_from_file_unicode(
        core: *const ov_core_t,
        model_path: *const wchar_t,
        device_name: *const ::std::os::raw::c_char,
        property_args_size: usize,
        compiled_model: *mut *mut ov_compiled_model_t,
        ...
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Sets properties for a device, acceptable keys can be found in ov_property_key_xxx.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Name of a device.\n @param ... variadic paramaters The format is <char* property_key, char* property_value>.\n Supported property key please see ov_property.h.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_set_property(
        core: *const ov_core_t,
        device_name: *const ::std::os::raw::c_char,
        property_key: *const ::std::os::raw::c_char,
        property_value: *const ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Gets properties related to device behaviour.\n The method extracts information that can be set via the set_property method.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name  Name of a device to get a property value.\n @param property_key  Property key.\n @param property_value A pointer to property value with string format.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_get_property(
        core: *const ov_core_t,
        device_name: *const ::std::os::raw::c_char,
        property_key: *const ::std::os::raw::c_char,
        property_value: *mut *mut ::std::os::raw::c_char,
    ) -> ov_status_e;
}
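// Illustrative sketch (hand-written, not produced by bindgen): setting a device property and
// reading it back with the two functions above. The key and value strings are placeholders
// (see ov_property.h for supported keys). `ov_core_get_property` returns a library-allocated
// C string through its out-parameter; releasing it is the caller's responsibility via the
// library's string free routine (e.g. ov_free in the C API), not shown here. Status checks
// against OK (0) are left to the caller.
pub unsafe fn example_set_and_get_property(
    core: *const ov_core_t,
    device_name: &::std::ffi::CStr,
    property_key: &::std::ffi::CStr,
    property_value: &::std::ffi::CStr,
) -> (ov_status_e, *mut ::std::os::raw::c_char) {
    let _set_status = ov_core_set_property(
        core,
        device_name.as_ptr(),
        property_key.as_ptr(),
        property_value.as_ptr(),
    );
    let mut fetched: *mut ::std::os::raw::c_char = ::std::ptr::null_mut();
    let get_status = ov_core_get_property(
        core,
        device_name.as_ptr(),
        property_key.as_ptr(),
        &mut fetched,
    );
    (get_status, fetched)
}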
extern "C" {
    #[doc = " @brief Returns devices available for inference.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param devices A pointer to the ov_available_devices_t instance.\n Core objects go over all registered plugins and ask about available devices.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_get_available_devices(
        core: *const ov_core_t,
        devices: *mut ov_available_devices_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Releases memory occpuied by ov_available_devices_t\n @ingroup ov_core_c_api\n @param devices A pointer to the ov_available_devices_t instance.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_available_devices_free(devices: *mut ov_available_devices_t);
}
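// Illustrative sketch (hand-written, not produced by bindgen): enumerating the devices
// reported by `ov_core_get_available_devices` and releasing the list afterwards. The
// `devices`/`size` field names are an assumption taken from the C definition of
// ov_available_devices_t (a char** plus a size_t). The status check against OK (0) is
// omitted here; a real caller should verify it before touching the out-parameter.
pub unsafe fn example_print_available_devices(core: *const ov_core_t) {
    let mut devices: ov_available_devices_t = ::std::mem::zeroed();
    let _status = ov_core_get_available_devices(core, &mut devices);
    let count = devices.size as usize;
    for i in 0..count {
        let name = ::std::ffi::CStr::from_ptr(*devices.devices.add(i));
        println!("available device: {}", name.to_string_lossy());
    }
    // Hand the list back to the library for deallocation.
    ov_available_devices_free(&mut devices);
}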
extern "C" {
    #[doc = " @brief Imports a compiled model from the previously exported one.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param content A pointer to content of the exported model.\n @param content_size Number of bytes in the exported network.\n @param device_name Name of a device to import a compiled model for.\n @param compiled_model A pointer to the newly created compiled_model.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_import_model(
        core: *const ov_core_t,
        content: *const ::std::os::raw::c_char,
        content_size: usize,
        device_name: *const ::std::os::raw::c_char,
        compiled_model: *mut *mut ov_compiled_model_t,
    ) -> ov_status_e;
}
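// Illustrative sketch (hand-written, not produced by bindgen): importing a compiled model
// from a blob previously produced by the export API. `blob` is assumed to hold the exported
// bytes and `device_name` the device the blob was exported for. The returned status should
// be checked against OK (0) before `compiled` is used.
pub unsafe fn example_import_compiled_model(
    core: *const ov_core_t,
    blob: &[u8],
    device_name: &::std::ffi::CStr,
) -> (ov_status_e, *mut ov_compiled_model_t) {
    let mut compiled: *mut ov_compiled_model_t = ::std::ptr::null_mut();
    let status = ov_core_import_model(
        core,
        blob.as_ptr() as *const ::std::os::raw::c_char,
        blob.len(),
        device_name.as_ptr(),
        &mut compiled,
    );
    (status, compiled)
}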
extern "C" {
    #[doc = " @brief Returns device plugins version information.\n Device name can be complex and identify multiple devices at once like `HETERO:CPU,GPU`;\n in this case, std::map contains multiple entries, each per device.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Device name to identify a plugin.\n @param versions A pointer to versions corresponding to device_name.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_get_versions_by_device_name(
        core: *const ov_core_t,
        device_name: *const ::std::os::raw::c_char,
        versions: *mut ov_core_version_list_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Releases memory occupied by ov_core_version_list_t.\n @ingroup ov_core_c_api\n @param versions A pointer to the ov_core_version_list_t to free memory."]
    pub fn ov_core_versions_free(versions: *mut ov_core_version_list_t);
}
extern "C" {
    #[doc = " @brief Creates a new remote shared context object on the specified accelerator device\n using specified plugin-specific low-level device API parameters (device handle, pointer, context, etc.).\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Device name to identify a plugin.\n @param context_args_size How many property args will be for this remote context creation.\n @param context A pointer to the newly created remote context.\n @param ... variadic parmameters Actual context property parameter for remote context\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_create_context(
        core: *const ov_core_t,
        device_name: *const ::std::os::raw::c_char,
        context_args_size: usize,
        context: *mut *mut ov_remote_context_t,
        ...
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Creates a compiled model from a source model within a specified remote context.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param model Model object acquired from ov_core_read_model.\n @param context A pointer to the newly created remote context.\n @param property_args_size How many args will be for this compiled model.\n @param compiled_model A pointer to the newly created compiled_model.\n @param ... variadic parmameters Actual property parameter for remote context\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_compile_model_with_context(
        core: *const ov_core_t,
        model: *const ov_model_t,
        context: *const ov_remote_context_t,
        property_args_size: usize,
        compiled_model: *mut *mut ov_compiled_model_t,
        ...
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Gets a pointer to default (plugin-supplied) shared context object for the specified accelerator device.\n @ingroup ov_core_c_api\n @param core A pointer to the ov_core_t instance.\n @param device_name Name of a device to get a default shared context from.\n @param context A pointer to the referenced remote context.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_core_get_default_context(
        core: *const ov_core_t,
        device_name: *const ::std::os::raw::c_char,
        context: *mut *mut ov_remote_context_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Shut down the OpenVINO by deleting all static-duration objects allocated by the library and releasing\n dependent resources\n @ingroup ov_c_api\n @note This function should be used by advanced user to control unload the resources.\n\n You might want to use this function if you are developing a dynamically-loaded library which should clean up all\n resources after itself when the library is unloaded."]
    pub fn ov_shutdown();
}
extern "C" {
    #[doc = " @brief Create a ov_preprocess_prepostprocessor_t instance.\n @ingroup ov_prepostprocess_c_api\n @param model A pointer to the ov_model_t.\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_prepostprocessor_create(
        model: *const ov_model_t,
        preprocess: *mut *mut ov_preprocess_prepostprocessor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_preprocess_prepostprocessor_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t to free memory."]
    pub fn ov_preprocess_prepostprocessor_free(preprocess: *mut ov_preprocess_prepostprocessor_t);
}
extern "C" {
    #[doc = " @brief Get the input info of ov_preprocess_prepostprocessor_t instance.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_prepostprocessor_get_input_info(
        preprocess: *const ov_preprocess_prepostprocessor_t,
        preprocess_input_info: *mut *mut ov_preprocess_input_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the input info of ov_preprocess_prepostprocessor_t instance by tensor name.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param tensor_name The name of input.\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_prepostprocessor_get_input_info_by_name(
        preprocess: *const ov_preprocess_prepostprocessor_t,
        tensor_name: *const ::std::os::raw::c_char,
        preprocess_input_info: *mut *mut ov_preprocess_input_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the input info of ov_preprocess_prepostprocessor_t instance by tensor order.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param tensor_index The order of input.\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_prepostprocessor_get_input_info_by_index(
        preprocess: *const ov_preprocess_prepostprocessor_t,
        tensor_index: usize,
        preprocess_input_info: *mut *mut ov_preprocess_input_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_preprocess_input_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t to free memory."]
    pub fn ov_preprocess_input_info_free(preprocess_input_info: *mut ov_preprocess_input_info_t);
}
extern "C" {
    #[doc = " @brief Get a ov_preprocess_input_tensor_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @param preprocess_input_tensor_info A pointer to ov_preprocess_input_tensor_info_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_info_get_tensor_info(
        preprocess_input_info: *const ov_preprocess_input_info_t,
        preprocess_input_tensor_info: *mut *mut ov_preprocess_input_tensor_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_preprocess_input_tensor_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t to free memory."]
    pub fn ov_preprocess_input_tensor_info_free(
        preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
    );
}
extern "C" {
    #[doc = " @brief Get a ov_preprocess_preprocess_steps_t.\n @ingroup ov_prepostprocess_c_api\n @param ov_preprocess_input_info_t A pointer to the ov_preprocess_input_info_t.\n @param preprocess_input_steps A pointer to ov_preprocess_preprocess_steps_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_info_get_preprocess_steps(
        preprocess_input_info: *const ov_preprocess_input_info_t,
        preprocess_input_steps: *mut *mut ov_preprocess_preprocess_steps_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_preprocess_preprocess_steps_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_steps A pointer to the ov_preprocess_preprocess_steps_t to free memory."]
    pub fn ov_preprocess_preprocess_steps_free(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
    );
}
extern "C" {
    #[doc = " @brief Add resize operation to model's dimensions.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param resize_algorithm A ov_preprocess_resizeAlgorithm instance\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_preprocess_steps_resize(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
        resize_algorithm: ov_preprocess_resize_algorithm_e,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Add scale preprocess operation. Divide each element of input by specified value.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param value Scaling value\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_preprocess_steps_scale(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
        value: f32,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Add mean preprocess operation. Subtract specified value from each element of input.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param value Value to subtract from each element.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_preprocess_steps_mean(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
        value: f32,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Crop input tensor between begin and end coordinates.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param begin Pointer to begin indexes for input tensor cropping.\n Negative values represent counting elements from the end of input tensor\n @param begin_size The size of begin array\n @param end Pointer to end indexes for input tensor cropping.\n End indexes are exclusive, which means values including end edge are not included in the output slice.\n Negative values represent counting elements from the end of input tensor\n @param end_size The size of end array\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_preprocess_steps_crop(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
        begin: *mut i32,
        begin_size: i32,
        end: *mut i32,
        end_size: i32,
    ) -> ov_status_e;
}
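// Illustrative sketch (hand-written, not produced by bindgen): adding a crop step with
// explicit begin/end coordinates. The coordinate values below are placeholders for a 4-D
// NCHW-style input; end indexes are exclusive, and negative values count from the end of
// the corresponding dimension. The returned status should be checked against OK (0).
pub unsafe fn example_crop_input(
    preprocess_steps: *mut ov_preprocess_preprocess_steps_t,
) -> ov_status_e {
    let mut begin = [0i32, 0, 10, 10];
    let mut end = [1i32, 3, 210, 210];
    ov_preprocess_preprocess_steps_crop(
        preprocess_steps,
        begin.as_mut_ptr(),
        begin.len() as i32,
        end.as_mut_ptr(),
        end.len() as i32,
    )
}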
extern "C" {
    #[doc = " @brief Add 'convert layout' operation to specified layout.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @param layout A point to ov_layout_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_preprocess_steps_convert_layout(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
        layout: *mut ov_layout_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Reverse channels operation.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_process_steps A pointer to ov_preprocess_preprocess_steps_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_preprocess_steps_reverse_channels(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set ov_preprocess_input_tensor_info_t precesion.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param element_type A point to element_type\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_tensor_info_set_element_type(
        preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
        element_type: ov_element_type_e,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set ov_preprocess_input_tensor_info_t color format.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param colorFormat The enumerate of colorFormat\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_tensor_info_set_color_format(
        preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
        colorFormat: ov_color_format_e,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set ov_preprocess_input_tensor_info_t color format with subname.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param colorFormat The enumerate of colorFormat\n @param sub_names_size The size of sub_names\n @param ... variadic params sub_names Optional list of sub-names assigned for each plane (e.g. \"Y\", \"UV\").\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_tensor_info_set_color_format_with_subname(
        preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
        colorFormat: ov_color_format_e,
        sub_names_size: usize,
        ...
    ) -> ov_status_e;
}
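// Illustrative sketch (hand-written, not produced by bindgen): declaring a two-plane color
// format together with per-plane sub-names passed through the variadic tail. The concrete
// ov_color_format_e value (e.g. an NV12 two-plane format) and the "Y"/"UV" names are
// caller-supplied assumptions; `sub_names_size` counts the sub-name strings that follow.
pub unsafe fn example_set_two_plane_color_format(
    tensor_info: *mut ov_preprocess_input_tensor_info_t,
    color_format: ov_color_format_e,
    plane0_name: &::std::ffi::CStr,
    plane1_name: &::std::ffi::CStr,
) -> ov_status_e {
    ov_preprocess_input_tensor_info_set_color_format_with_subname(
        tensor_info,
        color_format,
        2, // two sub-names follow as variadic arguments
        plane0_name.as_ptr(),
        plane1_name.as_ptr(),
    )
}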
extern "C" {
    #[doc = " @brief Set ov_preprocess_input_tensor_info_t spatial_static_shape.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param input_height The height of input\n @param input_width The width of input\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_tensor_info_set_spatial_static_shape(
        preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
        input_height: usize,
        input_width: usize,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set ov_preprocess_input_tensor_info_t memory type.\n @ingroup prepostprocess\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param mem_type Memory type. Refer to ov_remote_context.h to get memory type string info.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_tensor_info_set_memory_type(
        preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
        mem_type: *const ::std::os::raw::c_char,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Convert ov_preprocess_preprocess_steps_t element type.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_steps A pointer to the ov_preprocess_preprocess_steps_t.\n @param element_type preprocess input element type.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_preprocess_steps_convert_element_type(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
        element_type: ov_element_type_e,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Convert ov_preprocess_preprocess_steps_t color.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_steps A pointer to the ov_preprocess_preprocess_steps_t.\n @param colorFormat The enumerate of colorFormat.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_preprocess_steps_convert_color(
        preprocess_input_process_steps: *mut ov_preprocess_preprocess_steps_t,
        colorFormat: ov_color_format_e,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Helper function to reuse element type and shape from user's created tensor.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param tensor A point to ov_tensor_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_tensor_info_set_from(
        preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
        tensor: *const ov_tensor_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Set ov_preprocess_input_tensor_info_t layout.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_tensor_info A pointer to the ov_preprocess_input_tensor_info_t.\n @param layout A point to ov_layout_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_tensor_info_set_layout(
        preprocess_input_tensor_info: *mut ov_preprocess_input_tensor_info_t,
        layout: *mut ov_layout_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the output info of ov_preprocess_output_info_t instance.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_prepostprocessor_get_output_info(
        preprocess: *const ov_preprocess_prepostprocessor_t,
        preprocess_output_info: *mut *mut ov_preprocess_output_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the output info of ov_preprocess_output_info_t instance.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param tensor_index The tensor index\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_prepostprocessor_get_output_info_by_index(
        preprocess: *const ov_preprocess_prepostprocessor_t,
        tensor_index: usize,
        preprocess_output_info: *mut *mut ov_preprocess_output_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get the output info of ov_preprocess_output_info_t instance.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param tensor_name The name of input.\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_prepostprocessor_get_output_info_by_name(
        preprocess: *const ov_preprocess_prepostprocessor_t,
        tensor_name: *const ::std::os::raw::c_char,
        preprocess_output_info: *mut *mut ov_preprocess_output_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_preprocess_output_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t to free memory."]
    pub fn ov_preprocess_output_info_free(preprocess_output_info: *mut ov_preprocess_output_info_t);
}
extern "C" {
    #[doc = " @brief Get a ov_preprocess_input_tensor_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_output_info A pointer to the ov_preprocess_output_info_t.\n @param preprocess_output_tensor_info A pointer to the ov_preprocess_output_tensor_info_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_output_info_get_tensor_info(
        preprocess_output_info: *const ov_preprocess_output_info_t,
        preprocess_output_tensor_info: *mut *mut ov_preprocess_output_tensor_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_preprocess_output_tensor_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_output_tensor_info A pointer to the ov_preprocess_output_tensor_info_t to free memory."]
    pub fn ov_preprocess_output_tensor_info_free(
        preprocess_output_tensor_info: *mut ov_preprocess_output_tensor_info_t,
    );
}
extern "C" {
    #[doc = " @brief Set ov_preprocess_input_tensor_info_t precesion.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_output_tensor_info A pointer to the ov_preprocess_output_tensor_info_t.\n @param element_type A point to element_type\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_output_set_element_type(
        preprocess_output_tensor_info: *mut ov_preprocess_output_tensor_info_t,
        element_type: ov_element_type_e,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Get current input model information.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_info A pointer to the ov_preprocess_input_info_t.\n @param preprocess_input_model_info A pointer to the ov_preprocess_input_model_info_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_info_get_model_info(
        preprocess_input_info: *const ov_preprocess_input_info_t,
        preprocess_input_model_info: *mut *mut ov_preprocess_input_model_info_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Release the memory allocated by ov_preprocess_input_model_info_t.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_model_info A pointer to the ov_preprocess_input_model_info_t to free memory."]
    pub fn ov_preprocess_input_model_info_free(
        preprocess_input_model_info: *mut ov_preprocess_input_model_info_t,
    );
}
extern "C" {
    #[doc = " @brief Set layout for model's input tensor.\n @ingroup ov_prepostprocess_c_api\n @param preprocess_input_model_info A pointer to the ov_preprocess_input_model_info_t\n @param layout A point to ov_layout_t\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_input_model_info_set_layout(
        preprocess_input_model_info: *mut ov_preprocess_input_model_info_t,
        layout: *mut ov_layout_t,
    ) -> ov_status_e;
}
extern "C" {
    #[doc = " @brief Adds pre/post-processing operations to function passed in constructor.\n @ingroup ov_prepostprocess_c_api\n @param preprocess A pointer to the ov_preprocess_prepostprocessor_t.\n @param model A pointer to the ov_model_t.\n @return Status code of the operation: OK(0) for success."]
    pub fn ov_preprocess_prepostprocessor_build(
        preprocess: *const ov_preprocess_prepostprocessor_t,
        model: *mut *mut ov_model_t,
    ) -> ov_status_e;
}
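// Illustrative sketch (hand-written, not produced by bindgen): a minimal pre-processing
// pipeline built from the functions above. It creates a prepostprocessor for the model,
// fetches the first input, describes the incoming tensor (element type and layout), adds a
// resize step, and rebuilds the model. The element type, layout, and resize algorithm are
// caller-supplied assumptions. Every intermediate status should be checked against OK (0)
// and every created object released with its matching *_free function; both are omitted
// here to keep the sketch short.
pub unsafe fn example_build_preprocessing(
    model: *const ov_model_t,
    tensor_element_type: ov_element_type_e,
    tensor_layout: *mut ov_layout_t,
    resize_algorithm: ov_preprocess_resize_algorithm_e,
) -> (ov_status_e, *mut ov_model_t) {
    let mut preprocess: *mut ov_preprocess_prepostprocessor_t = ::std::ptr::null_mut();
    let mut input_info: *mut ov_preprocess_input_info_t = ::std::ptr::null_mut();
    let mut tensor_info: *mut ov_preprocess_input_tensor_info_t = ::std::ptr::null_mut();
    let mut steps: *mut ov_preprocess_preprocess_steps_t = ::std::ptr::null_mut();
    let mut new_model: *mut ov_model_t = ::std::ptr::null_mut();

    ov_preprocess_prepostprocessor_create(model, &mut preprocess);
    ov_preprocess_prepostprocessor_get_input_info_by_index(preprocess, 0, &mut input_info);
    ov_preprocess_input_info_get_tensor_info(input_info, &mut tensor_info);
    ov_preprocess_input_tensor_info_set_element_type(tensor_info, tensor_element_type);
    ov_preprocess_input_tensor_info_set_layout(tensor_info, tensor_layout);
    ov_preprocess_input_info_get_preprocess_steps(input_info, &mut steps);
    ov_preprocess_preprocess_steps_resize(steps, resize_algorithm);
    let status = ov_preprocess_prepostprocessor_build(preprocess, &mut new_model);
    (status, new_model)
}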

}