async_tensorrt/ffi/
memory.rs

1use cpp::cpp;
2
/// Owning wrapper around a raw pointer to a TensorRT `IHostMemory` object.
///
/// The pointer is owned by this wrapper: the underlying object is destroyed
/// when the [`HostBuffer`] is dropped (see the `Drop` impl below).
pub struct HostBuffer(*mut std::ffi::c_void);
4
/// Implements [`Send`] for [`HostBuffer`].
///
/// # Safety
///
/// The TensorRT API is thread-safe with regards to all operations on
/// [`HostBuffer`], so the wrapped pointer may be moved across threads.
unsafe impl Send for HostBuffer {}
11
/// Implements [`Sync`] for [`HostBuffer`].
///
/// # Safety
///
/// The TensorRT API is thread-safe with regards to all operations on
/// [`HostBuffer`], so shared references may be used from multiple threads.
unsafe impl Sync for HostBuffer {}
18
19/// Handle TensorRT-related memory accesible to caller.
20///
21/// [TensorRT documentation](https://docs.nvidia.com/deeplearning/tensorrt/api/c_api/classnvinfer1_1_1_i_host_memory.html)
22impl HostBuffer {
23    /// Wrap internal pointer as [`HostBuffer`].
24    ///
25    /// # Safety
26    ///
27    /// The pointer must point to a valid `IHostMemory` object.
28    #[inline]
29    pub(crate) fn wrap(internal: *mut std::ffi::c_void) -> Self {
30        HostBuffer(internal)
31    }
32
33    /// Get data slice pointing to the host buffer.
34    #[inline]
35    pub fn as_bytes(&self) -> &[u8] {
36        let data = self.data() as *const u8;
37        let size = self.size();
38        // SAFETY: This is safe because:
39        // * The pointer is valid because we just got it from TensorRT.
40        // * The pointer will remain valid as long as `HostBuffer` remains around.
41        unsafe { std::slice::from_raw_parts(data, size) }
42    }
43
44    /// Get readonly pointer to host buffer data.
45    ///
46    /// [TensorRT documentation](https://docs.nvidia.com/deeplearning/tensorrt/api/c_api/classnvinfer1_1_1_i_host_memory.html#a95d49ae9b0a5479af9433cb101a26782)
47    #[inline]
48    pub fn data(&self) -> *const std::ffi::c_void {
49        let internal = self.as_ptr();
50        cpp!(unsafe [
51            internal as "const void*"
52        ] -> *mut std::ffi::c_void as "void*" {
53            return ((const IHostMemory*) internal)->data();
54        })
55    }
56
57    /// Get size of host buffer data.
58    ///
59    /// [TensorRT documentation](https://docs.nvidia.com/deeplearning/tensorrt/api/c_api/classnvinfer1_1_1_i_host_memory.html#adede91569ebccd258b357f29ba706e8e)
60    #[inline]
61    pub fn size(&self) -> usize {
62        let internal = self.as_ptr();
63        cpp!(unsafe [
64            internal as "const void*"
65        ] -> usize as "std::size_t" {
66            return ((const IHostMemory*) internal)->size();
67        })
68    }
69
70    /// Get internal readonly pointer.
71    #[inline(always)]
72    pub fn as_ptr(&self) -> *const std::ffi::c_void {
73        let HostBuffer(internal) = *self;
74        internal
75    }
76
77    /// Get internal mutable pointer.
78    #[inline(always)]
79    pub fn as_mut_ptr(&mut self) -> *mut std::ffi::c_void {
80        let HostBuffer(internal) = *self;
81        internal
82    }
83}
84
/// Releases the underlying `IHostMemory` object when the wrapper is dropped.
impl Drop for HostBuffer {
    fn drop(&mut self) {
        // Hand the owned pointer to the C++ side for destruction.
        // NOTE(review): `destroy` is presumably a project-provided C++ helper
        // that deletes the IHostMemory — confirm it tolerates the pointer
        // produced by `wrap` in all cases.
        let internal = self.as_mut_ptr();
        cpp!(unsafe [
            internal as "void*"
        ] {
            destroy((IHostMemory*) internal);
        });
    }
}
95
#[cfg(test)]
mod tests {
    use crate::tests::utils::*;

    #[tokio::test]
    async fn test_host_buffer_data_and_size() {
        let network_plan = simple_network_plan!();

        // A serialized plan must expose a real, non-empty buffer.
        let data = network_plan.data();
        let size = network_plan.size();
        assert!(!data.is_null());
        assert!(size > 0);

        // The byte-slice view must agree with the raw accessors.
        let bytes = network_plan.as_bytes();
        assert_eq!(bytes.len(), size);
        assert_eq!(bytes[0], unsafe { *(data as *const u8) });
    }
}
109}