
dma_api/common.rs

use core::alloc::Layout;

use crate::{DeviceDma, DmaDirection, DmaError, DmaMapHandle};

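/// Shared backing state for a DMA buffer: the mapped allocation, the
/// `DeviceDma` handle used for allocation and cache maintenance, and the
/// transfer direction.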
pub(crate) struct DCommon {
    pub handle: DmaMapHandle,
    pub osal: DeviceDma,
    pub direction: DmaDirection,
}

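// `DmaMapHandle` carries a raw CPU pointer, which blocks the automatic `Send`
// impl, so `Send` is asserted manually here.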
unsafe impl Send for DCommon {}

impl DCommon {
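    /// Allocates a coherent DMA buffer described by `layout`, zeroes it
    /// through the CPU mapping, and flushes/invalidates the caches so the
    /// device observes the zeroed contents.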
    pub fn new_zero(
        os: &DeviceDma,
        layout: Layout,
        direction: DmaDirection,
    ) -> Result<Self, DmaError> {
        let handle = unsafe { os.alloc_coherent(layout) }?;
        let ptr = handle.cpu_addr;
        unsafe {
            ptr.write_bytes(0, handle.size());
        }
        os.flush_invalidate(ptr, handle.size());

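        // Wrap the raw allocation in a `DmaMapHandle` with `map_alloc_virt`
        // left unset.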
        Ok(Self {
            handle: DmaMapHandle {
                handle,
                map_alloc_virt: None,
            },
            osal: os.clone(),
            direction,
        })
    }

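    /// Views the whole buffer as a mutable byte slice via its CPU address.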
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        unsafe {
            core::slice::from_raw_parts_mut(self.handle.cpu_addr.as_ptr(), self.handle.size())
        }
    }

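    /// Delegates to the OS layer to prepare `size` bytes at `offset` for a
    /// CPU read after the device may have written them.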
    pub fn prepare_read(&self, offset: usize, size: usize) {
        self.osal
            .prepare_read(&self.handle, offset, size, self.direction);
    }

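    /// Delegates to the OS layer to make a CPU write of `size` bytes at
    /// `offset` visible to the device.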
    pub fn confirm_write(&self, offset: usize, size: usize) {
        self.osal
            .confirm_write(&self.handle, offset, size, self.direction);
    }

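    /// Same as [`Self::confirm_write`], but covers the entire buffer.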
    pub fn confirm_write_all(&self) {
        self.osal
            .confirm_write(&self.handle, 0, self.handle.size(), self.direction);
    }
}

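// Return the coherent allocation to the OS layer on drop; zero-sized handles
// are skipped.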
impl Drop for DCommon {
    fn drop(&mut self) {
        if self.handle.size() > 0 {
            unsafe {
                self.osal.dealloc_coherent(self.handle.handle);
            }
        }
    }
}