use core::{num::NonZeroUsize, ptr::NonNull};

use crate::{Direction, DmaError, DmaHandle};

cfg_if::cfg_if! {
    if #[cfg(target_arch = "aarch64")] {
        #[path = "aarch64.rs"]
        pub mod arch;
    } else {
        #[path = "nop.rs"]
        pub mod arch;
    }
}

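/// Platform-specific DMA operations: streaming buffer mappings, coherent
/// allocations, and CPU cache maintenance.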
pub trait DmaOp: Sync + Send + 'static {
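    /// Page size used for DMA mappings on this platform.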
    fn page_size(&self) -> usize;

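    /// Maps `size` bytes at `addr` for streaming DMA in the given `direction`,
    /// subject to the device's `dma_mask` and `align` requirements.
    ///
    /// # Safety
    /// `addr` must be valid for `size` bytes and must remain valid until the
    /// returned handle is passed to [`DmaOp::unmap_single`].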
    unsafe fn map_single(
        &self,
        dma_mask: u64,
        addr: NonNull<u8>,
        size: NonZeroUsize,
        align: usize,
        direction: Direction,
    ) -> Result<DmaHandle, DmaError>;

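    /// Releases a streaming mapping created by [`DmaOp::map_single`].
    ///
    /// # Safety
    /// `handle` must have been returned by [`DmaOp::map_single`] on this
    /// instance.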
    unsafe fn unmap_single(&self, handle: DmaHandle);

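    /// Writes back the CPU cache lines covering `addr..addr + size` so that
    /// CPU-written data becomes visible to the device.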
    fn flush(&self, addr: NonNull<u8>, size: usize) {
        arch::flush(addr, size)
    }

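    /// Invalidates the CPU cache lines covering `addr..addr + size` so that
    /// subsequent CPU reads observe data written by the device.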
    fn invalidate(&self, addr: NonNull<u8>, size: usize) {
        arch::invalidate(addr, size)
    }

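    /// Allocates a cache-coherent DMA buffer described by `layout`, reachable
    /// under `dma_mask`. Returns `None` if the allocation fails.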
    unsafe fn alloc_coherent(
        &self,
        dma_mask: u64,
        layout: core::alloc::Layout,
    ) -> Option<DmaHandle>;

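    /// Frees a buffer obtained from [`DmaOp::alloc_coherent`].
    ///
    /// # Safety
    /// `handle` must have been returned by [`DmaOp::alloc_coherent`] on this
    /// instance.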
    unsafe fn dealloc_coherent(&self, handle: DmaHandle);

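    /// Makes device-written data visible to the CPU.
    ///
    /// For `FromDevice` and `Bidirectional` transfers this invalidates the CPU
    /// caches over the affected range and, when a bounce buffer is in use,
    /// copies the received data back into the caller's original buffer.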
    fn prepare_read(&self, handle: &DmaHandle, offset: usize, size: usize, direction: Direction) {
        if matches!(direction, Direction::FromDevice | Direction::Bidirectional) {
            let ptr = unsafe { handle.dma_virt().add(offset) };

            self.invalidate(ptr, size);

            // If a bounce buffer is in use (the DMA allocation differs from the
            // caller's buffer), copy the device's data back into the original buffer.
            if let Some(virt) = handle.alloc_virt
                && virt != handle.origin_virt
            {
                unsafe {
                    let src = core::slice::from_raw_parts(ptr.as_ptr(), size);
                    let dst = core::slice::from_raw_parts_mut(
                        handle.origin_virt.as_ptr().add(offset),
                        size,
                    );

                    dst.copy_from_slice(src);
                }
            }
        }
    }

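    /// Publishes CPU-written data to the device.
    ///
    /// For `ToDevice` and `Bidirectional` transfers this copies from the
    /// caller's original buffer into the bounce buffer (when one is in use)
    /// and then flushes the CPU caches over the affected range.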
    fn confirm_write(&self, handle: &DmaHandle, offset: usize, size: usize, direction: Direction) {
        if matches!(direction, Direction::ToDevice | Direction::Bidirectional) {
            let ptr = unsafe { handle.dma_virt().add(offset) };

            // If a bounce buffer is in use, copy the caller's data into it before
            // the range is handed to the device.
            if let Some(virt) = handle.alloc_virt
                && virt != handle.origin_virt
            {
                unsafe {
                    core::ptr::copy_nonoverlapping(
                        handle.origin_virt.as_ptr().add(offset),
                        ptr.as_ptr(),
                        size,
                    );
                }
            }

            self.flush(ptr, size)
        }
    }
}