//! DMA support for the kernel memory subsystem
//! (`sparreal_kernel/os/mem/dma.rs`).
use alloc::alloc::handle_alloc_error;
use core::{alloc::Layout, num::NonZeroUsize, ptr::NonNull};

pub use dma_api::{DmaDirection, DmaError, DmaHandle, DmaMapHandle, DmaOp};

use super::{
    address::{PhysAddr, VirtAddr},
    kernel_memory_allocator, page_size,
};

/// Zero-sized marker type: the kernel's implementation of the DMA
/// operation backend (see the `DmaOp` impl below).
pub struct KernelDmaOp;

/// The single, process-wide instance backing [`kernel_dma_op`].
static KERNEL_DMA_OP: KernelDmaOp = KernelDmaOp;

/// Returns a `'static` reference to the kernel's DMA backend.
///
/// Every call yields a reference to the same static instance.
pub fn kernel_dma_op() -> &'static KernelDmaOp {
    &KERNEL_DMA_OP
}

19impl DmaOp for KernelDmaOp {
20    fn page_size(&self) -> usize {
21        page_size()
22    }
23
24    unsafe fn map_single(
25        &self,
26        dma_mask: u64,
27        addr: NonNull<u8>,
28        size: NonZeroUsize,
29        align: usize,
30        _direction: DmaDirection,
31    ) -> Result<DmaMapHandle, DmaError> {
32        let layout = Layout::from_size_align(size.get(), align.max(1))?;
33        let phys: PhysAddr = VirtAddr::from(addr).into();
34        let dma_addr = phys.raw() as u64;
35
36        if dma_addr > dma_mask || !dma_addr.is_multiple_of(align.max(1) as u64) {
37            return Err(DmaError::AlignMismatch {
38                required: align.max(1),
39                address: dma_addr.into(),
40            });
41        }
42
43        Ok(unsafe { DmaMapHandle::new(addr, dma_addr.into(), layout, None) })
44    }
45
46    unsafe fn unmap_single(&self, _handle: DmaMapHandle) {}
47
48    unsafe fn alloc_coherent(&self, dma_mask: u64, layout: Layout) -> Option<DmaHandle> {
49        let ptr = unsafe { kernel_memory_allocator().alloc_with_mask(layout, dma_mask) };
50        let ptr = NonNull::new(ptr)?;
51
52        unsafe {
53            ptr.as_ptr().write_bytes(0, layout.size());
54        }
55
56        let phys: PhysAddr = VirtAddr::from(ptr).into();
57        let dma_addr = phys.raw() as u64;
58        if dma_addr > dma_mask || !dma_addr.is_multiple_of(layout.align() as u64) {
59            unsafe { kernel_memory_allocator().dealloc_raw(ptr.as_ptr(), layout) };
60            return None;
61        }
62
63        Some(unsafe { DmaHandle::new(ptr, dma_addr.into(), layout) })
64    }
65
66    unsafe fn dealloc_coherent(&self, handle: DmaHandle) {
67        unsafe { kernel_memory_allocator().dealloc_raw(handle.as_ptr().as_ptr(), handle.layout()) }
68    }
69}
70
71pub fn alloc_with_mask(layout: Layout, dma_mask: u64) -> NonNull<u8> {
72    let ptr = unsafe { kernel_memory_allocator().alloc_with_mask(layout, dma_mask) };
73    NonNull::new(ptr).unwrap_or_else(|| handle_alloc_error(layout))
74}