// dma_api/lib.rs

1#![cfg_attr(target_os = "none", no_std)]
2#![doc = include_str!("../README.md")]
3
4extern crate alloc;
5
6use core::{alloc::Layout, num::NonZeroUsize, ops::Deref, ptr::NonNull};
7
8mod osal;
9
10mod array;
11mod common;
12mod dbox;
13// mod slice;
14
15pub use array::*;
16pub use common::SingleMapping;
17pub use dbox::*;
18pub use osal::DmaOp;
19// pub use slice::*;
20
21// mod stream;
22
23// pub use stream::*;
24
/// DMA transfer direction.
///
/// Mirrors Linux's `enum dma_data_direction`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum Direction {
    /// Data flows from the CPU to the device (`DMA_TO_DEVICE`).
    ToDevice,
    /// Data flows from the device to the CPU (`DMA_FROM_DEVICE`).
    FromDevice,
    /// Data may flow in both directions (`DMA_BIDIRECTIONAL`).
    Bidirectional,
}
38
/// DMA (bus) address type, as seen by the device.
pub type DmaAddr = u64;

/// Physical address type.
pub type PhysAddr = u64;
44
/// Errors produced by DMA allocation and mapping operations.
#[derive(thiserror::Error, Debug, Clone, Copy, PartialEq, Eq)]
pub enum DmaError {
    /// The underlying allocator could not satisfy the request.
    #[error("DMA allocation failed")]
    NoMemory,
    /// The requested `Layout` was invalid (e.g. bad size/align combination).
    #[error("Invalid layout for DMA allocation")]
    LayoutError,
    /// The backend produced a DMA address outside the device's address mask.
    #[error("DMA address {addr:#x} does not match device mask {mask:#x}")]
    DmaMaskNotMatch { addr: DmaAddr, mask: u64 },
}
55
56impl From<core::alloc::LayoutError> for DmaError {
57    fn from(_: core::alloc::LayoutError) -> Self {
58        DmaError::LayoutError
59    }
60}
61
/// Bookkeeping for one DMA allocation or mapping, produced by the OS
/// backend (`DmaOp`) and handed back when the region is released.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct DmaHandle {
    /// Virtual address of the caller's original buffer.
    pub origin_virt: NonNull<u8>,
    /// Bus address the device should use for this region.
    pub dma_addr: DmaAddr,
    /// Size and alignment of the region.
    pub layout: Layout,
    /// Alternate virtual address supplied by the backend; when `Some`, it is
    /// preferred over `origin_virt` by `dma_virt()`.
    /// NOTE(review): presumably a backend bounce/allocation buffer — confirm
    /// against the `DmaOp` implementations.
    pub alloc_virt: Option<NonNull<u8>>,
}
69impl DmaHandle {
70    pub fn size(&self) -> usize {
71        self.layout.size()
72    }
73
74    pub fn align(&self) -> usize {
75        self.layout.align()
76    }
77
78    pub fn as_ptr(&self) -> *mut u8 {
79        self.origin_virt.as_ptr()
80    }
81
82    pub(crate) fn dma_virt(&self) -> NonNull<u8> {
83        if let Some(virt) = self.alloc_virt {
84            virt
85        } else {
86            self.origin_virt
87        }
88    }
89
90    pub fn dma_addr(&self) -> DmaAddr {
91        self.dma_addr
92    }
93}
// SAFETY: NOTE(review) — `DmaHandle` is `!Send` by default because of its
// `NonNull` pointers. This asserts the referenced DMA memory is not tied to
// the creating thread; confirm the `DmaOp` backends uphold this.
unsafe impl Send for DmaHandle {}
95
// Deref to the layout lets callers use `Layout` methods directly on a handle.
// NOTE(review): Deref-to-non-pointer is usually discouraged; kept because
// callers may rely on it.
impl Deref for DmaHandle {
    type Target = core::alloc::Layout;
    fn deref(&self) -> &Self::Target {
        &self.layout
    }
}
102
/// Per-device DMA context: the OS backend plus the device's address mask.
///
/// `Clone` is cheap — it copies the mask and the `&'static` backend reference.
#[derive(Clone)]
pub struct DeviceDma {
    /// OS abstraction layer providing allocation and cache maintenance.
    os: &'static dyn DmaOp,
    /// Device DMA address mask (limits the bus addresses the device can reach).
    mask: u64,
}
108
109impl DeviceDma {
110    pub fn new(dma_mask: u64, osal: &'static dyn DmaOp) -> Self {
111        Self {
112            mask: dma_mask,
113            os: osal,
114        }
115    }
116
    /// The device's DMA address mask, as given to [`DeviceDma::new`].
    pub fn dma_mask(&self) -> u64 {
        self.mask
    }
120
    /// Cache flush for `size` bytes starting at `addr`; delegates to the OS
    /// backend's `flush` (exact cache semantics are defined by the `DmaOp`
    /// implementation).
    pub fn flush(&self, addr: NonNull<u8>, size: usize) {
        self.os.flush(addr, size)
    }
124
    /// Cache invalidate for `size` bytes starting at `addr`; delegates to the
    /// OS backend's `invalidate` (exact cache semantics are defined by the
    /// `DmaOp` implementation).
    pub fn invalidate(&self, addr: NonNull<u8>, size: usize) {
        self.os.invalidate(addr, size)
    }
128
    /// System page size in bytes, as reported by the OS backend.
    pub fn page_size(&self) -> usize {
        self.os.page_size()
    }
132
    /// Cache maintenance before the CPU reads `size` bytes at `offset` inside
    /// `handle`; delegates to the backend's `prepare_read` (which maintenance
    /// is needed per `direction` is decided by the `DmaOp` implementation).
    fn prepare_read(&self, handle: &DmaHandle, offset: usize, size: usize, direction: Direction) {
        self.os.prepare_read(handle, offset, size, direction)
    }
136
    /// Cache maintenance after the CPU writes `size` bytes at `offset` inside
    /// `handle`; delegates to the backend's `confirm_write` (which maintenance
    /// is needed per `direction` is decided by the `DmaOp` implementation).
    fn confirm_write(&self, handle: &DmaHandle, offset: usize, size: usize, direction: Direction) {
        self.os.confirm_write(handle, offset, size, direction)
    }
140
141    unsafe fn alloc_coherent(&self, layout: core::alloc::Layout) -> Option<DmaHandle> {
142        let res = unsafe { self.os.alloc_coherent(self.mask, layout) };
143        #[cfg(debug_assertions)]
144        {
145            if let Some(ref handle) = res {
146                assert!(
147                    self.mask >= handle.dma_addr + layout.size() as u64,
148                    "DMA mask not match: addr={:#x}, size={:#x}, mask={:#x}",
149                    handle.dma_addr,
150                    layout.size(),
151                    self.mask
152                );
153            }
154        }
155        res
156    }
157
    /// Release a region previously returned by [`Self::alloc_coherent`].
    ///
    /// # Safety
    /// `handle` must have come from `alloc_coherent` on this context, must
    /// not be released twice, and the memory must not be used afterwards.
    unsafe fn dealloc_coherent(&self, handle: DmaHandle) {
        unsafe { self.os.dealloc_coherent(handle) }
    }
161
162    unsafe fn _map_single(
163        &self,
164        addr: NonNull<u8>,
165        size: NonZeroUsize,
166        align: usize,
167        direction: Direction,
168    ) -> Result<DmaHandle, DmaError> {
169        let res = unsafe { self.os.map_single(self.mask, addr, size, align, direction) };
170        #[cfg(debug_assertions)]
171        {
172            if let Ok(ref handle) = res {
173                assert!(
174                    self.mask >= handle.dma_addr + size.get() as u64,
175                    "DMA mask not match: addr={:#x}, size={:#x}, mask={:#x}",
176                    handle.dma_addr,
177                    size,
178                    self.mask
179                );
180
181                assert!(
182                    handle.dma_addr % (align as u64) == 0,
183                    "DMA address not aligned: addr={:#x}, align={:#x}",
184                    handle.dma_addr,
185                    align
186                );
187            }
188        }
189
190        res
191    }
192
    /// Release a mapping created by [`Self::_map_single`].
    ///
    /// # Safety
    /// `handle` must have come from `_map_single` on this context, and the
    /// device must no longer access the mapped region.
    unsafe fn unmap_single(&self, handle: DmaHandle) {
        unsafe { self.os.unmap_single(handle) }
    }
196
    /// Allocate a zero-initialized DMA array of `size` elements of `T`,
    /// aligned to `align` bytes, for transfers in `direction`.
    ///
    /// # Errors
    /// Propagates any [`DmaError`] from the allocation.
    pub fn new_array<T>(
        &self,
        size: usize,
        align: usize,
        direction: Direction,
    ) -> Result<array::DArray<T>, DmaError> {
        array::DArray::new_zero(self, size, align, direction)
    }
205
    /// Allocate a zero-initialized DMA box holding a single `T`, aligned to
    /// `align` bytes, for transfers in `direction`.
    ///
    /// # Errors
    /// Propagates any [`DmaError`] from the allocation.
    pub fn new_box<T>(
        &self,
        align: usize,
        direction: Direction,
    ) -> Result<dbox::DBox<T>, DmaError> {
        dbox::DBox::new_zero(self, align, direction)
    }
213
    /// Map `size` bytes at `addr` for streaming DMA in `direction`, with the
    /// DMA address aligned to `align`, via [`common::SingleMapping::new`].
    ///
    /// # Errors
    /// Propagates any [`DmaError`] from the mapping.
    pub fn map_single(
        &self,
        addr: NonNull<u8>,
        size: NonZeroUsize,
        align: usize,
        direction: Direction,
    ) -> Result<common::SingleMapping, DmaError> {
        common::SingleMapping::new(self, addr, size, align, direction)
    }
223}