1use core::{alloc::Layout, cmp::PartialOrd, ops::Deref, ptr::NonNull};
2use derive_more::{
3 Add, AddAssign, Debug, Display, Div, From, Into, Mul, MulAssign, Sub, SubAssign,
4};
5
/// A device-visible (bus) address for DMA transfers.
///
/// Newtype over `u64`; arithmetic and conversions are derived via
/// `derive_more`. Both `Debug` and `Display` render the address as
/// `0x`-prefixed uppercase hex (`{_0:#X}`).
#[derive(
    Debug,
    Display,
    Clone,
    Copy,
    PartialEq,
    Eq,
    PartialOrd,
    Hash,
    From,
    Into,
    Add,
    AddAssign,
    Mul,
    MulAssign,
    Sub,
    SubAssign,
    Div,
)]
#[debug("{}", format_args!("{_0:#X}"))]
#[display("{}", format_args!("{_0:#X}"))]
pub struct DmaAddr(u64);
28
29impl DmaAddr {
30 pub fn as_u64(&self) -> u64 {
31 self.0
32 }
33
34 pub fn checked_add(&self, rhs: u64) -> Option<Self> {
35 self.0.checked_add(rhs).map(DmaAddr)
36 }
37}
38
39impl PartialEq<u64> for DmaAddr {
40 fn eq(&self, other: &u64) -> bool {
41 self.0 == *other
42 }
43}
44
45impl PartialOrd<u64> for DmaAddr {
46 fn partial_cmp(&self, other: &u64) -> Option<core::cmp::Ordering> {
47 self.0.partial_cmp(other)
48 }
49}
50
/// A CPU-side physical memory address.
///
/// Newtype over `u64`, kept distinct from [`DmaAddr`] so physical and
/// bus addresses cannot be mixed up at the type level. `Debug`/`Display`
/// render as `0x`-prefixed uppercase hex.
#[derive(Debug, Display, Clone, Copy, PartialEq, Eq, Hash, From, Into, Add, Mul, Sub)]
#[debug("{}", format_args!("{_0:#X}"))]
#[display("{}", format_args!("{_0:#X}"))]
pub struct PhysAddr(u64);
56
57impl PhysAddr {
58 pub fn as_u64(&self) -> u64 {
59 self.0
60 }
61}
62
/// Direction of a DMA transfer relative to main memory.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DmaDirection {
    /// Data flows from memory to the device (device reads the buffer).
    ToDevice,
    /// Data flows from the device to memory (device writes the buffer).
    FromDevice,
    /// The device may both read and write the buffer.
    Bidirectional,
}
73
/// Errors produced by DMA allocation and mapping operations.
///
/// `Display` text comes from the `thiserror` `#[error(...)]` attributes.
#[derive(thiserror::Error, Debug, Clone, PartialEq, Eq)]
pub enum DmaError {
    /// The underlying allocator could not satisfy the request.
    #[error("DMA allocation failed")]
    NoMemory,
    /// The requested size/alignment could not form a valid `Layout`
    /// (converted automatically via `#[from]`).
    #[error("Invalid layout")]
    LayoutError(#[from] core::alloc::LayoutError),
    /// The allocated address has bits set outside the device's
    /// addressing capability mask.
    #[error("DMA address {addr} does not match device mask {mask:#X}")]
    DmaMaskNotMatch { addr: DmaAddr, mask: u64 },
    /// The address does not satisfy the required alignment.
    #[error("DMA align mismatch: required={required:#X}, but address={address}")]
    AlignMismatch { required: usize, address: DmaAddr },
    /// A null CPU pointer was supplied where a mapped buffer was expected.
    #[error("Null pointer provided for DMA mapping")]
    NullPointer,
    /// A zero-length buffer was supplied; DMA requires at least one byte.
    #[error("Zero-sized buffer cannot be used for DMA")]
    ZeroSizedBuffer,
}
90
/// Descriptor for a DMA buffer: the CPU-visible pointer, the
/// device-visible address, and the layout it was allocated with.
///
/// NOTE(review): `cpu_addr` and `dma_addr` presumably refer to the same
/// underlying buffer — that invariant is established by the allocator,
/// not enforced here; confirm at the allocation site.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct DmaHandle {
    // CPU (virtual) pointer to the start of the buffer.
    pub(crate) cpu_addr: NonNull<u8>,
    // Device-visible bus address of the buffer.
    pub(crate) dma_addr: DmaAddr,
    // Size and alignment the buffer was allocated with.
    pub(crate) layout: Layout,
}
108
109impl DmaHandle {
110 pub unsafe fn new(cpu_addr: NonNull<u8>, dma_addr: DmaAddr, layout: Layout) -> Self {
129 Self {
130 cpu_addr,
131 dma_addr,
132 layout,
133 }
134 }
135
136 pub fn size(&self) -> usize {
138 self.layout.size()
139 }
140
141 pub fn align(&self) -> usize {
143 self.layout.align()
144 }
145
146 pub fn as_ptr(&self) -> NonNull<u8> {
148 self.cpu_addr
149 }
150
151 pub fn dma_addr(&self) -> DmaAddr {
153 self.dma_addr
154 }
155
156 pub fn layout(&self) -> Layout {
158 self.layout
159 }
160}
161
/// A [`DmaHandle`] produced by a mapping operation, plus the optional
/// allocator-side virtual address recorded at map time.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct DmaMapHandle {
    // The underlying DMA buffer descriptor (exposed via `Deref`).
    pub(crate) handle: DmaHandle,
    // Virtual address returned by the backing allocator, when one was
    // recorded; `None` otherwise. NOTE(review): exact semantics depend
    // on the mapping code — confirm at the call site.
    pub(crate) map_alloc_virt: Option<NonNull<u8>>,
}
167
// `DmaMapHandle` is a `DmaHandle` plus mapping bookkeeping, so deref
// delegation exposes the inner handle's accessors (`size`, `dma_addr`,
// ...) directly on the map handle.
// NOTE(review): `Deref` to a non-pointer target is borderline per the
// API guidelines; acceptable here since the type is handle-like.
impl Deref for DmaMapHandle {
    type Target = DmaHandle;
    fn deref(&self) -> &Self::Target {
        &self.handle
    }
}
174
175impl DmaMapHandle {
176 pub unsafe fn new(
198 cpu_addr: NonNull<u8>,
199 dma_addr: DmaAddr,
200 layout: Layout,
201 alloc_virt: Option<NonNull<u8>>,
202 ) -> Self {
203 let handle = DmaHandle {
204 cpu_addr,
205 dma_addr,
206 layout,
207 };
208 Self {
209 handle,
210 map_alloc_virt: alloc_virt,
211 }
212 }
213
214 pub fn alloc_virt(&self) -> Option<NonNull<u8>> {
215 self.map_alloc_virt
216 }
217}