1use core::{alloc::Layout, ptr::NonNull};
2
3use crate::{DeviceDma, DmaDirection, DmaError, common::DCommon};
4
/// A typed array backed by a DMA-capable buffer shared with a device.
///
/// Element accesses go through explicit cache-maintenance hooks
/// (`prepare_read*` / `confirm_write*`) on the underlying [`DCommon`]
/// allocation, so CPU and device views of the memory stay coherent.
pub struct DArray<T> {
    // Underlying DMA allocation: CPU pointer, bus address, size and the
    // direction-aware sync hooks.
    data: DCommon,
    // Marks that this container logically holds `T` values even though the
    // storage is the raw buffer inside `data`.
    _phantom: core::marker::PhantomData<T>,
}
46
// SAFETY: `DArray<T>` exclusively owns its DMA buffer, so moving the whole
// array to another thread is sound whenever the element type itself is
// `Send`. NOTE(review): no `Sync` impl is provided, so `&DArray<T>` cannot
// cross threads — confirm that is intentional.
unsafe impl<T> Send for DArray<T> where T: Send {}
48
49impl<T> DArray<T> {
50 pub(crate) fn new_zero_with_align(
51 os: &DeviceDma,
52 size: usize,
53 align: usize,
54 direction: DmaDirection,
55 ) -> Result<Self, DmaError> {
56 let layout = Layout::from_size_align(
57 size * core::mem::size_of::<T>(),
58 align.max(core::mem::align_of::<T>()),
59 )?;
60 let data = DCommon::new_zero(os, layout, direction)?;
61 Ok(Self {
62 data,
63 _phantom: core::marker::PhantomData,
64 })
65 }
66
67 pub(crate) fn new_zero(
68 os: &DeviceDma,
69 size: usize,
70 direction: DmaDirection,
71 ) -> Result<Self, DmaError> {
72 Self::new_zero_with_align(os, size, core::mem::align_of::<T>(), direction)
73 }
74
75 pub fn dma_addr(&self) -> crate::DmaAddr {
84 self.data.handle.dma_addr
85 }
86
87 pub fn len(&self) -> usize {
93 self.data.handle.size() / core::mem::size_of::<T>()
94 }
95
96 pub fn is_empty(&self) -> bool {
102 self.len() == 0
103 }
104
105 pub fn bytes_len(&self) -> usize {
111 self.data.handle.size()
112 }
113
114 pub fn read(&self, index: usize) -> Option<T> {
128 if index >= self.len() {
129 return None;
130 }
131
132 unsafe {
133 let offset = index * core::mem::size_of::<T>();
134 self.data.prepare_read(offset, core::mem::size_of::<T>());
135 Some(self.data.handle.cpu_addr.cast().add(index).read())
136 }
137 }
138
139 pub fn set(&mut self, index: usize, value: T) {
154 assert!(
155 index < self.len(),
156 "index out of range, index: {},len: {}",
157 index,
158 self.len()
159 );
160
161 unsafe {
162 let offset = index * core::mem::size_of::<T>();
163 let ptr = self.data.handle.cpu_addr.cast::<T>().add(index);
164 ptr.write(value);
165 self.data.confirm_write(offset, core::mem::size_of::<T>());
166 }
167 }
168
169 pub fn iter(&self) -> DArrayIter<'_, T> {
178 DArrayIter {
179 array: self,
180 index: 0,
181 }
182 }
183
184 pub fn copy_from_slice(&mut self, src: &[T]) {
196 assert!(
197 src.len() <= self.len(),
198 "source slice is larger than DArray, src len: {}, DArray len: {}",
199 src.len(),
200 self.len()
201 );
202 unsafe {
203 let dst_ptr = self.data.handle.cpu_addr.as_ptr();
204 let len = core::mem::size_of_val(src);
205 dst_ptr.copy_from_nonoverlapping(src.as_ptr() as *const u8, len);
206 }
207 self.data.confirm_write_all();
208 }
209
210 pub fn prepare_read(&self, offset: usize, size: usize) {
214 assert!(
215 offset <= self.bytes_len() && size <= self.bytes_len().saturating_sub(offset),
216 "range out of bounds, offset: {}, size: {}, bytes_len: {}",
217 offset,
218 size,
219 self.bytes_len()
220 );
221 self.data.prepare_read(offset, size);
222 }
223
224 pub fn confirm_write(&self, offset: usize, size: usize) {
228 assert!(
229 offset <= self.bytes_len() && size <= self.bytes_len().saturating_sub(offset),
230 "range out of bounds, offset: {}, size: {}, bytes_len: {}",
231 offset,
232 size,
233 self.bytes_len()
234 );
235 self.data.confirm_write(offset, size);
236 }
237
238 pub fn prepare_read_all(&self) {
240 self.data.prepare_read(0, self.bytes_len());
241 }
242
243 pub fn confirm_write_all(&self) {
245 self.data.confirm_write_all();
246 }
247
248 pub fn write_with<R>(&mut self, len: usize, f: impl FnOnce(&mut [T]) -> R) -> R {
250 assert!(
251 len <= self.len(),
252 "range out of bounds, len: {}, array len: {}",
253 len,
254 self.len()
255 );
256 let ret = {
257 let data = unsafe { self.as_mut_slice() };
258 f(&mut data[..len])
259 };
260 self.confirm_write(0, len * core::mem::size_of::<T>());
261 ret
262 }
263
264 pub fn read_with<R>(&self, len: usize, f: impl FnOnce(&[T]) -> R) -> R {
266 assert!(
267 len <= self.len(),
268 "range out of bounds, len: {}, array len: {}",
269 len,
270 self.len()
271 );
272 self.prepare_read(0, len * core::mem::size_of::<T>());
273 let data = unsafe { core::slice::from_raw_parts(self.as_ptr().as_ptr(), len) };
274 f(data)
275 }
276
277 pub unsafe fn as_mut_slice(&mut self) -> &mut [T] {
281 let ptr = self.data.handle.cpu_addr;
282 unsafe {
283 core::slice::from_raw_parts_mut(
284 ptr.as_ptr() as *mut T,
285 self.bytes_len() / core::mem::size_of::<T>(),
286 )
287 }
288 }
289
290 pub fn as_ptr(&self) -> NonNull<T> {
291 self.data.handle.as_ptr().cast::<T>()
292 }
293}
294
/// Copying iterator over a [`DArray`], created by [`DArray::iter`].
///
/// Each step performs the per-element cache maintenance done by
/// [`DArray::read`] and yields a bit-copy of the element.
pub struct DArrayIter<'a, T> {
    // Borrowed source array.
    array: &'a DArray<T>,
    // Index of the next element to yield.
    index: usize,
}
299
300impl<'a, T> Iterator for DArrayIter<'a, T> {
301 type Item = T;
302
303 fn next(&mut self) -> Option<Self::Item> {
304 if self.index >= self.array.len() {
305 return None;
306 }
307 let value = self.array.read(self.index);
308 self.index += 1;
309 value
310 }
311}