1use core::future::{poll_fn, Future};
6use core::pin::Pin;
7use core::sync::atomic::{fence, AtomicUsize, Ordering};
8use core::task::{Context, Poll, Waker};
9
10use embassy_hal_internal::{into_ref, Peripheral, PeripheralRef};
11use embassy_sync::waitqueue::AtomicWaker;
12
13use super::ringbuffer::{DmaCtrl, Error, ReadableDmaRingBuffer, WritableDmaRingBuffer};
14use super::word::{Word, WordSize};
15use super::{AnyChannel, Channel, Request, STATE};
16use crate::{interrupt, pac, peripherals};
17
18pub use pac::dmac::vals::Pl as Priority;
19pub use pac::dmac::vals::Dir as Dir;
20
/// Static description of one DMA channel: the DMAC instance it belongs to
/// and its channel index within that instance.
pub(crate) struct ChannelInfo {
    // Register block of the owning DMA controller.
    pub(crate) dma: pac::dmac::Dmac,
    // Channel index within `dma` (used to select CCR/CPAR/CNDTR/... registers).
    pub(crate) num: usize,
}
25
/// Options controlling how a DMA transfer is configured.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[non_exhaustive]
pub struct TransferOptions {
    /// Channel arbitration priority (written to CCR.PL).
    pub priority: Priority,
    /// NVIC priority for this channel's interrupt.
    pub interrupt_priority: interrupt::Priority,
    /// Enable circular mode so the transfer restarts automatically (CCR.CIRC).
    pub circular: bool,
    /// Raise an interrupt at the half-transfer point (CCR.HTIE).
    pub half_transfer_ir: bool,
    /// Raise an interrupt on transfer completion (CCR.TCIE).
    pub complete_transfer_ir: bool,
}
46
47impl Default for TransferOptions {
48 fn default() -> Self {
49 Self {
50 priority: Priority::VeryHigh,
51 interrupt_priority: interrupt::Priority::P1,
52 circular: false,
53 half_transfer_ir: false,
54 complete_transfer_ir: true,
55 }
56 }
57}
58
59impl From<WordSize> for pac::dmac::vals::Size {
60 fn from(raw: WordSize) -> Self {
61 match raw {
62 WordSize::OneByte => Self::Bits8,
63 WordSize::TwoBytes => Self::Bits16,
64 WordSize::FourBytes => Self::Bits32,
65 }
66 }
67}
68
/// Per-channel shared state between the interrupt handler and futures.
pub(crate) struct ChannelState {
    // Woken from `on_irq` on half/complete transfer events.
    waker: AtomicWaker,
    // Count of completed transfers since last reset; consumed by the
    // ring-buffer logic via `DmaCtrl::reset_complete_count`.
    complete_count: AtomicUsize,
}
73
impl ChannelState {
    /// Const initializer so `STATE` can be built as a static array.
    pub(crate) const NEW: Self = Self {
        waker: AtomicWaker::new(),
        complete_count: AtomicUsize::new(0),
    };
}
80
/// One-time DMA init: enables and resets the DMAC1 peripheral clock.
///
/// # Safety
/// Must be called exactly once during HAL initialization, inside the
/// provided critical section.
pub(crate) unsafe fn init(
    cs: critical_section::CriticalSection,
) {
    crate::rcc::enable_and_reset_with_cs::<peripherals::DMAC1>(cs);
}
87
88impl AnyChannel {
89 pub(crate) unsafe fn on_irq(&self) {
91 let info = self.info();
92 let state = &STATE[self.id as usize];
93 let r = info.dma;
94 let cr = r.ccr(info.num);
95 let isr = r.isr().read();
96
97 if isr.teif(info.num) {
98 panic!("DMA: error on DMA@{:08x} channel {}", r.as_ptr() as u32, info.num);
99 }
100
101 if isr.htif(info.num) && cr.read().htie() {
102 r.ifcr().write(|w| w.set_chtif(info.num % 4, true));
104 } else if isr.tcif(info.num % 4) && cr.read().tcie() {
105 r.ifcr().write(|w| w.set_ctcif(info.num % 4, true));
107
108 if !r.ccr(info.num).read().circ() {
111 r.ccr(info.num).modify(|w| {
112 w.set_en(false);
113 });
114 }
115 state.complete_count.fetch_add(1, Ordering::Release);
116 } else {
117 return;
118 }
119 state.waker.wake();
120 }
121
122 unsafe fn configure(
123 &self,
124 request: Request,
125 dir: Dir,
126 peri_addr: *const u32,
127 mem_addr: *mut u32,
128 mem_len: usize,
129 incr_mem: bool,
130 mem_size: WordSize,
131 peri_size: WordSize,
132 options: TransferOptions,
133 ) {
134 fence(Ordering::SeqCst);
136
137 let info = self.info();
138 let r = info.dma;
139 let state: &ChannelState = &STATE[self.id as usize];
140 let channel_num = info.num;
141
142 state.complete_count.store(0, Ordering::Release);
143 self.clear_irqs();
144
145 let ndtr = match (mem_size, peri_size) {
148 (WordSize::FourBytes, WordSize::OneByte) => mem_len * 4,
149 (WordSize::FourBytes, WordSize::TwoBytes) | (WordSize::TwoBytes, WordSize::OneByte) => mem_len * 2,
150 (WordSize::FourBytes, WordSize::FourBytes)
151 | (WordSize::TwoBytes, WordSize::TwoBytes)
152 | (WordSize::OneByte, WordSize::OneByte) => mem_len,
153 (WordSize::TwoBytes, WordSize::FourBytes) | (WordSize::OneByte, WordSize::TwoBytes) => {
154 assert!(mem_len % 2 == 0);
155 mem_len / 2
156 }
157 (WordSize::OneByte, WordSize::FourBytes) => {
158 assert!(mem_len % 4 == 0);
159 mem_len / 4
160 }
161 };
162
163 assert!(ndtr > 0 && ndtr <= 0xFFFF);
164
165
166 r.cpar(channel_num).write_value(pac::dmac::regs::Cpar(peri_addr as _));
167
168 let mem_addr = if mem_addr as u32 >= 0x2000_0000 {
171 mem_addr as u32
172 } else {
173 mem_addr as u32 + 0x5000_0000
174 };
175 info!("mem_addr {:X}", (mem_addr as u32 + 0x5000_0000));
176 r.cm0ar(channel_num).write_value(pac::dmac::regs::Cm0ar(mem_addr));
177 r.cndtr(channel_num).write_value(pac::dmac::regs::Cndtr(ndtr as _));
178 r.cselr(channel_num / 4)
179 .modify(|w| w.set_cs(channel_num % 4, request as u8));
180 r.ccr(channel_num).write(|w| {
181 w.set_dir(dir.into());
182 w.set_msize(mem_size.into());
183 w.set_psize(peri_size.into());
184 w.set_pl(options.priority.into());
185 w.set_minc(incr_mem);
186 w.set_pinc(false);
187 w.set_teie(true);
188 w.set_htie(options.half_transfer_ir);
189 w.set_tcie(options.complete_transfer_ir);
190 w.set_circ(options.circular);
191 w.set_en(false); });
193
194 crate::_generated::enable_dma_channel_interrupt_priority(self.id, options.interrupt_priority);
195 }
196
197 fn start(&self) {
198 let info = self.info();
199 let r = info.dma;
200 r.ccr(info.num).modify(|w| w.set_en(true))
201 }
202
203 fn clear_irqs(&self) {
204 let info = self.info();
205 let r = info.dma;
206
207 r.ifcr().write(|w| {
208 w.set_chtif(info.num, true);
209 w.set_ctcif(info.num, true);
210 w.set_cteif(info.num, true);
211 });
212 }
213
214 fn request_stop(&self) {
215 let info = self.info();
216 let r = info.dma;
217 r.ccr(info.num).write(|w| {
219 w.set_teie(true);
220 w.set_tcie(true);
221 });
222 }
223
224 fn request_pause(&self) {
225 let info = self.info();
226 let r = info.dma;
227 r.ccr(info.num).modify(|w| {
229 w.set_en(false);
230 });
231 }
232
233 fn is_running(&self) -> bool {
234 let info = self.info();
235 let r = info.dma;
236 r.ccr(info.num).read().en()
237 }
238
239 fn get_remaining_transfers(&self) -> u16 {
240 let info = self.info();
241 let r = info.dma;
242 r.cndtr(info.num).read().ndt()
243 }
244
245 fn disable_circular_mode(&self) {
246 let info = self.info();
247 let r = info.dma;
248 r.ccr(info.num).modify(|w| {
249 w.set_circ(false);
250 })
251 }
252
253 fn poll_stop(&self) -> Poll<()> {
254 use core::sync::atomic::compiler_fence;
255 compiler_fence(Ordering::SeqCst);
256
257 if !self.is_running() {
258 Poll::Ready(())
259 } else {
260 Poll::Pending
261 }
262 }
263}
264
/// An in-progress one-shot DMA transfer.
///
/// Awaiting it completes when the transfer finishes. Dropping it requests a
/// stop and busy-waits until the channel halts.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Transfer<'a> {
    channel: PeripheralRef<'a, AnyChannel>,
}
270
impl<'a> Transfer<'a> {
    /// Start a peripheral-to-memory transfer into `buf`.
    ///
    /// # Safety
    /// `peri_addr` must be a valid DMA-readable peripheral data register for
    /// `request`, and must stay valid for the transfer's duration.
    pub unsafe fn new_read<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        peri_addr: *mut W,
        buf: &'a mut [W],
        options: TransferOptions,
    ) -> Self {
        Self::new_read_raw(channel, request, peri_addr, buf, options)
    }

    /// Start a peripheral-to-memory transfer into a raw buffer, allowing the
    /// memory word size (`MW`) to differ from the peripheral word size (`PW`).
    ///
    /// # Safety
    /// As [`Self::new_read`]; additionally `buf` must be valid for writes for
    /// the whole duration of the transfer.
    pub unsafe fn new_read_raw<MW: Word, PW: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        peri_addr: *mut PW,
        buf: *mut [MW],
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::PeripheralToMemory,
            peri_addr as *const u32,
            buf as *mut MW as *mut u32,
            buf.len(),
            true, // increment the memory pointer after each datum
            MW::size(),
            PW::size(),
            options,
        )
    }

    /// Start a memory-to-peripheral transfer from `buf`.
    ///
    /// # Safety
    /// `peri_addr` must be a valid DMA-writable peripheral data register for
    /// `request`, and must stay valid for the transfer's duration.
    pub unsafe fn new_write<MW: Word, PW: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        buf: &'a [MW],
        peri_addr: *mut PW,
        options: TransferOptions,
    ) -> Self {
        Self::new_write_raw(channel, request, buf, peri_addr, options)
    }

    /// Start a memory-to-peripheral transfer from a raw buffer.
    ///
    /// # Safety
    /// As [`Self::new_write`]; additionally `buf` must be valid for reads for
    /// the whole duration of the transfer.
    pub unsafe fn new_write_raw<MW: Word, PW: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        buf: *const [MW],
        peri_addr: *mut PW,
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::MemoryToPeripheral,
            peri_addr as *const u32,
            buf as *const MW as *mut u32,
            buf.len(),
            true, // increment the memory pointer after each datum
            MW::size(),
            PW::size(),
            options,
        )
    }

    /// Write the same value `count` times to a peripheral (memory increment
    /// disabled, so the DMA re-reads `repeated` each time).
    ///
    /// # Safety
    /// As [`Self::new_write`]; `repeated` must stay valid for the transfer's
    /// duration.
    pub unsafe fn new_write_repeated<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        repeated: &'a W,
        count: usize,
        peri_addr: *mut W,
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::MemoryToPeripheral,
            peri_addr as *const u32,
            repeated as *const W as *mut u32,
            count,
            false, // fixed memory address: re-send the same word
            W::size(),
            W::size(),
            options,
        )
    }

    // Common path: validate length, configure the channel and start it.
    unsafe fn new_inner(
        channel: PeripheralRef<'a, AnyChannel>,
        request: Request,
        dir: Dir,
        peri_addr: *const u32,
        mem_addr: *mut u32,
        mem_len: usize,
        incr_mem: bool,
        mem_size: WordSize,
        peri_size: WordSize,
        options: TransferOptions,
    ) -> Self {
        // NDTR is 16 bits wide; empty transfers are invalid.
        assert!(mem_len > 0 && mem_len <= 0xFFFF);

        channel.configure(
            request, dir, peri_addr, mem_addr, mem_len, incr_mem, mem_size, peri_size, options,
        );
        channel.start();
        Self { channel }
    }

    /// Request the transfer to stop; observe completion via `is_running`.
    pub fn request_stop(&mut self) {
        self.channel.request_stop()
    }

    /// Pause the transfer (clears the channel enable bit).
    pub fn request_pause(&mut self) {
        self.channel.request_pause()
    }

    /// Whether the channel is still enabled.
    pub fn is_running(&mut self) -> bool {
        self.channel.is_running()
    }

    /// Remaining data units to transfer (hardware NDTR counter).
    pub fn get_remaining_transfers(&self) -> u16 {
        self.channel.get_remaining_transfers()
    }

    /// Busy-wait for completion without async.
    ///
    /// Consumes `self` and skips `Drop` (the channel has already stopped),
    /// hence the `mem::forget`.
    pub fn blocking_wait(mut self) {
        while self.is_running() {}

        // Order DMA-written data before subsequent CPU reads.
        fence(Ordering::SeqCst);

        core::mem::forget(self);
    }
}
429
impl<'a> Drop for Transfer<'a> {
    /// Stop the transfer and busy-wait until the channel halts, so the
    /// buffers borrowed for `'a` are no longer touched by hardware.
    fn drop(&mut self) {
        self.request_stop();
        while self.is_running() {}

        // Order DMA-written data before subsequent CPU reads.
        fence(Ordering::SeqCst);
    }
}
439
impl<'a> Unpin for Transfer<'a> {}
impl<'a> Future for Transfer<'a> {
    type Output = ();
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let state: &ChannelState = &STATE[self.channel.id as usize];

        // Register before checking, so a completion interrupt landing
        // between the check and the return cannot be missed.
        state.waker.register(cx.waker());

        if self.is_running() {
            Poll::Pending
        } else {
            Poll::Ready(())
        }
    }
}
/// Adapter exposing a DMA channel through the generic `DmaCtrl` trait used
/// by the ring-buffer implementation.
struct DmaCtrlImpl<'a>(PeripheralRef<'a, AnyChannel>);
458
459impl<'a> DmaCtrl for DmaCtrlImpl<'a> {
460 fn get_remaining_transfers(&self) -> usize {
461 self.0.get_remaining_transfers() as _
462 }
463
464 fn reset_complete_count(&mut self) -> usize {
465 let state = &STATE[self.0.id as usize];
466 return state.complete_count.swap(0, Ordering::AcqRel);
467 }
468
469 fn set_waker(&mut self, waker: &Waker) {
470 STATE[self.0.id as usize].waker.register(waker);
471 }
472}
473
/// Circular-mode DMA driven ring buffer for reading data from a peripheral.
pub struct ReadableRingBuffer<'a, W: Word> {
    channel: PeripheralRef<'a, AnyChannel>,
    ringbuf: ReadableDmaRingBuffer<'a, W>,
}
479
480impl<'a, W: Word> ReadableRingBuffer<'a, W> {
481 pub unsafe fn new(
483 channel: impl Peripheral<P = impl Channel> + 'a,
484 request: Request,
485 peri_addr: *mut W,
486 buffer: &'a mut [W],
487 mut options: TransferOptions,
488 ) -> Self {
489 into_ref!(channel);
490 let channel: PeripheralRef<'a, AnyChannel> = channel.map_into();
491
492 let buffer_ptr = buffer.as_mut_ptr();
493 let len = buffer.len();
494 let dir = Dir::PeripheralToMemory;
495 let data_size = W::size();
496
497 options.half_transfer_ir = true;
498 options.complete_transfer_ir = true;
499 options.circular = true;
500
501 channel.configure(
502 request,
503 dir,
504 peri_addr as *mut u32,
505 buffer_ptr as *mut u32,
506 len,
507 true,
508 data_size,
509 data_size,
510 options,
511 );
512
513 Self {
514 channel,
515 ringbuf: ReadableDmaRingBuffer::new(buffer),
516 }
517 }
518
519 pub fn start(&mut self) {
523 self.channel.start();
524 }
525
526 pub fn clear(&mut self) {
528 self.ringbuf.reset(&mut DmaCtrlImpl(self.channel.reborrow()));
529 }
530
531 pub fn read(&mut self, buf: &mut [W]) -> Result<(usize, usize), Error> {
537 self.ringbuf.read(&mut DmaCtrlImpl(self.channel.reborrow()), buf)
538 }
539
540 pub async fn read_exact(&mut self, buffer: &mut [W]) -> Result<usize, Error> {
552 self.ringbuf
553 .read_exact(&mut DmaCtrlImpl(self.channel.reborrow()), buffer)
554 .await
555 }
556
557 pub fn len(&mut self) -> Result<usize, Error> {
559 Ok(self.ringbuf.len(&mut DmaCtrlImpl(self.channel.reborrow()))?)
560 }
561
562 pub const fn capacity(&self) -> usize {
564 self.ringbuf.cap()
565 }
566
567 pub fn set_waker(&mut self, waker: &Waker) {
569 DmaCtrlImpl(self.channel.reborrow()).set_waker(waker);
570 }
571
572 pub fn request_stop(&mut self) {
578 self.channel.request_stop()
579 }
580
581 pub fn request_pause(&mut self) {
586 self.channel.request_pause()
587 }
588
589 pub fn is_running(&mut self) -> bool {
594 self.channel.is_running()
595 }
596
597 pub async fn stop(&mut self) {
607 self.channel.disable_circular_mode();
608 poll_fn(|cx| {
610 self.set_waker(cx.waker());
611 self.channel.poll_stop()
612 })
613 .await
614 }
615}
616
impl<'a, W: Word> Drop for ReadableRingBuffer<'a, W> {
    /// Stop the transfer and busy-wait until the channel halts, so the
    /// borrowed buffer is no longer written by hardware.
    fn drop(&mut self) {
        self.request_stop();
        while self.is_running() {}

        // Order DMA-written data before subsequent CPU reads.
        fence(Ordering::SeqCst);
    }
}
626
/// Circular-mode DMA driven ring buffer for writing data to a peripheral.
pub struct WritableRingBuffer<'a, W: Word> {
    channel: PeripheralRef<'a, AnyChannel>,
    ringbuf: WritableDmaRingBuffer<'a, W>,
}
632
633impl<'a, W: Word> WritableRingBuffer<'a, W> {
634 pub unsafe fn new(
636 channel: impl Peripheral<P = impl Channel> + 'a,
637 request: Request,
638 peri_addr: *mut W,
639 buffer: &'a mut [W],
640 mut options: TransferOptions,
641 ) -> Self {
642 into_ref!(channel);
643 let channel: PeripheralRef<'a, AnyChannel> = channel.map_into();
644
645 let len = buffer.len();
646 let dir = Dir::MemoryToPeripheral;
647 let data_size = W::size();
648 let buffer_ptr = buffer.as_mut_ptr();
649
650 options.half_transfer_ir = true;
651 options.complete_transfer_ir = true;
652 options.circular = true;
653
654 channel.configure(
655 request,
656 dir,
657 peri_addr as *mut u32,
658 buffer_ptr as *mut u32,
659 len,
660 true,
661 data_size,
662 data_size,
663 options,
664 );
665
666 Self {
667 channel,
668 ringbuf: WritableDmaRingBuffer::new(buffer),
669 }
670 }
671
672 pub fn start(&mut self) {
676 self.channel.start();
677 }
678
679 pub fn clear(&mut self) {
681 self.ringbuf.reset(&mut DmaCtrlImpl(self.channel.reborrow()));
682 }
683
684 pub fn write_immediate(&mut self, buf: &[W]) -> Result<(usize, usize), Error> {
687 self.ringbuf.write_immediate(buf)
688 }
689
690 pub fn write(&mut self, buf: &[W]) -> Result<(usize, usize), Error> {
693 self.ringbuf.write(&mut DmaCtrlImpl(self.channel.reborrow()), buf)
694 }
695
696 pub async fn write_exact(&mut self, buffer: &[W]) -> Result<usize, Error> {
698 self.ringbuf
699 .write_exact(&mut DmaCtrlImpl(self.channel.reborrow()), buffer)
700 .await
701 }
702
703 pub async fn wait_write_error(&mut self) -> Result<usize, Error> {
705 self.ringbuf
706 .wait_write_error(&mut DmaCtrlImpl(self.channel.reborrow()))
707 .await
708 }
709
710 pub fn len(&mut self) -> Result<usize, Error> {
712 Ok(self.ringbuf.len(&mut DmaCtrlImpl(self.channel.reborrow()))?)
713 }
714
715 pub const fn capacity(&self) -> usize {
717 self.ringbuf.cap()
718 }
719
720 pub fn set_waker(&mut self, waker: &Waker) {
722 DmaCtrlImpl(self.channel.reborrow()).set_waker(waker);
723 }
724
725 pub fn request_stop(&mut self) {
731 self.channel.request_stop()
732 }
733
734 pub fn request_pause(&mut self) {
739 self.channel.request_pause()
740 }
741
742 pub fn is_running(&mut self) -> bool {
747 self.channel.is_running()
748 }
749
750 pub async fn stop(&mut self) {
758 self.channel.disable_circular_mode();
759 poll_fn(|cx| {
761 self.set_waker(cx.waker());
762 self.channel.poll_stop()
763 })
764 .await
765 }
766}
767
impl<'a, W: Word> Drop for WritableRingBuffer<'a, W> {
    /// Stop the transfer and busy-wait until the channel halts, so the
    /// borrowed buffer is no longer read by hardware.
    fn drop(&mut self) {
        self.request_stop();
        while self.is_running() {}

        // Order DMA traffic before subsequent CPU accesses to the buffer.
        fence(Ordering::SeqCst);
    }
}