1use core::future::{poll_fn, Future};
6use core::pin::Pin;
7use core::sync::atomic::{fence, AtomicUsize, Ordering};
8use core::task::{Context, Poll, Waker};
9
10use embassy_hal_internal::{into_ref, Peripheral, PeripheralRef};
11use embassy_sync::waitqueue::AtomicWaker;
12use py32_metapac::dma::vals;
13
14use super::ringbuffer::{DmaCtrl, Error, ReadableDmaRingBuffer, WritableDmaRingBuffer};
15use super::word::{Word, WordSize};
16use super::{AnyChannel, Channel, Dir, Request, STATE};
17use crate::interrupt::typelevel::Interrupt;
18use crate::{interrupt, pac};
19
/// Static per-channel metadata: which DMA controller a channel belongs to
/// and its index within that controller.
pub(crate) struct ChannelInfo {
    /// The DMA controller register block this channel lives on.
    pub(crate) dma: DmaInfo,
    /// Channel index within the controller (0-based).
    pub(crate) num: usize,
}
26
/// Identifies the DMA controller a channel belongs to.
///
/// Only one flavour exists today; wrapped in an enum so further controller
/// types can be added without changing call sites.
#[derive(Clone, Copy)]
pub(crate) enum DmaInfo {
    /// Register block of a general-purpose DMA controller.
    Dma(pac::dma::Dma),
}
31
/// Options for configuring a DMA transfer.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[non_exhaustive]
pub struct TransferOptions {
    /// Channel arbitration priority (hardware `PL` field).
    pub priority: Priority,
    /// Circular (ring) mode: the transfer restarts automatically when done.
    pub circular: bool,
    /// Raise an interrupt when half of the buffer has been transferred.
    pub half_transfer_ir: bool,
    /// Raise an interrupt when the transfer completes.
    pub complete_transfer_ir: bool,
}
58
59impl Default for TransferOptions {
60 fn default() -> Self {
61 Self {
62 priority: Priority::VeryHigh,
67 circular: false,
68 half_transfer_ir: false,
69 complete_transfer_ir: true,
70 }
71 }
72}
73
/// DMA channel arbitration priority (maps to the hardware `PL` field).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Priority {
    /// Low priority.
    Low,
    /// Medium priority.
    Medium,
    /// High priority.
    High,
    /// Very high priority.
    VeryHigh,
}
87
88impl From<Priority> for vals::Pl {
89 fn from(value: Priority) -> Self {
90 match value {
91 Priority::Low => pac::dma::vals::Pl::LOW,
92 Priority::Medium => pac::dma::vals::Pl::MEDIUM,
93 Priority::High => pac::dma::vals::Pl::HIGH,
94 Priority::VeryHigh => pac::dma::vals::Pl::VERYHIGH,
95 }
96 }
97}
98
99impl From<WordSize> for vals::Size {
100 fn from(raw: WordSize) -> Self {
101 match raw {
102 WordSize::OneByte => Self::BITS8,
103 WordSize::TwoBytes => Self::BITS16,
104 WordSize::FourBytes => Self::BITS32,
105 }
106 }
107}
108
109impl From<Dir> for vals::Dir {
110 fn from(raw: Dir) -> Self {
111 match raw {
112 Dir::MemoryToPeripheral => Self::MEMORYTOPERIPHERAL,
113 Dir::PeripheralToMemory => Self::PERIPHERALTOMEMORY,
114 }
115 }
116}
117
/// Per-channel runtime state shared between the interrupt handler and tasks.
pub(crate) struct ChannelState {
    // Woken from the ISR on half-transfer / transfer-complete events.
    waker: AtomicWaker,
    // Count of transfer-complete events; incremented in `on_irq`, read and
    // reset by `DmaCtrlImpl::reset_complete_count`.
    complete_count: AtomicUsize,
}
122
impl ChannelState {
    /// Const initializer, intended for building the static per-channel
    /// state table (see `STATE`).
    pub(crate) const NEW: Self = Self {
        waker: AtomicWaker::new(),
        complete_count: AtomicUsize::new(0),
    };
}
129
130pub(crate) unsafe fn init(
132 cs: critical_section::CriticalSection,
133 dma_priority: interrupt::Priority,
134) {
135 foreach_interrupt! {
136 ($peri:ident, dma, $block:ident, $signal_name:ident, $irq:ident) => {
137 crate::interrupt::typelevel::$irq::set_priority_with_cs(cs, dma_priority);
138 crate::interrupt::typelevel::$irq::enable();
140 };
141 ($peri:ident, bdma, $block:ident, $signal_name:ident, $irq:ident) => {
142 crate::interrupt::typelevel::$irq::set_priority_with_cs(cs, bdma_priority);
143 crate::interrupt::typelevel::$irq::enable();
145 };
146 }
147 crate::_generated::init_dma();
148}
149
impl AnyChannel {
    /// Interrupt handler body for this channel.
    ///
    /// Acknowledges half-transfer / transfer-complete events, counts
    /// completions, and wakes the registered waker.
    ///
    /// # Safety
    /// Must only be called from the interrupt handler servicing this
    /// channel's IRQ.
    pub(crate) unsafe fn on_irq(&self) {
        let info = self.info();
        let state = &STATE[self.id as usize];
        match self.info().dma {
            DmaInfo::Dma(r) => {
                let cr = r.st(info.num).cr();
                let isr = r.isr().read();

                // A transfer error indicates a bad address/configuration;
                // fail loudly rather than hang forever.
                if isr.teif(info.num) {
                    panic!(
                        "DMA: error on DMA@{:08x} channel {}",
                        r.as_ptr() as u32,
                        info.num
                    );
                }

                if isr.htif(info.num) && cr.read().htie() {
                    // Half-transfer: just acknowledge; waker is woken below.
                    r.ifcr().write(|w| w.set_htif(info.num, true));
                } else if isr.tcif(info.num) && cr.read().tcie() {
                    // Transfer complete: acknowledge and count the completion.
                    r.ifcr().write(|w| w.set_tcif(info.num, true));

                    // Plain load+store instead of `fetch_add`: this counter is
                    // only ever incremented here (elsewhere it is only read or
                    // reset), and ARMv6-M targets lack atomic RMW instructions.
                    let count = state.complete_count.load(Ordering::Acquire);
                    state.complete_count.store(count + 1, Ordering::Release);

                    // When NDTR has drained to zero, disable the channel so
                    // `is_running()` reports completion.
                    if r.st(info.num).ndtr().read() == 0 {
                        r.st(info.num).cr().modify(|w| {
                            w.set_en(false);
                        });
                    }
                } else {
                    // Event we did not enable (or spurious): nothing to wake.
                    return;
                }
                state.waker.wake();
            }
        }
    }

    /// Program the channel registers for a transfer. Leaves the channel
    /// disabled; call [`Self::start`] to begin moving data.
    ///
    /// # Safety
    /// `peri_addr` and `mem_addr` must be valid for `mem_len` transfers of
    /// `data_size` for the whole duration of the transfer.
    unsafe fn configure(
        &self,
        request: Request,
        dir: Dir,
        peri_addr: *const u32,
        mem_addr: *mut u32,
        mem_len: usize,
        incr_mem: bool,
        data_size: WordSize,
        options: TransferOptions,
    ) {
        let info = self.info();

        // NDTR is a 16-bit count register.
        assert!(mem_len > 0 && mem_len <= 0xFFFF);

        // Route the peripheral request to this channel via SYSCFG:
        // channels 0..=3 map through CFGR3; 4..=7 (py32f072 only) through CFGR4.
        match info.num / 4 {
            0 => {
                pac::SYSCFG.cfgr3().modify(|w| {
                    w.set_dma_map(info.num % 4, request);
                });
            }
            #[cfg(py32f072)]
            1 => {
                pac::SYSCFG.cfgr4().modify(|w| {
                    w.set_dma_map(info.num % 4, request);
                });
            }
            _ => panic!("Invalid DMA channel number"),
        }

        match self.info().dma {
            DmaInfo::Dma(r) => {
                let ch = r.st(info.num);

                // Make prior memory writes visible before the DMA can read them.
                fence(Ordering::SeqCst);

                self.clear_irqs();

                ch.par().write_value(peri_addr as u32);
                ch.mar().write_value(mem_addr as u32);
                ch.ndtr().write_value(mem_len as _);

                ch.cr().write(|w| {
                    w.set_dir(dir.into());
                    w.set_msize(data_size.into());
                    w.set_psize(data_size.into());
                    w.set_pl(options.priority.into());
                    w.set_minc(incr_mem);
                    w.set_pinc(false);
                    w.set_circ(options.circular);

                    // Transfer errors are always trapped; see `on_irq`.
                    w.set_teie(true);
                    w.set_htie(options.half_transfer_ir);
                    w.set_tcie(options.complete_transfer_ir);

                    // Configured but not yet running; `start()` sets EN.
                    w.set_en(false); });
            }
        }
    }

    /// Enable the channel, starting the previously configured transfer.
    fn start(&self) {
        let info = self.info();
        match self.info().dma {
            DmaInfo::Dma(r) => {
                let ch = r.st(info.num);
                ch.cr().modify(|w| w.set_en(true))
            }
        }
    }

    /// Clear all pending interrupt flags for this channel (global flag
    /// clears the half/complete/error flags with it).
    fn clear_irqs(&self) {
        let info = self.info();
        match self.info().dma {
            DmaInfo::Dma(r) => {
                r.ifcr().write(|w| {
                    w.set_gif(info.num, true);
                });
            }
        }
    }

    /// Request the channel to stop.
    ///
    /// NOTE(review): this uses `write` (not `modify`), which resets CR —
    /// clearing EN and the transfer configuration — while keeping TEIE/TCIE
    /// set so a pending completion still raises the IRQ and wakes waiters.
    fn request_stop(&self) {
        let info = self.info();
        match self.info().dma {
            DmaInfo::Dma(r) => {
                r.st(info.num).cr().write(|w| {
                    w.set_teie(true);
                    w.set_tcie(true);
                });
            }
        }
    }

    /// Pause the channel by clearing its enable bit; the rest of the
    /// configuration is left intact.
    fn request_pause(&self) {
        let info = self.info();
        match self.info().dma {
            DmaInfo::Dma(r) => {
                r.st(info.num).cr().modify(|w| {
                    w.set_en(false);
                });
            }
        }
    }

    /// Whether the channel's enable bit is currently set.
    fn is_running(&self) -> bool {
        let info = self.info();
        match self.info().dma {
            DmaInfo::Dma(r) => r.st(info.num).cr().read().en(),
        }
    }

    /// Remaining transfer count (NDTR), in units of the configured word size.
    fn get_remaining_transfers(&self) -> u16 {
        let info = self.info();
        match self.info().dma {
            DmaInfo::Dma(r) => r.st(info.num).ndtr().read() as _,
        }
    }

    /// Turn off circular mode so the current pass is the last one.
    fn disable_circular_mode(&self) {
        let info = self.info();
        match self.info().dma {
            DmaInfo::Dma(regs) => regs.st(info.num).cr().modify(|w| {
                w.set_circ(false);
            }),
        }
    }

    /// Poll whether the channel has actually stopped (EN reads back false).
    fn poll_stop(&self) -> Poll<()> {
        use core::sync::atomic::compiler_fence;
        // Prevent the compiler from reordering around the register read.
        compiler_fence(Ordering::SeqCst);

        if !self.is_running() {
            Poll::Ready(())
        } else {
            Poll::Pending
        }
    }
}
343
/// An in-progress one-shot DMA transfer.
///
/// Completes when awaited. Dropping it requests a stop and busy-waits until
/// the hardware reports the channel disabled.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Transfer<'a> {
    channel: PeripheralRef<'a, AnyChannel>,
}
349
impl<'a> Transfer<'a> {
    /// Create a peripheral→memory transfer into a mutable slice.
    ///
    /// # Safety
    /// `peri_addr` must be a DMA-readable peripheral data register matching
    /// `request`, valid for the lifetime of the transfer.
    pub unsafe fn new_read<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        peri_addr: *mut W,
        buf: &'a mut [W],
        options: TransferOptions,
    ) -> Self {
        Self::new_read_raw(channel, request, peri_addr, buf, options)
    }

    /// Create a peripheral→memory transfer into a raw slice pointer.
    ///
    /// # Safety
    /// As [`Self::new_read`]; additionally `buf` must stay valid and not be
    /// accessed while the transfer is running.
    pub unsafe fn new_read_raw<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        peri_addr: *mut W,
        buf: *mut [W],
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::PeripheralToMemory,
            peri_addr as *const u32,
            buf as *mut W as *mut u32,
            buf.len(),
            true,
            W::size(),
            options,
        )
    }

    /// Create a memory→peripheral transfer from a slice.
    ///
    /// # Safety
    /// `peri_addr` must be a DMA-writable peripheral data register matching
    /// `request`, valid for the lifetime of the transfer.
    pub unsafe fn new_write<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        buf: &'a [W],
        peri_addr: *mut W,
        options: TransferOptions,
    ) -> Self {
        Self::new_write_raw(channel, request, buf, peri_addr, options)
    }

    /// Create a memory→peripheral transfer from a raw slice pointer.
    ///
    /// # Safety
    /// As [`Self::new_write`]; additionally `buf` must stay valid while the
    /// transfer is running.
    pub unsafe fn new_write_raw<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        buf: *const [W],
        peri_addr: *mut W,
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::MemoryToPeripheral,
            peri_addr as *const u32,
            // Cast away const: the DMA only reads memory in this direction.
            buf as *const W as *mut u32,
            buf.len(),
            true,
            W::size(),
            options,
        )
    }

    /// Write the same word `count` times (memory increment disabled).
    ///
    /// # Safety
    /// As [`Self::new_write`]; `repeated` must stay valid while the transfer
    /// is running.
    pub unsafe fn new_write_repeated<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        repeated: &'a W,
        count: usize,
        peri_addr: *mut W,
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::MemoryToPeripheral,
            peri_addr as *const u32,
            repeated as *const W as *mut u32,
            count,
            false, // do not increment the memory address
            W::size(),
            options,
        )
    }

    /// Configure the channel and immediately start the transfer.
    ///
    /// # Safety
    /// Caller must uphold the pointer-validity contracts of the public
    /// constructors above.
    unsafe fn new_inner(
        channel: PeripheralRef<'a, AnyChannel>,
        _request: Request,
        dir: Dir,
        peri_addr: *const u32,
        mem_addr: *mut u32,
        mem_len: usize,
        incr_mem: bool,
        data_size: WordSize,
        options: TransferOptions,
    ) -> Self {
        // NDTR is 16-bit; also checked again inside `configure`.
        assert!(mem_len > 0 && mem_len <= 0xFFFF);

        channel.configure(
            _request, dir, peri_addr, mem_addr, mem_len, incr_mem, data_size, options,
        );
        channel.start();

        Self { channel }
    }

    /// Request the transfer to stop; completion is observed via the future
    /// or [`Self::is_running`].
    pub fn request_stop(&mut self) {
        self.channel.request_stop()
    }

    /// Pause the transfer by clearing the channel enable bit.
    pub fn request_pause(&mut self) {
        self.channel.request_pause()
    }

    /// Whether the DMA channel is still enabled.
    pub fn is_running(&mut self) -> bool {
        self.channel.is_running()
    }

    /// Remaining transfer count, in words.
    pub fn get_remaining_transfers(&self) -> u16 {
        self.channel.get_remaining_transfers()
    }

    /// Busy-wait until the transfer completes.
    pub fn blocking_wait(mut self) {
        while self.is_running() {}

        // Make DMA-written memory visible to subsequent CPU reads.
        fence(Ordering::SeqCst);

        // Skip `Drop` (which would request a stop); the transfer is done.
        core::mem::forget(self);
    }
}
505
506impl<'a> Drop for Transfer<'a> {
507 fn drop(&mut self) {
508 self.request_stop();
509 while self.is_running() {}
510
511 fence(Ordering::SeqCst);
513 }
514}
515
// `Transfer` holds no self-references, so it can be freely moved.
impl<'a> Unpin for Transfer<'a> {}
impl<'a> Future for Transfer<'a> {
    type Output = ();
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let state: &ChannelState = &STATE[self.channel.id as usize];

        // Register before checking, so a completion that fires between the
        // check and the return cannot be missed.
        state.waker.register(cx.waker());

        if self.is_running() {
            Poll::Pending
        } else {
            Poll::Ready(())
        }
    }
}
531
// Adapter exposing a single DMA channel through the generic `DmaCtrl`
// trait consumed by the ring-buffer implementations.
struct DmaCtrlImpl<'a>(PeripheralRef<'a, AnyChannel>);
535
impl<'a> DmaCtrl for DmaCtrlImpl<'a> {
    /// Remaining transfer count of the current DMA pass (hardware NDTR).
    fn get_remaining_transfers(&self) -> usize {
        self.0.get_remaining_transfers() as _
    }

    /// Atomically read-and-zero the transfer-complete counter.
    fn reset_complete_count(&mut self) -> usize {
        let state = &STATE[self.0.id as usize];
        // ARMv6-M has no atomic read-modify-write instructions, so `swap`
        // is unavailable there; emulate it inside a critical section.
        #[cfg(not(armv6m))]
        return state.complete_count.swap(0, Ordering::AcqRel);
        #[cfg(armv6m)]
        return critical_section::with(|_| {
            let x = state.complete_count.load(Ordering::Acquire);
            state.complete_count.store(0, Ordering::Release);
            x
        });
    }

    /// Register the waker that the DMA interrupt handler will wake.
    fn set_waker(&mut self, waker: &Waker) {
        STATE[self.0.id as usize].waker.register(waker);
    }
}
557
/// A DMA-driven ring buffer for continuously reading data from a peripheral.
pub struct ReadableRingBuffer<'a, W: Word> {
    channel: PeripheralRef<'a, AnyChannel>,
    ringbuf: ReadableDmaRingBuffer<'a, W>,
}
563
564impl<'a, W: Word> ReadableRingBuffer<'a, W> {
565 pub unsafe fn new(
567 channel: impl Peripheral<P = impl Channel> + 'a,
568 _request: Request,
569 peri_addr: *mut W,
570 buffer: &'a mut [W],
571 mut options: TransferOptions,
572 ) -> Self {
573 into_ref!(channel);
574 let channel: PeripheralRef<'a, AnyChannel> = channel.map_into();
575
576 let buffer_ptr = buffer.as_mut_ptr();
577 let len = buffer.len();
578 let dir = Dir::PeripheralToMemory;
579 let data_size = W::size();
580
581 options.half_transfer_ir = true;
582 options.complete_transfer_ir = true;
583 options.circular = true;
584
585 channel.configure(
586 _request,
587 dir,
588 peri_addr as *mut u32,
589 buffer_ptr as *mut u32,
590 len,
591 true,
592 data_size,
593 options,
594 );
595
596 Self {
597 channel,
598 ringbuf: ReadableDmaRingBuffer::new(buffer),
599 }
600 }
601
602 pub fn start(&mut self) {
606 self.channel.start();
607 }
608
609 pub fn clear(&mut self) {
611 self.ringbuf
612 .reset(&mut DmaCtrlImpl(self.channel.reborrow()));
613 }
614
615 pub fn read(&mut self, buf: &mut [W]) -> Result<(usize, usize), Error> {
621 self.ringbuf
622 .read(&mut DmaCtrlImpl(self.channel.reborrow()), buf)
623 }
624
625 pub async fn read_exact(&mut self, buffer: &mut [W]) -> Result<usize, Error> {
637 self.ringbuf
638 .read_exact(&mut DmaCtrlImpl(self.channel.reborrow()), buffer)
639 .await
640 }
641
642 pub fn len(&mut self) -> Result<usize, Error> {
644 Ok(self
645 .ringbuf
646 .len(&mut DmaCtrlImpl(self.channel.reborrow()))?)
647 }
648
649 pub const fn capacity(&self) -> usize {
651 self.ringbuf.cap()
652 }
653
654 pub fn set_waker(&mut self, waker: &Waker) {
656 DmaCtrlImpl(self.channel.reborrow()).set_waker(waker);
657 }
658
659 pub fn request_stop(&mut self) {
665 self.channel.request_stop()
666 }
667
668 pub fn request_pause(&mut self) {
673 self.channel.request_pause()
674 }
675
676 pub fn is_running(&mut self) -> bool {
681 self.channel.is_running()
682 }
683
684 pub async fn stop(&mut self) {
694 self.channel.disable_circular_mode();
695 poll_fn(|cx| {
697 self.set_waker(cx.waker());
698 self.channel.poll_stop()
699 })
700 .await
701 }
702}
703
704impl<'a, W: Word> Drop for ReadableRingBuffer<'a, W> {
705 fn drop(&mut self) {
706 self.request_stop();
707 while self.is_running() {}
708
709 fence(Ordering::SeqCst);
711 }
712}
713
/// A DMA-driven ring buffer for continuously writing data to a peripheral.
pub struct WritableRingBuffer<'a, W: Word> {
    channel: PeripheralRef<'a, AnyChannel>,
    ringbuf: WritableDmaRingBuffer<'a, W>,
}
719
720impl<'a, W: Word> WritableRingBuffer<'a, W> {
721 pub unsafe fn new(
723 channel: impl Peripheral<P = impl Channel> + 'a,
724 _request: Request,
725 peri_addr: *mut W,
726 buffer: &'a mut [W],
727 mut options: TransferOptions,
728 ) -> Self {
729 into_ref!(channel);
730 let channel: PeripheralRef<'a, AnyChannel> = channel.map_into();
731
732 let len = buffer.len();
733 let dir = Dir::MemoryToPeripheral;
734 let data_size = W::size();
735 let buffer_ptr = buffer.as_mut_ptr();
736
737 options.half_transfer_ir = true;
738 options.complete_transfer_ir = true;
739 options.circular = true;
740
741 channel.configure(
742 _request,
743 dir,
744 peri_addr as *mut u32,
745 buffer_ptr as *mut u32,
746 len,
747 true,
748 data_size,
749 options,
750 );
751
752 Self {
753 channel,
754 ringbuf: WritableDmaRingBuffer::new(buffer),
755 }
756 }
757
758 pub fn start(&mut self) {
762 self.channel.start();
763 }
764
765 pub fn clear(&mut self) {
767 self.ringbuf
768 .reset(&mut DmaCtrlImpl(self.channel.reborrow()));
769 }
770
771 pub fn write_immediate(&mut self, buf: &[W]) -> Result<(usize, usize), Error> {
774 self.ringbuf.write_immediate(buf)
775 }
776
777 pub fn write(&mut self, buf: &[W]) -> Result<(usize, usize), Error> {
780 self.ringbuf
781 .write(&mut DmaCtrlImpl(self.channel.reborrow()), buf)
782 }
783
784 pub async fn write_exact(&mut self, buffer: &[W]) -> Result<usize, Error> {
786 self.ringbuf
787 .write_exact(&mut DmaCtrlImpl(self.channel.reborrow()), buffer)
788 .await
789 }
790
791 pub async fn wait_write_error(&mut self) -> Result<usize, Error> {
793 self.ringbuf
794 .wait_write_error(&mut DmaCtrlImpl(self.channel.reborrow()))
795 .await
796 }
797
798 pub fn len(&mut self) -> Result<usize, Error> {
800 Ok(self
801 .ringbuf
802 .len(&mut DmaCtrlImpl(self.channel.reborrow()))?)
803 }
804
805 pub const fn capacity(&self) -> usize {
807 self.ringbuf.cap()
808 }
809
810 pub fn set_waker(&mut self, waker: &Waker) {
812 DmaCtrlImpl(self.channel.reborrow()).set_waker(waker);
813 }
814
815 pub fn request_stop(&mut self) {
821 self.channel.request_stop()
822 }
823
824 pub fn request_pause(&mut self) {
829 self.channel.request_pause()
830 }
831
832 pub fn is_running(&mut self) -> bool {
837 self.channel.is_running()
838 }
839
840 pub async fn stop(&mut self) {
848 self.channel.disable_circular_mode();
849 poll_fn(|cx| {
851 self.set_waker(cx.waker());
852 self.channel.poll_stop()
853 })
854 .await
855 }
856}
857
858impl<'a, W: Word> Drop for WritableRingBuffer<'a, W> {
859 fn drop(&mut self) {
860 self.request_stop();
861 while self.is_running() {}
862
863 fence(Ordering::SeqCst);
865 }
866}