use core::{
mem::ManuallyDrop,
ops::{Deref, DerefMut},
};
#[cfg(not(esp32s2))]
use crate::dma::{AnyGdmaChannel, AnyGdmaRxChannel, AnyGdmaTxChannel, DmaEligible};
use crate::{
Async,
Blocking,
DriverMode,
dma::{
BurstConfig,
Channel,
ChannelRx,
ChannelTx,
DmaChannelConvert,
DmaDescriptor,
DmaError,
DmaPeripheral,
DmaRxBuf,
DmaRxBuffer,
DmaRxInterrupt,
DmaTxBuf,
DmaTxBuffer,
DmaTxInterrupt,
},
};
#[cfg(esp32s2)]
use crate::{
dma::{CopyDmaRxChannel, CopyDmaTxChannel},
peripherals::DMA_COPY,
};
// Select the concrete DMA channel types backing memory-to-memory transfers:
// the ESP32-S2 uses its dedicated copy-DMA engine, while the other (GDMA)
// chips use the type-erased "any" GDMA channel wrappers.
cfg_if::cfg_if! {
    if #[cfg(esp32s2)] {
        type Mem2MemChannel<'d> = DMA_COPY<'d>;
        type Mem2MemRxChannel<'d> = CopyDmaRxChannel<'d>;
        type Mem2MemTxChannel<'d> = CopyDmaTxChannel<'d>;
    } else {
        type Mem2MemChannel<'d> = AnyGdmaChannel<'d>;
        type Mem2MemRxChannel<'d> = AnyGdmaRxChannel<'d>;
        type Mem2MemTxChannel<'d> = AnyGdmaTxChannel<'d>;
    }
}
/// Memory-to-memory DMA driver.
///
/// Split into independently usable receive and transmit halves that share the
/// same underlying DMA channel pair (see [`Mem2Mem::new`]).
pub struct Mem2Mem<'d, Dm>
where
    Dm: DriverMode,
{
    /// Receive half: drains DMA data into a destination buffer.
    pub rx: Mem2MemRx<'d, Dm>,
    /// Transmit half: feeds a source buffer into the DMA.
    pub tx: Mem2MemTx<'d, Dm>,
}
impl<'d> Mem2Mem<'d, Blocking> {
    /// Creates a new blocking memory-to-memory DMA driver.
    ///
    /// On GDMA chips, `peripheral` selects which peripheral's DMA slot the
    /// transfer is attributed to; its [`DmaPeripheral`] number is forwarded
    /// to [`Self::new_unsafe`].
    pub fn new(
        channel: impl DmaChannelConvert<Mem2MemChannel<'d>>,
        #[cfg(dma_kind = "gdma")] peripheral: impl DmaEligible,
    ) -> Self {
        // SAFETY: holding the peripheral driver (`impl DmaEligible`) by value
        // presumably guarantees exclusive access to its DMA peripheral slot —
        // TODO(review): confirm this matches `new_unsafe`'s contract.
        unsafe {
            Self::new_unsafe(
                channel,
                #[cfg(dma_kind = "gdma")]
                peripheral.dma_peripheral(),
            )
        }
    }

    /// Creates a new blocking memory-to-memory DMA driver from a raw
    /// [`DmaPeripheral`] number.
    ///
    /// # Safety
    ///
    /// NOTE(review): the contract is not stated in this file; it appears the
    /// caller must ensure the selected DMA peripheral is not in use by any
    /// other driver while this one exists — confirm against the crate docs.
    pub unsafe fn new_unsafe(
        channel: impl DmaChannelConvert<Mem2MemChannel<'d>>,
        #[cfg(dma_kind = "gdma")] peripheral: DmaPeripheral,
    ) -> Self {
        let channel = Channel::new(channel.degrade());
        cfg_if::cfg_if! {
            if #[cfg(dma_kind = "gdma")] {
                // GDMA must have the RX channel explicitly switched into
                // memory-to-memory mode.
                let mut channel = channel;
                channel.rx.set_mem2mem_mode(true);
            } else {
                // No peripheral parameter exists on this DMA kind; use a
                // fixed value so both halves carry the same peripheral.
                let peripheral = DmaPeripheral::Spi2;
            }
        }
        Mem2Mem {
            rx: Mem2MemRx {
                channel: channel.rx,
                peripheral,
            },
            tx: Mem2MemTx {
                channel: channel.tx,
                peripheral,
            },
        }
    }

    /// Wraps the driver in a [`SimpleMem2Mem`] that manages the given
    /// descriptor lists and burst configuration for simple slice-to-slice
    /// copies.
    ///
    /// # Errors
    ///
    /// Returns [`DmaError::OutOfDescriptors`] if either descriptor list is
    /// empty.
    pub fn with_descriptors(
        self,
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        config: BurstConfig,
    ) -> Result<SimpleMem2Mem<'d, Blocking>, DmaError> {
        SimpleMem2Mem::new(self, rx_descriptors, tx_descriptors, config)
    }

    /// Converts both halves of the driver into their async variants.
    pub fn into_async(self) -> Mem2Mem<'d, Async> {
        Mem2Mem {
            rx: self.rx.into_async(),
            tx: self.tx.into_async(),
        }
    }
}
/// The receive half of a [`Mem2Mem`] driver.
pub struct Mem2MemRx<'d, Dm: DriverMode> {
    // The RX DMA channel used to receive data.
    channel: ChannelRx<Dm, Mem2MemRxChannel<'d>>,
    // Peripheral number the transfer is attributed to (set in `new_unsafe`).
    peripheral: DmaPeripheral,
}
impl<'d> Mem2MemRx<'d, Blocking> {
    /// Converts the receive half into its async variant.
    pub fn into_async(self) -> Mem2MemRx<'d, Async> {
        let Self { channel, peripheral } = self;
        Mem2MemRx {
            channel: channel.into_async(),
            peripheral,
        }
    }
}
impl<'d, Dm> Mem2MemRx<'d, Dm>
where
    Dm: DriverMode,
{
    /// Starts a DMA transfer receiving data into `buf`.
    ///
    /// On success, returns an in-flight [`Mem2MemRxTransfer`] that owns the
    /// driver and the buffer. On failure, returns the error together with
    /// the driver and buffer so both can be reused.
    pub fn receive<BUF>(
        mut self,
        mut buf: BUF,
    ) -> Result<Mem2MemRxTransfer<'d, Dm, BUF>, (DmaError, Self, BUF)>
    where
        BUF: DmaRxBuffer,
    {
        // SAFETY: `buf` is moved into the returned transfer object, which
        // stops the DMA before releasing the buffer (see its `stop`/`Drop`),
        // so the memory remains valid while the hardware writes to it.
        let result = unsafe {
            self.channel
                .prepare_transfer(self.peripheral, &mut buf)
                .and_then(|_| self.channel.start_transfer())
        };
        if let Err(e) = result {
            return Err((e, self, buf));
        }
        Ok(Mem2MemRxTransfer {
            // ManuallyDrop: ownership is handed back either via `release()`
            // (used by `wait`/`stop`) or explicitly in the transfer's Drop.
            m2m: ManuallyDrop::new(self),
            buf_view: ManuallyDrop::new(buf.into_view()),
        })
    }
}
/// An in-flight memory-to-memory receive transfer.
///
/// Created by [`Mem2MemRx::receive`]. Dropping it stops the transfer.
pub struct Mem2MemRxTransfer<'d, M: DriverMode, BUF: DmaRxBuffer> {
    // Both fields are ManuallyDrop so `release()` can move them out without
    // running this type's Drop (which would stop the transfer again).
    m2m: ManuallyDrop<Mem2MemRx<'d, M>>,
    buf_view: ManuallyDrop<BUF::View>,
}
impl<'d, M: DriverMode, BUF: DmaRxBuffer> Mem2MemRxTransfer<'d, M, BUF> {
    /// Returns whether the receive side has finished (successfully or with a
    /// descriptor error).
    pub fn is_done(&self) -> bool {
        // "Done" means any of these interrupts is pending, i.e. the pending
        // set is NOT disjoint from them.
        let done_interrupts = DmaRxInterrupt::DescriptorError | DmaRxInterrupt::DescriptorEmpty;
        !self
            .m2m
            .channel
            .pending_in_interrupts()
            .is_disjoint(done_interrupts)
    }

    /// Busy-waits for the transfer to finish, then returns the result, the
    /// driver, and the finalized buffer.
    pub fn wait(self) -> (Result<(), DmaError>, Mem2MemRx<'d, M>, BUF::Final) {
        while !self.is_done() {}
        let (m2m, view) = self.release();
        let result = if m2m.channel.has_error() {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        };
        (result, m2m, BUF::from_view(view))
    }

    /// Stops the transfer early and returns the driver and buffer.
    pub fn stop(self) -> (Mem2MemRx<'d, M>, BUF::Final) {
        let (mut m2m, view) = self.release();
        m2m.channel.stop_transfer();
        (m2m, BUF::from_view(view))
    }

    /// Moves the driver and buffer view out without running `Drop`.
    fn release(mut self) -> (Mem2MemRx<'d, M>, BUF::View) {
        // SAFETY: `self` is forgotten immediately afterwards, so Drop never
        // observes the emptied ManuallyDrop fields.
        let result = unsafe {
            let m2m = ManuallyDrop::take(&mut self.m2m);
            let view = ManuallyDrop::take(&mut self.buf_view);
            (m2m, view)
        };
        core::mem::forget(self);
        result
    }
}
impl<M: DriverMode, BUF: DmaRxBuffer> Deref for Mem2MemRxTransfer<'_, M, BUF> {
    type Target = BUF::View;

    /// Borrows the in-flight buffer view.
    fn deref(&self) -> &Self::Target {
        self.buf_view.deref()
    }
}
impl<M: DriverMode, BUF: DmaRxBuffer> DerefMut for Mem2MemRxTransfer<'_, M, BUF> {
    /// Mutably borrows the in-flight buffer view.
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.buf_view.deref_mut()
    }
}
impl<M: DriverMode, BUF: DmaRxBuffer> Drop for Mem2MemRxTransfer<'_, M, BUF> {
    /// Aborts the in-flight transfer and reclaims the buffer.
    fn drop(&mut self) {
        // Stop the DMA first so the hardware no longer writes into the
        // buffer we're about to reassemble.
        self.m2m.channel.stop_transfer();
        // SAFETY: the fields are only taken here or in `release()`, and
        // `release()` forgets `self`, so this Drop never runs after it.
        let view = unsafe {
            ManuallyDrop::drop(&mut self.m2m);
            ManuallyDrop::take(&mut self.buf_view)
        };
        // Reassemble the buffer so its own drop logic runs.
        let _ = BUF::from_view(view);
    }
}
/// The transmit half of a [`Mem2Mem`] driver.
pub struct Mem2MemTx<'d, Dm: DriverMode> {
    // The TX DMA channel used to send data.
    channel: ChannelTx<Dm, Mem2MemTxChannel<'d>>,
    // Peripheral number the transfer is attributed to (set in `new_unsafe`).
    peripheral: DmaPeripheral,
}
impl<'d> Mem2MemTx<'d, Blocking> {
    /// Converts the transmit half into its async variant.
    pub fn into_async(self) -> Mem2MemTx<'d, Async> {
        let Self { channel, peripheral } = self;
        Mem2MemTx {
            channel: channel.into_async(),
            peripheral,
        }
    }
}
impl<'d, Dm: DriverMode> Mem2MemTx<'d, Dm> {
    /// Starts a DMA transfer sending the contents of `buf`.
    ///
    /// On success, returns an in-flight [`Mem2MemTxTransfer`] that owns the
    /// driver and the buffer. On failure, returns the error together with
    /// the driver and buffer so both can be reused.
    pub fn send<BUF>(
        mut self,
        mut buf: BUF,
    ) -> Result<Mem2MemTxTransfer<'d, Dm, BUF>, (DmaError, Self, BUF)>
    where
        BUF: DmaTxBuffer,
    {
        // SAFETY: `buf` is moved into the returned transfer object, which
        // stops the DMA before releasing the buffer (see its `stop`/`Drop`),
        // so the memory remains valid while the hardware reads from it.
        let result = unsafe {
            self.channel
                .prepare_transfer(self.peripheral, &mut buf)
                .and_then(|_| self.channel.start_transfer())
        };
        if let Err(e) = result {
            return Err((e, self, buf));
        }
        Ok(Mem2MemTxTransfer {
            // ManuallyDrop: ownership is handed back either via `release()`
            // (used by `wait`/`stop`) or explicitly in the transfer's Drop.
            m2m: ManuallyDrop::new(self),
            buf_view: ManuallyDrop::new(buf.into_view()),
        })
    }
}
/// An in-flight memory-to-memory transmit transfer.
///
/// Created by [`Mem2MemTx::send`]. Dropping it stops the transfer.
pub struct Mem2MemTxTransfer<'d, Dm: DriverMode, BUF: DmaTxBuffer> {
    // Both fields are ManuallyDrop so `release()` can move them out without
    // running this type's Drop (which would stop the transfer again).
    m2m: ManuallyDrop<Mem2MemTx<'d, Dm>>,
    buf_view: ManuallyDrop<BUF::View>,
}
impl<'d, Dm: DriverMode, BUF: DmaTxBuffer> Mem2MemTxTransfer<'d, Dm, BUF> {
    /// Returns whether the transmit side has finished (successfully or with
    /// a descriptor error).
    pub fn is_done(&self) -> bool {
        // "Done" means any of these interrupts is pending, i.e. the pending
        // set is NOT disjoint from them.
        let done_interrupts = DmaTxInterrupt::DescriptorError | DmaTxInterrupt::TotalEof;
        !self
            .m2m
            .channel
            .pending_out_interrupts()
            .is_disjoint(done_interrupts)
    }

    /// Busy-waits for the transfer to finish, then returns the result, the
    /// driver, and the finalized buffer.
    pub fn wait(self) -> (Result<(), DmaError>, Mem2MemTx<'d, Dm>, BUF::Final) {
        while !self.is_done() {}
        let (m2m, view) = self.release();
        let result = if m2m.channel.has_error() {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        };
        (result, m2m, BUF::from_view(view))
    }

    /// Stops the transfer early and returns the driver and buffer.
    pub fn stop(self) -> (Mem2MemTx<'d, Dm>, BUF::Final) {
        let (mut m2m, view) = self.release();
        m2m.channel.stop_transfer();
        (m2m, BUF::from_view(view))
    }

    /// Moves the driver and buffer view out without running `Drop`.
    fn release(mut self) -> (Mem2MemTx<'d, Dm>, BUF::View) {
        // SAFETY: `self` is forgotten immediately afterwards, so Drop never
        // observes the emptied ManuallyDrop fields.
        let result = unsafe {
            let m2m = ManuallyDrop::take(&mut self.m2m);
            let view = ManuallyDrop::take(&mut self.buf_view);
            (m2m, view)
        };
        core::mem::forget(self);
        result
    }
}
impl<Dm: DriverMode, BUF: DmaTxBuffer> Deref for Mem2MemTxTransfer<'_, Dm, BUF> {
    type Target = BUF::View;

    /// Borrows the in-flight buffer view.
    fn deref(&self) -> &Self::Target {
        self.buf_view.deref()
    }
}
impl<Dm: DriverMode, BUF: DmaTxBuffer> DerefMut for Mem2MemTxTransfer<'_, Dm, BUF> {
    /// Mutably borrows the in-flight buffer view.
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.buf_view.deref_mut()
    }
}
impl<Dm: DriverMode, BUF: DmaTxBuffer> Drop for Mem2MemTxTransfer<'_, Dm, BUF> {
    /// Aborts the in-flight transfer and reclaims the buffer.
    fn drop(&mut self) {
        // Stop the DMA first so the hardware no longer reads from the
        // buffer we're about to reassemble.
        self.m2m.channel.stop_transfer();
        // SAFETY: the fields are only taken here or in `release()`, and
        // `release()` forgets `self`, so this Drop never runs after it.
        let view = unsafe {
            ManuallyDrop::drop(&mut self.m2m);
            ManuallyDrop::take(&mut self.buf_view)
        };
        // Reassemble the buffer so its own drop logic runs.
        let _ = BUF::from_view(view);
    }
}
/// Convenience wrapper around [`Mem2Mem`] for simple slice-to-slice copies,
/// managing descriptor lists and buffer setup internally.
pub struct SimpleMem2Mem<'d, Dm: DriverMode> {
    // Holds the driver and descriptors; see `State` for the lifecycle.
    state: State<'d, Dm>,
    // Burst configuration applied to both the RX and TX DMA buffers.
    config: BurstConfig,
}
/// Internal state machine for [`SimpleMem2Mem`].
enum State<'d, Dm: DriverMode> {
    /// No transfer running: holds the driver, then the RX descriptor list,
    /// then the TX descriptor list (in that order).
    Idle(
        Mem2Mem<'d, Dm>,
        &'d mut [DmaDescriptor],
        &'d mut [DmaDescriptor],
    ),
    /// A transfer is in flight; the driver halves and descriptors live
    /// inside the two transfer objects.
    Active(
        Mem2MemRxTransfer<'d, Dm, DmaRxBuf>,
        Mem2MemTxTransfer<'d, Dm, DmaTxBuf>,
    ),
    /// Transient marker while ownership is moved out of `state`; it persists
    /// only if a transfer guard is leaked (detected in the Drop impls).
    InUse,
}
impl<'d, Dm: DriverMode> SimpleMem2Mem<'d, Dm> {
    /// Creates a new simple memory-to-memory wrapper from a driver, the RX
    /// and TX descriptor lists, and a burst configuration.
    ///
    /// # Errors
    ///
    /// Returns [`DmaError::OutOfDescriptors`] if either descriptor list is
    /// empty.
    pub fn new(
        mem2mem: Mem2Mem<'d, Dm>,
        rx_descriptors: &'d mut [DmaDescriptor],
        tx_descriptors: &'d mut [DmaDescriptor],
        config: BurstConfig,
    ) -> Result<Self, DmaError> {
        if rx_descriptors.is_empty() || tx_descriptors.is_empty() {
            return Err(DmaError::OutOfDescriptors);
        }
        Ok(Self {
            state: State::Idle(mem2mem, rx_descriptors, tx_descriptors),
            config,
        })
    }
}
impl<'d, Dm: DriverMode> SimpleMem2Mem<'d, Dm> {
    /// Starts copying `tx_buffer` into `rx_buffer` via DMA.
    ///
    /// Returns a guard that stops the transfer and restores the idle state
    /// when dropped. On error, the driver and descriptors are restored so
    /// `start_transfer` can be retried.
    ///
    /// # Panics
    ///
    /// Panics if a previous transfer guard was leaked (e.g. via
    /// `core::mem::forget`), since the driver state can then no longer be
    /// recovered.
    pub fn start_transfer(
        &mut self,
        rx_buffer: &mut [u8],
        tx_buffer: &[u8],
    ) -> Result<SimpleMem2MemTransfer<'_, 'd, Dm>, DmaError> {
        let State::Idle(mem2mem, rx_descriptors, tx_descriptors) =
            core::mem::replace(&mut self.state, State::InUse)
        else {
            panic!("SimpleMem2MemTransfer was forgotten with core::mem::forget or similar");
        };
        // SAFETY(review): these re-borrows launder the buffer/descriptor
        // lifetimes to whatever `DmaRxBuf`/`DmaTxBuf` require. This appears
        // sound because the returned guard borrows `self` (pinning the
        // caller's buffers for the transfer's duration) and the Drop impls
        // stop the DMA and reclaim the descriptors; leaking the guard is
        // caught by the panicking Drop of `SimpleMem2Mem`. TODO: confirm the
        // `tx_buffer` cast from `&[u8]` is never written through.
        let rx_buffer =
            unsafe { core::slice::from_raw_parts_mut(rx_buffer.as_mut_ptr(), rx_buffer.len()) };
        let tx_buffer =
            unsafe { core::slice::from_raw_parts_mut(tx_buffer.as_ptr() as _, tx_buffer.len()) };
        let rx_descriptors = unsafe {
            core::slice::from_raw_parts_mut(rx_descriptors.as_mut_ptr(), rx_descriptors.len())
        };
        let tx_descriptors = unsafe {
            core::slice::from_raw_parts_mut(tx_descriptors.as_mut_ptr(), tx_descriptors.len())
        };
        let dma_rx_buf = unwrap!(
            DmaRxBuf::new_with_config(rx_descriptors, rx_buffer, self.config),
            "There's no way to get the descriptors back yet"
        );
        // Start the receive side before the send side; presumably the RX
        // channel must be armed before TX pushes data — TODO(review) confirm.
        let rx = match mem2mem.rx.receive(dma_rx_buf) {
            Ok(rx) => rx,
            Err((err, rx, buf)) => {
                // Recover the descriptors from the buffer and restore Idle.
                let (rx_descriptors, _rx_buffer) = buf.split();
                self.state = State::Idle(
                    Mem2Mem { rx, tx: mem2mem.tx },
                    rx_descriptors,
                    tx_descriptors,
                );
                return Err(err);
            }
        };
        let dma_tx_buf = unwrap!(
            DmaTxBuf::new_with_config(tx_descriptors, tx_buffer, self.config),
            "There's no way to get the descriptors back yet"
        );
        let tx = match mem2mem.tx.send(dma_tx_buf) {
            Ok(tx) => tx,
            Err((err, tx, buf)) => {
                // Unwind: stop the already-started receive half, recover
                // both descriptor lists, and restore Idle.
                let (tx_descriptors, _tx_buffer) = buf.split();
                let (rx, buf) = rx.stop();
                let (rx_descriptors, _rx_buffer) = buf.split();
                self.state = State::Idle(Mem2Mem { rx, tx }, rx_descriptors, tx_descriptors);
                return Err(err);
            }
        };
        self.state = State::Active(rx, tx);
        Ok(SimpleMem2MemTransfer(self))
    }
}
impl<Dm: DriverMode> Drop for SimpleMem2Mem<'_, Dm> {
    /// Verifies the driver is idle when dropped.
    ///
    /// # Panics
    ///
    /// Panics if a transfer guard was leaked (the state is still `InUse` or
    /// `Active`), since the DMA may then still reference freed buffers.
    fn drop(&mut self) {
        // Idiom fix: the pattern binds nothing, so a plain place match
        // suffices — the `&mut self.state` borrow was unnecessary. `..`
        // also keeps this check stable if fields are added to `Idle`.
        if !matches!(self.state, State::Idle(..)) {
            panic!("SimpleMem2MemTransfer was forgotten with core::mem::forget or similar");
        }
    }
}
/// Guard for an in-flight [`SimpleMem2Mem`] copy; stops the transfer and
/// restores the idle state when dropped.
pub struct SimpleMem2MemTransfer<'a, 'd, Dm: DriverMode>(&'a mut SimpleMem2Mem<'d, Dm>);
impl<Dm: DriverMode> SimpleMem2MemTransfer<'_, '_, Dm> {
    /// Returns whether both halves of the copy have finished.
    pub fn is_done(&self) -> bool {
        let State::Active(rx, tx) = &self.0.state else {
            // `state` is always `Active` while this guard exists: it is set
            // in `start_transfer` and only cleared by our Drop impl.
            unreachable!()
        };
        // TX counts as done on TotalEof or DescriptorError; RX only on a
        // successful EOF.
        // NOTE(review): if the RX side hits a descriptor error,
        // `SuccessfulEof` may never fire and `wait` below would spin
        // forever — confirm whether an error check is needed here.
        tx.is_done()
            && rx
                .m2m
                .channel
                .pending_in_interrupts()
                .contains(DmaRxInterrupt::SuccessfulEof)
    }

    /// Busy-waits until the copy completes.
    ///
    /// Dropping the returned guard (at the end of this call) restores the
    /// driver to its idle state.
    pub fn wait(self) -> Result<(), DmaError> {
        while !self.is_done() {}
        Ok(())
    }
}
impl<Dm: DriverMode> Drop for SimpleMem2MemTransfer<'_, '_, Dm> {
    /// Stops both DMA halves and restores the parent driver to `Idle`,
    /// handing the descriptor lists back.
    fn drop(&mut self) {
        let State::Active(rx, tx) = core::mem::replace(&mut self.0.state, State::InUse) else {
            // `state` stays `Active` for this guard's entire lifetime.
            unreachable!()
        };
        let (tx, dma_tx_buf) = tx.stop();
        let (rx, dma_rx_buf) = rx.stop();
        // Recover the descriptor lists from the buffers; the user-provided
        // byte buffers are simply released.
        let (tx_descriptors, _tx_buffer) = dma_tx_buf.split();
        let (rx_descriptors, _rx_buffer) = dma_rx_buf.split();
        self.0.state = State::Idle(Mem2Mem { rx, tx }, rx_descriptors, tx_descriptors);
    }
}