use crate::base::{from_bool32, Error};
use miniaudio_sys as sys;
use std::os::raw::c_void;
use std::ptr::NonNull;
use std::sync::Arc;
/// Thin RAII wrapper around the raw miniaudio `ma_rb` ring buffer,
/// parameterized over the element type `T` stored in the buffer.
///
/// `#[repr(transparent)]` keeps the layout identical to `sys::ma_rb` so the
/// struct can be passed to the FFI by pointer.
#[repr(transparent)]
#[derive(Debug)]
pub(crate) struct RingBuffer<T: Clone> {
    // Raw miniaudio ring-buffer state; torn down via `ma_rb_uninit` in `Drop`.
    inner: sys::ma_rb,
    // Records the element type without storing any `T` values
    // (the actual storage lives behind `inner.pBuffer`).
    _buffer_type: std::marker::PhantomData<T>,
}
impl<T: Clone> RingBuffer<T> {
pub(crate) fn split(self) -> (RingBufferSend<T>, RingBufferRecv<T>) {
let wrapped = Arc::new(self);
let recv = RingBufferRecv {
inner: Arc::clone(&wrapped),
};
let send = RingBufferSend { inner: wrapped };
(send, recv)
}
pub(crate) fn create_pair(
subbuffer_len: usize,
subbuffer_count: usize,
) -> Result<(RingBufferSend<T>, RingBufferRecv<T>), Error> {
RingBuffer::new(subbuffer_len, subbuffer_count).map(Self::split)
}
pub(crate) fn create_pair_preallocated(
subbuffer_len: usize,
subbufer_count: usize,
subbffer_stride_in_bytes: usize,
preallocated: Box<[T]>,
) -> Result<(RingBufferSend<T>, RingBufferRecv<T>), Error> {
RingBuffer::new_preallocated(
subbuffer_len,
subbufer_count,
subbffer_stride_in_bytes,
preallocated,
)
.map(Self::split)
}
pub(crate) fn new(
subbuffer_len: usize,
subbuffer_count: usize,
) -> Result<RingBuffer<T>, Error> {
let size_in_bytes = std::mem::size_of::<T>() * subbuffer_len;
let stride_in_bytes = std::mem::size_of::<T>() * subbuffer_len;
unsafe { Self::new_raw(size_in_bytes, subbuffer_count, stride_in_bytes, None) }
}
pub(crate) fn new_preallocated(
subbuffer_len: usize,
subbuffer_count: usize,
mut subbuffer_stride_in_bytes: usize,
preallocated: Box<[T]>,
) -> Result<RingBuffer<T>, Error> {
let subbuffer_size_in_bytes = std::mem::size_of::<T>() * subbuffer_len;
if subbuffer_stride_in_bytes < subbuffer_size_in_bytes {
subbuffer_stride_in_bytes = subbuffer_size_in_bytes;
}
if subbuffer_count * subbuffer_stride_in_bytes
!= preallocated.len() * std::mem::size_of::<T>()
{
ma_debug_panic!("preallocated buffer size too small for arguments");
return Err(Error::InvalidArgs);
}
unsafe {
let preallocated_ptr_slice = Box::into_raw(preallocated);
let preallocated_ptr = (*preallocated_ptr_slice).as_mut_ptr();
let result = Self::new_raw(
subbuffer_size_in_bytes,
subbuffer_count,
subbuffer_stride_in_bytes,
NonNull::new(preallocated_ptr).map(NonNull::cast),
);
if result.is_err() {
drop(Box::from_raw(preallocated_ptr_slice));
}
result
}
}
unsafe fn new_raw(
subbuffer_size_in_bytes: usize,
subbuffer_count: usize,
subbuffer_stride_in_bytes: usize,
preallocated_buffer: Option<NonNull<()>>,
) -> Result<RingBuffer<T>, Error> {
let mut ring_buffer = std::mem::MaybeUninit::<sys::ma_rb>::uninit();
let result = sys::ma_rb_init_ex(
subbuffer_size_in_bytes,
subbuffer_count,
subbuffer_stride_in_bytes,
preallocated_buffer
.map(|p| p.cast().as_ptr())
.unwrap_or(std::ptr::null_mut()),
std::ptr::null(),
ring_buffer.as_mut_ptr(),
);
map_result!(
result,
RingBuffer {
inner: ring_buffer.assume_init(),
_buffer_type: std::marker::PhantomData,
}
)
}
pub(crate) fn read<F>(&self, count_requested: usize, f: F) -> usize
where
F: FnOnce(&[T]),
{
let mut bytes = count_requested * std::mem::size_of::<T>();
let mut buf_ptr: *mut c_void = std::ptr::null_mut();
let acquire_result = unsafe {
sys::ma_rb_acquire_read(&self.inner as *const _ as *mut _, &mut bytes, &mut buf_ptr)
};
debug_assert!(acquire_result == 0);
debug_assert!(bytes % std::mem::size_of::<T>() == 0);
let count = bytes / std::mem::size_of::<T>();
if count == 0 || buf_ptr.is_null() {
f(&[]);
return 0;
}
let items = unsafe { std::slice::from_raw_parts(buf_ptr.cast::<T>(), count) };
f(items);
let commit_result =
unsafe { sys::ma_rb_commit_read(&self.inner as *const _ as *mut _, bytes, buf_ptr) };
debug_assert!(commit_result == 0);
count
}
pub(crate) fn write<F>(&self, count_requested: usize, f: F) -> usize
where
F: FnOnce(&mut [T]),
{
let mut bytes = count_requested * std::mem::size_of::<T>();
let mut buf_ptr: *mut c_void = std::ptr::null_mut();
let acquire_result = unsafe {
sys::ma_rb_acquire_write(&self.inner as *const _ as *mut _, &mut bytes, &mut buf_ptr)
};
debug_assert!(acquire_result == 0);
debug_assert!(bytes % std::mem::size_of::<T>() == 0);
let count = bytes / std::mem::size_of::<T>();
if count == 0 || buf_ptr.is_null() {
f(&mut []);
return 0;
}
let items = unsafe { std::slice::from_raw_parts_mut(buf_ptr.cast::<T>(), count) };
f(items);
let commit_result =
unsafe { sys::ma_rb_commit_write(&self.inner as *const _ as *mut _, bytes, buf_ptr) };
debug_assert!(commit_result == 0);
count
}
#[inline]
#[allow(dead_code)]
pub(crate) fn pointer_distance(&self) -> usize {
let byte_distance =
unsafe { sys::ma_rb_pointer_distance(&self.inner as *const _ as *mut _) as usize };
debug_assert!(byte_distance % std::mem::size_of::<T>() == 0);
byte_distance / std::mem::size_of::<T>()
}
#[inline]
pub(crate) fn available_read(&self) -> usize {
let bytes_available =
unsafe { sys::ma_rb_available_read(&self.inner as *const _ as *mut _) as usize };
debug_assert!(bytes_available % std::mem::size_of::<T>() == 0);
bytes_available / std::mem::size_of::<T>()
}
#[inline]
pub(crate) fn available_write(&self) -> usize {
let bytes_available =
unsafe { sys::ma_rb_available_write(&self.inner as *const _ as *mut _) as usize };
debug_assert!(bytes_available % std::mem::size_of::<T>() == 0);
bytes_available / std::mem::size_of::<T>()
}
#[inline]
#[allow(dead_code)]
pub(crate) fn subbuffer_size(&self) -> usize {
unsafe { sys::ma_rb_get_subbuffer_size(&self.inner as *const _ as *mut _) }
}
#[inline]
#[allow(dead_code)]
pub(crate) fn subbuffer_stride(&self) -> usize {
unsafe { sys::ma_rb_get_subbuffer_stride(&self.inner as *const _ as *mut _) }
}
#[inline]
#[allow(dead_code)]
pub(crate) fn subbuffer_offset(&self, index: usize) -> usize {
unsafe { sys::ma_rb_get_subbuffer_offset(&self.inner as *const _ as *mut _, index) }
}
}
// SAFETY(review): these impls assert that the raw `ma_rb` state may be moved
// to and shared between threads, which is what `split` relies on to hand the
// two halves to different threads. miniaudio's ring buffer is intended for a
// single concurrent reader and single concurrent writer — confirm callers
// never drive more than one reader/writer per side at the same time.
unsafe impl<T: Send + Sized + Clone> Send for RingBuffer<T> {}
unsafe impl<T: Send + Sized + Clone> Sync for RingBuffer<T> {}
/// Write half of a split [`RingBuffer`]. Cloneable; all clones share the
/// same underlying buffer through an `Arc`.
pub struct RingBufferSend<T: Clone> {
    inner: Arc<RingBuffer<T>>,
}
impl<T: Clone> RingBufferSend<T> {
    /// Copies elements from the front of `src` into the ring buffer, writing
    /// as many as there is currently room for, and returns that count.
    pub fn write(&self, src: &[T]) -> usize {
        self.inner.write(src.len(), |dest| {
            // `dest` may be shorter than `src` when the buffer is nearly
            // full; only the first `dest.len()` elements are copied.
            dest.clone_from_slice(&src[0..dest.len()]);
        })
    }

    /// Acquires up to `count_requested` writable elements and lets `f` fill
    /// the slice in place; returns the number of elements made writable.
    pub fn write_with<F>(&self, count_requested: usize, f: F) -> usize
    where
        F: FnOnce(&mut [T]),
    {
        self.inner.write(count_requested, f)
    }

    /// Number of elements that can currently be written.
    // Receiver relaxed from the original needless `&mut self`: this is a
    // read-only query (it forwards to `available_write(&self)`), and `&self`
    // matches `write`/`write_with` while remaining backward-compatible for
    // existing callers.
    pub fn available(&self) -> usize {
        self.inner.available_write()
    }
}
impl<T: Clone> Clone for RingBufferSend<T> {
fn clone(&self) -> Self {
RingBufferSend {
inner: Arc::clone(&self.inner),
}
}
}
/// Read half of a split [`RingBuffer`]. Cloneable; all clones share the
/// same underlying buffer through an `Arc`.
pub struct RingBufferRecv<T: Clone> {
    inner: Arc<RingBuffer<T>>,
}
impl<T: Clone> RingBufferRecv<T> {
    /// Copies elements out of the ring buffer into the front of `dest`,
    /// reading as many as are currently available (at most `dest.len()`),
    /// and returns that count.
    pub fn read(&self, dest: &mut [T]) -> usize {
        self.inner.read(dest.len(), |src| {
            // `src` may be shorter than `dest` when little data is queued;
            // only the first `src.len()` slots of `dest` are overwritten.
            (&mut dest[0..src.len()]).clone_from_slice(src);
        })
    }

    /// Acquires up to `count_requested` readable elements and passes them to
    /// `f` as a slice; returns the number of elements consumed.
    pub fn read_with<F>(&self, count_requested: usize, f: F) -> usize
    where
        F: FnOnce(&[T]),
    {
        self.inner.read(count_requested, f)
    }

    /// Number of elements currently available for reading.
    // Receiver relaxed from the original needless `&mut self`: this is a
    // read-only query (it forwards to `available_read(&self)`), and `&self`
    // matches `read`/`read_with` while remaining backward-compatible for
    // existing callers.
    pub fn available(&self) -> usize {
        self.inner.available_read()
    }
}
impl<T: Clone> Clone for RingBufferRecv<T> {
fn clone(&self) -> Self {
RingBufferRecv {
inner: Arc::clone(&self.inner),
}
}
}
impl<T: Clone> Drop for RingBuffer<T> {
    /// Uninitializes the miniaudio ring buffer and, when the storage was
    /// preallocated by us (miniaudio does not own it), reclaims the original
    /// `Box<[T]>` allocation.
    fn drop(&mut self) {
        unsafe {
            let buffer_ptr = self.inner.pBuffer;
            let subbuffer_count = self.inner.subbufferCount as usize;
            let stride_in_bytes = self.inner.subbufferStrideInBytes as usize;
            let owns_buffer = from_bool32(self.inner.ownsBuffer());
            sys::ma_rb_uninit(&mut self.inner);
            if !owns_buffer && !buffer_ptr.is_null() {
                // The buffer came from a `Box<[T]>` in `new_preallocated`,
                // which verified `subbuffer_count * stride == len * size_of::<T>()`.
                // Rebuild the box as a *slice* with that exact element count
                // so the allocator is handed back the layout it produced.
                // (Previously this called `Box::from_raw` on the thin element
                // pointer with `subbufferCount` as the slice length, which
                // deallocated with the wrong layout and length.)
                let total_bytes = subbuffer_count * stride_in_bytes;
                debug_assert!(total_bytes % std::mem::size_of::<T>() == 0);
                let len = total_bytes / std::mem::size_of::<T>();
                drop(Box::from_raw(std::ptr::slice_from_raw_parts_mut(
                    buffer_ptr as *mut T,
                    len,
                )));
            }
        };
    }
}
/// Builds a ring buffer of `subbuffer_count` sub-buffers holding
/// `subbuffer_len` elements each, returning the connected send/recv halves.
pub fn ring_buffer<T: Clone + Send>(
    subbuffer_len: usize,
    subbuffer_count: usize,
) -> Result<(RingBufferSend<T>, RingBufferRecv<T>), Error> {
    RingBuffer::<T>::new(subbuffer_len, subbuffer_count).map(RingBuffer::split)
}
/// Like [`ring_buffer`], but backed by the caller-supplied `preallocated`
/// storage; fails with `Error::InvalidArgs` when its size does not match
/// `subbuffer_count * subbuffer_stride_in_bytes` bytes.
pub fn ring_buffer_preallocated<T: Clone + Send>(
    subbuffer_len: usize,
    subbuffer_count: usize,
    subbuffer_stride_in_bytes: usize,
    preallocated: Box<[T]>,
) -> Result<(RingBufferSend<T>, RingBufferRecv<T>), Error> {
    RingBuffer::<T>::new_preallocated(
        subbuffer_len,
        subbuffer_count,
        subbuffer_stride_in_bytes,
        preallocated,
    )
    .map(RingBuffer::split)
}