1use crate::base::{from_bool32, Error};
7use miniaudio_sys as sys;
8use std::os::raw::c_void;
9use std::ptr::NonNull;
10use std::sync::Arc;
11
/// Typed wrapper around miniaudio's `ma_rb` ring buffer.
///
/// `#[repr(transparent)]` guarantees the layout is exactly that of
/// `sys::ma_rb`, so `&self.inner` pointers can be handed straight to the
/// C API.
#[repr(transparent)]
#[derive(Debug)]
pub(crate) struct RingBuffer<T: Clone> {
    inner: sys::ma_rb,
    // Zero-sized marker tying the buffer to its element type `T` without
    // storing any `T` inline (keeps `repr(transparent)` valid).
    _buffer_type: std::marker::PhantomData<T>,
}
18
impl<T: Clone> RingBuffer<T> {
    /// Consumes this ring buffer and splits it into a producer/consumer
    /// pair that share the same underlying buffer through an `Arc`.
    pub(crate) fn split(self) -> (RingBufferSend<T>, RingBufferRecv<T>) {
        let wrapped = Arc::new(self);
        let recv = RingBufferRecv {
            inner: Arc::clone(&wrapped),
        };
        let send = RingBufferSend { inner: wrapped };
        (send, recv)
    }

    /// Allocates a ring buffer (`subbuffer_count` subbuffers of
    /// `subbuffer_len` items each) and returns it already split into a
    /// send/recv pair.
    pub(crate) fn create_pair(
        subbuffer_len: usize,
        subbuffer_count: usize,
    ) -> Result<(RingBufferSend<T>, RingBufferRecv<T>), Error> {
        RingBuffer::new(subbuffer_len, subbuffer_count).map(Self::split)
    }

    /// Like `create_pair`, but backed by the caller-supplied `preallocated`
    /// storage instead of a fresh allocation.
    ///
    /// NOTE(review): `subbufer_count` and `subbffer_stride_in_bytes` are
    /// misspellings of `new_preallocated`'s correctly-spelled parameters;
    /// internal names only, no caller impact.
    pub(crate) fn create_pair_preallocated(
        subbuffer_len: usize,
        subbufer_count: usize,
        subbffer_stride_in_bytes: usize,
        preallocated: Box<[T]>,
    ) -> Result<(RingBufferSend<T>, RingBufferRecv<T>), Error> {
        RingBuffer::new_preallocated(
            subbuffer_len,
            subbufer_count,
            subbffer_stride_in_bytes,
            preallocated,
        )
        .map(Self::split)
    }

    /// Creates a ring buffer that owns its storage, with `subbuffer_count`
    /// subbuffers of `subbuffer_len` items each.
    pub(crate) fn new(
        subbuffer_len: usize,
        subbuffer_count: usize,
    ) -> Result<RingBuffer<T>, Error> {
        // Items are packed contiguously: stride equals subbuffer size.
        let size_in_bytes = std::mem::size_of::<T>() * subbuffer_len;
        let stride_in_bytes = std::mem::size_of::<T>() * subbuffer_len;

        // SAFETY: `None` asks miniaudio to allocate (and own) the backing
        // buffer itself, so no pointer validity obligations on our side.
        unsafe { Self::new_raw(size_in_bytes, subbuffer_count, stride_in_bytes, None) }
    }

    /// Creates a ring buffer backed by `preallocated`, taking ownership of
    /// the box. On failure the box is reclaimed and dropped here; on
    /// success it is leaked and must be reconstructed when the
    /// `RingBuffer` is dropped.
    pub(crate) fn new_preallocated(
        subbuffer_len: usize,
        subbuffer_count: usize,
        mut subbuffer_stride_in_bytes: usize,
        preallocated: Box<[T]>,
    ) -> Result<RingBuffer<T>, Error> {
        let subbuffer_size_in_bytes = std::mem::size_of::<T>() * subbuffer_len;

        // A stride smaller than a subbuffer would make subbuffers overlap;
        // clamp it up to the subbuffer size.
        if subbuffer_stride_in_bytes < subbuffer_size_in_bytes {
            subbuffer_stride_in_bytes = subbuffer_size_in_bytes;
        }

        // The provided storage must match the requested geometry exactly
        // (count * stride bytes == boxed slice length in bytes).
        if subbuffer_count * subbuffer_stride_in_bytes
            != preallocated.len() * std::mem::size_of::<T>()
        {
            ma_debug_panic!("preallocated buffer size too small for arguments");
            return Err(Error::InvalidArgs);
        }

        unsafe {
            // Leak the box; the `Drop` impl for `RingBuffer` is responsible
            // for freeing it when miniaudio does not own the buffer.
            let preallocated_ptr_slice = Box::into_raw(preallocated);
            let preallocated_ptr = (*preallocated_ptr_slice).as_mut_ptr();
            let result = Self::new_raw(
                subbuffer_size_in_bytes,
                subbuffer_count,
                subbuffer_stride_in_bytes,
                NonNull::new(preallocated_ptr).map(NonNull::cast),
            );

            // Initialization failed: take the allocation back so it is not
            // leaked.
            if result.is_err() {
                drop(Box::from_raw(preallocated_ptr_slice));
            }

            result
        }
    }

    /// Initializes the underlying `ma_rb` via `ma_rb_init_ex`.
    ///
    /// # Safety
    ///
    /// If `preallocated_buffer` is `Some`, it must point to a valid,
    /// suitably aligned allocation of at least
    /// `subbuffer_count * subbuffer_stride_in_bytes` bytes that outlives
    /// the returned ring buffer.
    unsafe fn new_raw(
        subbuffer_size_in_bytes: usize,
        subbuffer_count: usize,
        subbuffer_stride_in_bytes: usize,
        preallocated_buffer: Option<NonNull<()>>,
    ) -> Result<RingBuffer<T>, Error> {
        let mut ring_buffer = std::mem::MaybeUninit::<sys::ma_rb>::uninit();

        let result = sys::ma_rb_init_ex(
            subbuffer_size_in_bytes,
            subbuffer_count,
            subbuffer_stride_in_bytes,
            // Null => miniaudio allocates and owns the buffer.
            preallocated_buffer
                .map(|p| p.cast().as_ptr())
                .unwrap_or(std::ptr::null_mut()),
            std::ptr::null(),
            ring_buffer.as_mut_ptr(),
        );

        map_result!(
            result,
            RingBuffer {
                // Only treated as initialized when `ma_rb_init_ex`
                // reported success.
                inner: ring_buffer.assume_init(),
                _buffer_type: std::marker::PhantomData,
            }
        )
    }

    /// Acquires up to `count_requested` readable items, passes them to `f`
    /// as a slice, then commits the read. Returns the number of items
    /// consumed. `f` is always invoked — with an empty slice if nothing is
    /// available.
    ///
    /// NOTE(review): the `&self -> *mut` casts rely on `ma_rb` updating its
    /// cursors in a way that is sound behind a shared reference (miniaudio
    /// documents `ma_rb` as a lock-free SPSC buffer) — confirm against the
    /// vendored miniaudio version.
    pub(crate) fn read<F>(&self, count_requested: usize, f: F) -> usize
    where
        F: FnOnce(&[T]),
    {
        let mut bytes = count_requested * std::mem::size_of::<T>();
        let mut buf_ptr: *mut c_void = std::ptr::null_mut();
        // `ma_rb_acquire_read` shrinks `bytes` to what is actually
        // available and points `buf_ptr` at the readable region.
        let acquire_result = unsafe {
            sys::ma_rb_acquire_read(&self.inner as *const _ as *mut _, &mut bytes, &mut buf_ptr)
        };

        debug_assert!(acquire_result == 0);
        // The C API works in bytes; it should hand back whole items only.
        debug_assert!(bytes % std::mem::size_of::<T>() == 0);

        let count = bytes / std::mem::size_of::<T>();

        // Nothing acquired: invoke `f` with an empty slice and skip the
        // commit entirely.
        if count == 0 || buf_ptr.is_null() {
            f(&[]);
            return 0;
        }

        // SAFETY: `buf_ptr` is non-null and spans `bytes` readable bytes,
        // i.e. `count` items of `T`.
        let items = unsafe { std::slice::from_raw_parts(buf_ptr.cast::<T>(), count) };

        f(items);

        // Commit exactly what was acquired so the read cursor advances.
        let commit_result =
            unsafe { sys::ma_rb_commit_read(&self.inner as *const _ as *mut _, bytes, buf_ptr) };

        debug_assert!(commit_result == 0);

        count
    }

    /// Acquires up to `count_requested` writable items, passes them to `f`
    /// as a mutable slice, then commits the write. Returns the number of
    /// items made writable. `f` is always invoked — with an empty slice if
    /// the buffer is full.
    ///
    /// NOTE(review): `f` receives `&mut [T]` over buffer memory that may
    /// never have held valid `T` values (miniaudio-owned allocations).
    /// Sound only for plain-data `T`; confirm callers never use types with
    /// invalid bit patterns or meaningful `Drop`.
    pub(crate) fn write<F>(&self, count_requested: usize, f: F) -> usize
    where
        F: FnOnce(&mut [T]),
    {
        let mut bytes = count_requested * std::mem::size_of::<T>();
        let mut buf_ptr: *mut c_void = std::ptr::null_mut();
        // `ma_rb_acquire_write` shrinks `bytes` to the free space and
        // points `buf_ptr` at the writable region.
        let acquire_result = unsafe {
            sys::ma_rb_acquire_write(&self.inner as *const _ as *mut _, &mut bytes, &mut buf_ptr)
        };

        debug_assert!(acquire_result == 0);
        debug_assert!(bytes % std::mem::size_of::<T>() == 0);

        let count = bytes / std::mem::size_of::<T>();

        if count == 0 || buf_ptr.is_null() {
            f(&mut []);
            return 0;
        }

        // SAFETY: `buf_ptr` is non-null and spans `bytes` writable bytes,
        // i.e. `count` items of `T` (see NOTE above re: initialization).
        let items = unsafe { std::slice::from_raw_parts_mut(buf_ptr.cast::<T>(), count) };

        f(items);

        // Commit exactly what was acquired so the write cursor advances.
        let commit_result =
            unsafe { sys::ma_rb_commit_write(&self.inner as *const _ as *mut _, bytes, buf_ptr) };

        debug_assert!(commit_result == 0);

        count
    }

    /// Distance, in items, between the read and write pointers.
    #[inline]
    #[allow(dead_code)]
    pub(crate) fn pointer_distance(&self) -> usize {
        let byte_distance =
            unsafe { sys::ma_rb_pointer_distance(&self.inner as *const _ as *mut _) as usize };
        debug_assert!(byte_distance % std::mem::size_of::<T>() == 0);
        byte_distance / std::mem::size_of::<T>()
    }

    /// Number of items currently available to read.
    #[inline]
    pub(crate) fn available_read(&self) -> usize {
        let bytes_available =
            unsafe { sys::ma_rb_available_read(&self.inner as *const _ as *mut _) as usize };
        debug_assert!(bytes_available % std::mem::size_of::<T>() == 0);
        bytes_available / std::mem::size_of::<T>()
    }

    /// Number of items that can currently be written.
    #[inline]
    pub(crate) fn available_write(&self) -> usize {
        let bytes_available =
            unsafe { sys::ma_rb_available_write(&self.inner as *const _ as *mut _) as usize };
        debug_assert!(bytes_available % std::mem::size_of::<T>() == 0);
        bytes_available / std::mem::size_of::<T>()
    }

    /// Size of one subbuffer, in bytes (as reported by miniaudio).
    #[inline]
    #[allow(dead_code)]
    pub(crate) fn subbuffer_size(&self) -> usize {
        unsafe { sys::ma_rb_get_subbuffer_size(&self.inner as *const _ as *mut _) }
    }

    /// Stride between subbuffers, in bytes (as reported by miniaudio).
    #[inline]
    #[allow(dead_code)]
    pub(crate) fn subbuffer_stride(&self) -> usize {
        unsafe { sys::ma_rb_get_subbuffer_stride(&self.inner as *const _ as *mut _) }
    }

    /// Byte offset of the subbuffer at `index` within the backing buffer.
    #[inline]
    #[allow(dead_code)]
    pub(crate) fn subbuffer_offset(&self, index: usize) -> usize {
        unsafe { sys::ma_rb_get_subbuffer_offset(&self.inner as *const _ as *mut _, index) }
    }
}
255
// SAFETY: NOTE(review) — these impls assume `ma_rb` coordinates its
// read/write cursors so the buffer can be shared across threads (miniaudio
// documents it as a lock-free SPSC ring buffer); confirm against the
// vendored miniaudio version. `T: Send` is required because `T` values are
// transferred between threads through the buffer.
unsafe impl<T: Send + Sized + Clone> Send for RingBuffer<T> {}
unsafe impl<T: Send + Sized + Clone> Sync for RingBuffer<T> {}
258
/// Producer half of a split ring buffer: writes items into the buffer
/// shared (via `Arc`) with the matching `RingBufferRecv`.
pub struct RingBufferSend<T: Clone> {
    inner: Arc<RingBuffer<T>>,
}
264
265impl<T: Clone> RingBufferSend<T> {
266 pub fn write(&self, src: &[T]) -> usize {
271 self.inner.write(src.len(), |dest| {
272 dest.clone_from_slice(&src[0..dest.len()]);
273 })
274 }
275
276 pub fn write_with<F>(&self, count_requested: usize, f: F) -> usize
280 where
281 F: FnOnce(&mut [T]),
282 {
283 self.inner.write(count_requested, f)
284 }
285
286 pub fn available(&mut self) -> usize {
288 self.inner.available_write()
289 }
290}
291
292impl<T: Clone> Clone for RingBufferSend<T> {
293 fn clone(&self) -> Self {
294 RingBufferSend {
295 inner: Arc::clone(&self.inner),
296 }
297 }
298}
299
/// Consumer half of a split ring buffer: reads items out of the buffer
/// shared (via `Arc`) with the matching `RingBufferSend`.
pub struct RingBufferRecv<T: Clone> {
    inner: Arc<RingBuffer<T>>,
}
305
306impl<T: Clone> RingBufferRecv<T> {
307 pub fn read(&self, dest: &mut [T]) -> usize {
312 self.inner.read(dest.len(), |src| {
313 (&mut dest[0..src.len()]).clone_from_slice(src);
314 })
315 }
316
317 pub fn read_with<F>(&self, count_requested: usize, f: F) -> usize
321 where
322 F: FnOnce(&[T]),
323 {
324 self.inner.read(count_requested, f)
325 }
326
327 pub fn available(&mut self) -> usize {
329 self.inner.available_read()
330 }
331}
332
333impl<T: Clone> Clone for RingBufferRecv<T> {
334 fn clone(&self) -> Self {
335 RingBufferRecv {
336 inner: Arc::clone(&self.inner),
337 }
338 }
339}
340
341impl<T: Clone> Drop for RingBuffer<T> {
342 fn drop(&mut self) {
343 unsafe {
344 let buffer_ptr = self.inner.pBuffer;
345 let count = self.inner.subbufferCount;
346 let owns_buffer = from_bool32(self.inner.ownsBuffer());
347
348 sys::ma_rb_uninit(&mut self.inner);
349
350 if !owns_buffer && !buffer_ptr.is_null() {
352 let preallocated_slice = std::slice::from_raw_parts_mut(buffer_ptr, count as usize);
353 let _preallocated_box = Box::from_raw(preallocated_slice.as_mut_ptr());
354 }
355 };
356 }
357}
358
359pub fn ring_buffer<T: Clone + Send>(
364 subbuffer_len: usize,
365 subbuffer_count: usize,
366) -> Result<(RingBufferSend<T>, RingBufferRecv<T>), Error> {
367 RingBuffer::create_pair(subbuffer_len, subbuffer_count)
368}
369
370pub fn ring_buffer_preallocated<T: Clone + Send>(
375 subbuffer_len: usize,
376 subbuffer_count: usize,
377 subbuffer_stride_in_bytes: usize,
378 preallocated: Box<[T]>,
379) -> Result<(RingBufferSend<T>, RingBufferRecv<T>), Error> {
380 RingBuffer::create_pair_preallocated(
381 subbuffer_len,
382 subbuffer_count,
383 subbuffer_stride_in_bytes,
384 preallocated,
385 )
386}