// mutringbuf/iterators/iterator_trait.rs

use crate::iterators::sync_iterators::detached::Detached;
use crate::ring_buffer::storage::MRBIndex;
use crate::ring_buffer::variants::ring_buffer_trait::{IterManager, StorageManager};
use crate::ring_buffer::wrappers::buf_ref::BufRef;
use crate::{MutRB, Storage, UnsafeSyncCell};
use core::mem::transmute;
use core::slice;

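// With the `vmem` feature, the backing storage is mapped twice back-to-back in
// virtual memory, so any in-bounds chunk is contiguous and a single slice
// suffices. Without it, a chunk that wraps past the physical end of the buffer
// comes back as a (head, tail) pair of slices; such a pair is typically
// consumed by chaining the halves, e.g. `head.iter_mut().chain(tail.iter_mut())`.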
#[cfg(feature = "vmem")]
pub type MutableSlice<'a, T> = &'a mut [T];
#[cfg(not(feature = "vmem"))]
pub type MutableSlice<'a, T> = (&'a mut [T], &'a mut [T]);

#[cfg(feature = "vmem")]
pub type NonMutableSlice<'a, T> = &'a [T];
#[cfg(not(feature = "vmem"))]
pub type NonMutableSlice<'a, T> = (&'a [T], &'a [T]);

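/// Interface common to all ring-buffer iterators (producer, worker, consumer).
/// Each iterator keeps a local index into the buffer; advancing it publishes
/// the new position through a shared atomic, releasing the passed-over items
/// to the next iterator in the chain.
///
/// A minimal sketch of a worker loop; the buffer construction and the
/// `split_mut` call are assumptions for illustration, not part of this trait:
///
/// ```ignore
/// let (mut prod, mut work, mut cons) = buf.split_mut();
/// if let Some(item) = work.get_workable() {
///     *item *= 2;                 // mutate the element in place
///     unsafe { work.advance(1) }; // hand it over to the consumer
/// }
/// ```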
#[allow(private_bounds)]
pub trait MRBIterator: PrivateMRBIterator<Self::Item> {
    type Item;

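    /// Detaches the iterator, wrapping it in [`Detached`]. A detached iterator
    /// advances only its local index and leaves the shared atomic index
    /// untouched, so items can be inspected ahead without being released to
    /// the next iterator until the detached iterator is synced or reattached.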
    #[inline]
    fn detach(self) -> Detached<Self>
    where
        Self: Sized,
    {
        Detached::from_iter(self)
    }

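    /// Advances the iterator by `count` items, publishing the new position to
    /// the shared atomic index.
    ///
    /// # Safety
    /// `count` must not exceed the value returned by [`available`](Self::available):
    /// the index arithmetic is unchecked, and advancing past the available
    /// region lets this iterator overrun the one ahead of it.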
    #[inline]
    unsafe fn advance(&mut self, count: usize) {
        unsafe { self._advance(count) };
    }

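    /// Returns the number of items currently available to this iterator.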
    #[inline]
    fn available(&mut self) -> usize {
        self._available()
    }

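    /// Busy-waits until at least `count` items are available. The wait is a
    /// bare spin loop: it burns CPU and never yields to the scheduler, so it
    /// is only appropriate for short waits.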
    fn wait_for(&mut self, count: usize) {
        while self.available() < count {}
    }

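    /// Returns the current local index of this iterator within the buffer.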
    #[inline]
    fn index(&self) -> usize {
        self._index()
    }

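    /// Returns the length of the underlying buffer.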
    #[inline]
    fn buf_len(&self) -> usize {
        self.buffer().inner_len()
    }

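    /// Returns the number of iterators still alive on this buffer.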
    fn alive_iters(&self) -> u8 {
        self.buffer().alive_iters()
    }

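    // Raw positions of the producer, worker and consumer iterators, as read
    // from the shared indices.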
    #[inline(always)]
    fn prod_index(&self) -> usize {
        self.buffer().prod_index()
    }
    #[inline(always)]
    fn work_index(&self) -> usize {
        self.buffer().work_index()
    }
    #[inline(always)]
    fn cons_index(&self) -> usize {
        self.buffer().cons_index()
    }

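    /// Returns a mutable reference to the next workable item, or `None` if
    /// none is available. The iterator is not advanced; call
    /// [`advance`](Self::advance) once the item has been processed:
    ///
    /// ```ignore
    /// if let Some(sample) = work.get_workable() {
    ///     *sample = process(*sample); // `process` is a stand-in
    ///     unsafe { work.advance(1) };
    /// }
    /// ```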
    #[inline]
    fn get_workable<'a>(&mut self) -> Option<&'a mut Self::Item> {
        self.next_ref_mut()
    }

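    /// Returns a mutable slice over exactly `count` workable items, or `None`
    /// if fewer than `count` are available. Without the `vmem` feature the
    /// result is a (head, tail) pair, since the requested span may wrap
    /// around the end of the buffer.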
    #[inline]
    fn get_workable_slice_exact<'a>(
        &mut self,
        count: usize,
    ) -> Option<MutableSlice<'a, <Self as MRBIterator>::Item>> {
        self.next_chunk_mut(count)
    }

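    /// Returns a mutable slice over all currently available workable items,
    /// or `None` if no item is available.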
    #[inline]
    fn get_workable_slice_avail<'a>(
        &mut self,
    ) -> Option<MutableSlice<'a, <Self as MRBIterator>::Item>> {
        match self.available() {
            0 => None,
            avail => self.get_workable_slice_exact(avail),
        }
    }

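    /// Returns a mutable slice over the largest available number of workable
    /// items that is a multiple of `rhs`, or `None` if that number is zero.
    /// `rhs` must be non-zero, as it is used as a modulus. Useful for
    /// block-oriented processing:
    ///
    /// ```ignore
    /// // With 7 items available and rhs = 3, this yields 6 items.
    /// if let Some(block) = work.get_workable_slice_multiple_of(3) {
    ///     // process whole 3-item frames ...
    /// }
    /// ```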
    #[inline]
    fn get_workable_slice_multiple_of<'a>(
        &mut self,
        rhs: usize,
    ) -> Option<MutableSlice<'a, <Self as MRBIterator>::Item>> {
        let avail = self.available();

        unsafe {
            match avail.unchecked_sub(avail % rhs) {
                0 => None,
                avail => self.get_workable_slice_exact(avail),
            }
        }
    }
}

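/// Crate-internal plumbing shared by all iterator types: index bookkeeping,
/// a cached availability count that spares a read of the shared atomics on
/// the hot path, and the raw element/chunk accessors the public trait wraps.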
pub(crate) trait PrivateMRBIterator<T> {
    fn buffer(&self) -> &BufRef<'_, impl MutRB<Item = T>>;
    fn _available(&mut self) -> usize;
    fn cached_avail(&self) -> usize;
    fn set_cached_avail(&mut self, avail: usize);
    fn _index(&self) -> usize;
    fn set_local_index(&mut self, index: usize);
    fn set_atomic_index(&self, index: usize);

    fn succ_index(&self) -> usize;

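    // `_advance` bumps the local index, then publishes it. The wrap-around in
    // `advance_local` is a single conditional subtraction, which is sufficient
    // because `count` never exceeds the buffer length, so the sum stays below
    // twice the length.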
    #[inline]
    unsafe fn _advance(&mut self, count: usize) {
        unsafe { self.advance_local(count) };

        self.set_atomic_index(self._index());
    }

    #[inline]
    unsafe fn advance_local(&mut self, count: usize) {
        self.set_local_index(unsafe { self._index().unchecked_add(count) });

        if self._index() >= self.buffer().inner_len() {
            self.set_local_index(unsafe {
                self._index().unchecked_sub(self.buffer().inner_len())
            });
        }

        self.set_cached_avail(self.cached_avail().saturating_sub(count));
    }

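    // Fast-path availability check: trust the locally cached count when it is
    // large enough, and only recompute via `_available` (a read of the shared
    // indices) when it is not.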
    #[inline]
    fn check(&mut self, count: usize) -> bool {
        self.cached_avail() >= count || self._available() >= count
    }

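    // Element-wise accessors. `next` moves the value out of its cell and
    // advances; `next_duplicate` hands out a duplicate and advances; the
    // `next_ref*` variants return references (with caller-chosen lifetimes)
    // without advancing.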
    #[inline]
    fn next(&mut self) -> Option<T> {
        self.check(1).then(|| unsafe {
            let ret = self.buffer().inner()._index(self._index()).take_inner();

            self._advance(1);

            ret
        })
    }

    #[inline]
    fn next_duplicate(&mut self) -> Option<T> {
        self.check(1).then(|| unsafe {
            let ret = self
                .buffer()
                .inner()
                ._index(self._index())
                .inner_duplicate();

            self._advance(1);

            ret
        })
    }

    #[inline]
    fn next_ref<'a>(&mut self) -> Option<&'a T> {
        unsafe {
            self.check(1)
                .then(|| self.buffer().inner()._index(self._index()).inner_ref())
        }
    }

    #[inline]
    fn next_ref_mut<'a>(&mut self) -> Option<&'a mut T> {
        unsafe {
            self.check(1)
                .then(|| self.buffer().inner()._index(self._index()).inner_ref_mut())
        }
    }

    #[inline]
    fn next_ref_mut_init(&mut self) -> Option<*mut T> {
        self.check(1)
            .then(|| self.buffer().inner()._index(self._index()).as_mut_ptr())
    }

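    // Chunk accessors, `vmem` flavour: thanks to the mirrored mapping, a chunk
    // starting at any in-bounds index is contiguous in virtual memory, so one
    // `slice::from_raw_parts` call covers it even when it wraps logically.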
    #[cfg(feature = "vmem")]
    #[inline]
    fn next_chunk<'a>(&mut self, count: usize) -> Option<NonMutableSlice<'a, T>> {
        self.check(count).then(|| unsafe {
            transmute::<&[UnsafeSyncCell<T>], &[T]>(slice::from_raw_parts(
                self.buffer().inner().as_ptr().add(self._index()),
                count,
            ))
        })
    }
    #[cfg(feature = "vmem")]
    #[inline]
    fn next_chunk_mut<'a>(&mut self, count: usize) -> Option<MutableSlice<'a, T>> {
        self.check(count).then(|| unsafe {
            transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(slice::from_raw_parts_mut(
                self.buffer().inner_mut().as_mut_ptr().add(self._index()),
                count,
            ))
        })
    }

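    // Chunk accessors without `vmem`: a chunk that runs past the physical end
    // of the buffer is returned as a (head, tail) pair, with the head covering
    // `index..len` and the tail wrapping around to the start; otherwise the
    // tail is empty.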
    #[cfg(not(feature = "vmem"))]
    #[inline]
    fn next_chunk<'a>(&mut self, count: usize) -> Option<NonMutableSlice<'a, T>> {
        self.check(count).then(|| {
            let len = self.buffer().inner_len();

            unsafe {
                let ptr = self.buffer().inner().as_ptr();

                if self._index() + count >= len {
                    (
                        transmute::<&[UnsafeSyncCell<T>], &[T]>(slice::from_raw_parts(
                            ptr.add(self._index()),
                            len.unchecked_sub(self._index()),
                        )),
                        transmute::<&[UnsafeSyncCell<T>], &[T]>(slice::from_raw_parts(
                            ptr,
                            self._index().unchecked_add(count).unchecked_sub(len),
                        )),
                    )
                } else {
                    (
                        transmute::<&[UnsafeSyncCell<T>], &[T]>(slice::from_raw_parts(
                            ptr.add(self._index()),
                            count,
                        )),
                        &[] as &[T],
                    )
                }
            }
        })
    }

    #[cfg(not(feature = "vmem"))]
    #[inline]
    fn next_chunk_mut<'a>(&mut self, count: usize) -> Option<MutableSlice<'a, T>> {
        self.check(count).then(|| {
            let len = self.buffer().inner_len();

            unsafe {
                let ptr = self.buffer().inner_mut().as_mut_ptr();

                if self._index() + count >= len {
                    (
                        transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(slice::from_raw_parts_mut(
                            ptr.add(self._index()),
                            len.unchecked_sub(self._index()),
                        )),
                        transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(slice::from_raw_parts_mut(
                            ptr,
                            self._index().unchecked_add(count).unchecked_sub(len),
                        )),
                    )
                } else {
                    (
                        transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(slice::from_raw_parts_mut(
                            ptr.add(self._index()),
                            count,
                        )),
                        &mut [] as &mut [T],
                    )
                }
            }
        })
    }
}

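/// Helper macro used by the concrete iterator types to stamp out the
/// boilerplate `PrivateMRBIterator` field accessors; it assumes the
/// implementing struct has `buffer`, `index` and `cached_avail` fields.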
pub(crate) mod iter_macros {
    macro_rules! private_impl {
        () => {
            #[inline]
            fn buffer(&self) -> &BufRef<'_, impl MutRB<Item = T>> {
                &self.buffer
            }

            #[inline]
            fn _index(&self) -> usize {
                self.index
            }
            #[inline]
            fn set_local_index(&mut self, index: usize) {
                self.index = index;
            }

            #[inline]
            fn cached_avail(&self) -> usize {
                self.cached_avail
            }
            #[inline]
            fn set_cached_avail(&mut self, avail: usize) {
                self.cached_avail = avail;
            }
        };
    }

    pub(crate) use private_impl;
}