// mutringbuf/iterators/iterator_trait.rs

1use crate::iterators::sync_iterators::detached::Detached;
2
/// Returned by slice-specialised functions.
///
/// The two slices are the contiguous pieces of a possibly wrapping region of
/// the ring buffer:
/// - `.0`: head — from the iterator's index up to at most the end of the buffer;
/// - `.1`: tail — the wrapped part from the start of the buffer (empty when the
///   region does not wrap).
pub type WorkableSlice<'a, T> = (&'a mut [T], &'a mut [T]);
8
9
10/// Trait implemented by iterators.
11#[allow(private_bounds)]
12pub trait MRBIterator: PrivateMRBIterator<PItem = Self::Item> {
13    type Item;
14
15    /// Detaches the iterator yielding a [`Detached`].
16    #[inline]
17    fn detach(self) -> Detached<Self> where Self: Sized {
18        Detached::from_iter(self)
19    }
20    
21    /// Advances the iterator by `count`.
22    ///
23    /// # Safety
24    /// An iterator should never overstep its successor, so it must always be: `count` <= [`MRBIterator::available()`]!
25    unsafe fn advance(&mut self, count: usize);
26
27    /// Returns the number of items available for an iterator.
28    fn available(&mut self) -> usize;
29
30    /// Waits, blocking the thread in a loop, until there are at least `count` available items.
31    fn wait_for(&mut self, count: usize) {
32        while self.available() < count {}
33    }
34
35    /// Returns the index of the iterator.
36    fn index(&self) -> usize;
37
38    /// Returns the length of the buffer.
39    fn buf_len(&self) -> usize;
40
41    /// Returns `true` if the producer iterator is still alive, `false` if it has been dropped.
42    fn is_prod_alive(&self) -> bool;
43    /// Returns `true` if the worker iterator is still alive, `false` if it has been dropped.
44    fn is_work_alive(&self) -> bool;
45    /// Returns `true` if the consumer iterator is still alive, `false` if it has been dropped.
46    fn is_cons_alive(&self) -> bool;
47    /// Returns the index of the producer.
48    fn prod_index(&self) -> usize;
49    /// Returns the index of the worker.
50    fn work_index(&self) -> usize;
51    /// Returns the index of the consumer.
52    fn cons_index(&self) -> usize;
53    
54    /// Returns a mutable references to the current value.
55    ///
56    /// <div class="warning">
57    ///
58    /// Being these references, [`Self::advance()`] has to be called when done with the mutation
59    /// in order to move the iterator.
60    /// </div>
61    #[inline]
62    fn get_workable<'a>(&mut self) -> Option<&'a mut Self::Item> {
63        self.next_ref_mut()
64    }
65
66    /// Returns a tuple of mutable slice references, the sum of which with len equal to `count`.
67    /// <div class="warning">
68    ///
69    /// Being these references, [`Self::advance()`] has to be called when done with the mutation
70    /// in order to move the iterator.
71    /// </div>
72    #[inline]
73    fn get_workable_slice_exact<'a>(&mut self, count: usize) -> Option<WorkableSlice<'a, <Self as MRBIterator>::Item>> {
74        self.next_chunk_mut(count)
75    }
76
77    /// Returns a tuple of mutable slice references, the sum of which with len equal to [`Self::available()`].
78    /// <div class="warning">
79    ///
80    /// Being these references, [`Self::advance()`] has to be called when done with the mutation
81    /// in order to move the iterator.
82    /// </div>
83    #[inline]
84    fn get_workable_slice_avail<'a>(&mut self) -> Option<WorkableSlice<'a, <Self as MRBIterator>::Item>> {
85        match self.available() {
86            0 => None,
87            avail => self.get_workable_slice_exact(avail)
88        }
89    }
90
91    /// Returns a tuple of mutable slice references, the sum of which with len equal to the
92    /// higher multiple of `rhs`.
93    /// <div class="warning">
94    ///
95    /// Being these references, [`Self::advance()`] has to be called when done with the mutation
96    /// in order to move the iterator.
97    /// </div>
98    #[inline]
99    fn get_workable_slice_multiple_of<'a>(&mut self, rhs: usize) -> Option<WorkableSlice<'a, <Self as MRBIterator>::Item>> {
100        let avail = self.available();
101
102        match avail - avail % rhs {
103            0 => None,
104            avail => self.get_workable_slice_exact(avail)
105        }
106    }
107}
108
/// Crate-internal companion of [`MRBIterator`]; supplies the low-level index
/// bookkeeping and element-access primitives the public trait builds on.
pub(crate) trait PrivateMRBIterator {
    /// Item type; tied to `MRBIterator::Item` by the supertrait bound.
    type PItem;
    
    /// Returns the locally cached availability count (may lag behind the real one).
    fn cached_avail(&mut self) -> usize;
    /// Overwrites the locally cached availability count.
    fn set_cached_avail(&mut self, avail: usize);
    /// Sets the local (non-atomic) index of this iterator.
    ///
    /// NOTE(review): presumably the caller must also publish the new value via
    /// `set_atomic_index` to keep the global view consistent — confirm at call sites.
    unsafe fn set_local_index(&mut self, index: usize);
    
    /// Advances the local index by `count` without publishing it atomically.
    unsafe fn advance_local(&mut self, count: usize);
    
    /// Sets the global index of this iterator.
    fn set_atomic_index(&self, index: usize);

    /// Returns the global index of successor.
    fn succ_index(&self) -> usize;

    /// Checks whether the current index can be returned
    fn check(&mut self, count: usize) -> bool;

    /// Returns Some(current element), if `check()` returns `true`, else None
    fn next(&mut self) -> Option<Self::PItem>;

    /// Returns Some(current element), if `check()` returns `true`, else None. The value is duplicated.
    fn next_duplicate(&mut self) -> Option<Self::PItem>;

    /// Returns Some(&current element), if `check()` returns `true`, else None
    fn next_ref<'a>(&mut self) -> Option<&'a Self::PItem>;

    /// Returns Some(&mut current element), if `check()` returns `true`, else None
    fn next_ref_mut<'a>(&mut self) -> Option<&'a mut Self::PItem>;

    /// As next_ref_mut, but can be used for initialisation of inner MaybeUninit.
    fn next_ref_mut_init(&mut self) -> Option<*mut Self::PItem>;

    /// Returns the next `count` elements as a (head, tail) pair of shared
    /// slices, if `check(count)` passes, else None.
    fn next_chunk<'a>(&mut self, count: usize) -> Option<(&'a [Self::PItem], &'a [Self::PItem])>;

    /// Returns the next `count` elements as a (head, tail) pair of mutable
    /// slices, if `check(count)` passes, else None.
    fn next_chunk_mut<'a>(&mut self, count: usize) -> Option<(&'a mut [Self::PItem], &'a mut [Self::PItem])>;
}
146
/// Macros that expand to the method bodies shared by the concrete iterator
/// types, for both the public and the private iterator traits.
///
/// Both macros assume the expansion site is an `impl` block whose `Self` type
/// has `buffer`, `index` and `cached_avail` fields, and that `T`,
/// `UnsafeSyncCell`, `transmute` and `slice` are in scope there.
pub(crate) mod iter_macros {
    /// Expands to the shared `MRBIterator` (public trait) method bodies.
    macro_rules! public_impl { () => (
        #[inline]
        fn is_prod_alive(&self) -> bool {
            self.buffer.prod_alive()
        }
    
        #[inline]
        fn is_work_alive(&self) -> bool {
            self.buffer.work_alive()
        }
    
        #[inline]
        fn is_cons_alive(&self) -> bool {
            self.buffer.cons_alive()
        }
    
        #[inline]
        fn prod_index(&self) -> usize {
            self.buffer.prod_index()
        }
    
        #[inline]
        fn work_index(&self) -> usize {
            self.buffer.work_index()
        }
    
        #[inline]
        fn cons_index(&self) -> usize {
            self.buffer.cons_index()
        }
        
        #[inline]
        unsafe fn advance(&mut self, count: usize) {
            // Move the local index first, then publish it so the other
            // iterators observe the new position.
            self.advance_local(count);

            self.set_atomic_index(self.index);
        }

        #[inline]
        fn index(&self) -> usize {
            self.index
        }

        #[inline]
        fn buf_len(&self) -> usize {
            self.buffer.inner_len()
        }
    )}

    /// Expands to the shared `PrivateMRBIterator` method bodies.
    macro_rules! private_impl { () => (
        #[inline]
        fn cached_avail(&mut self) -> usize {
            self.cached_avail
        }
    
        #[inline]
        fn set_cached_avail(&mut self, avail: usize) {
            self.cached_avail = avail;
        }
    
        #[inline]
        unsafe fn set_local_index(&mut self, index: usize) {
            self.index = index;
        }
            
        #[inline]
        unsafe fn advance_local(&mut self, count: usize) {
            // SAFETY(caller): `count` must not exceed the items available to
            // this iterator (see `MRBIterator::advance`), which presumably
            // bounds `index + count` well below `usize::MAX`, making
            // `unchecked_add` sound — TODO confirm the invariant at call sites.
            self.index = self.index.unchecked_add(count);

            // Wrap around the end of the buffer: a single conditional
            // subtraction instead of a modulo.
            if self.index >= self.buf_len() {
                self.index = self.index.unchecked_sub(self.buf_len());
            }

            self.cached_avail = self.cached_avail.saturating_sub(count);
        }
        
        #[inline]
        fn check(&mut self, count: usize) -> bool {
            // Fast path on the locally cached count; only fall back to a fresh
            // `available()` computation when the cache is insufficient.
            self.cached_avail >= count || self.available() >= count
        }

        #[inline]
        fn next(&mut self) -> Option<T> {
            self.check(1).then(|| unsafe {
                // Move the value out of the cell, then step past it.
                let ret = self.buffer.inner()[self.index].take_inner();

                self.advance(1);

                ret
            })
        }
        
        #[inline]
        fn next_duplicate(&mut self) -> Option<T> {
            self.check(1).then(|| unsafe {
                // Duplicate the current value; the original stays in the buffer.
                let ret = self.buffer.inner()[self.index].inner_duplicate();

                self.advance(1);

                ret
            })
        }

        #[inline]
        fn next_ref<'a>(&mut self) -> Option<&'a T> {
            // Does NOT advance: the caller advances once done with the reference.
            unsafe { self.check(1).then(|| self.buffer.inner()[self.index].inner_ref()) }
        }

        #[inline]
        fn next_ref_mut<'a>(&mut self) -> Option<&'a mut T> {
            // Does NOT advance: the caller advances once done with the reference.
            unsafe { self.check(1).then(|| self.buffer.inner()[self.index].inner_ref_mut()) }
        }

        #[inline]
        fn next_ref_mut_init(&mut self) -> Option<*mut T> {
            // Raw pointer so the slot can be written even while uninitialised.
            self.check(1).then(|| self.buffer.inner()[self.index].as_mut_ptr())
        }

        #[inline]
        fn next_chunk<'a>(&mut self, count: usize) -> Option<(&'a [T], &'a [T])> {
            self.check(count).then(|| {
                
                unsafe {
                    let ptr = self.buffer.inner().as_ptr();
                    
                    // Wrapping case: the chunk crosses the physical end of the
                    // buffer and is returned as (head, tail). The tail is empty
                    // when the chunk ends exactly at the boundary.
                    if self.index + count >= self.buf_len() {
                        (
                            // The transmutes assume `UnsafeSyncCell<T>` has the
                            // same layout as `T` (presumably
                            // `#[repr(transparent)]`) — TODO confirm against the
                            // cell's declaration.
                            transmute::<&[UnsafeSyncCell<T>], &[T]>(
                                slice::from_raw_parts(ptr.add(self.index), self.buf_len() - self.index)
                            ),
                            transmute::<&[UnsafeSyncCell<T>], &[T]>(
                                slice::from_raw_parts(ptr, self.index + count - self.buf_len())
                            )
                        )
                    } else {
                        (
                            transmute::<&[UnsafeSyncCell<T>], &[T]>(
                                slice::from_raw_parts(ptr.add(self.index), count)
                            ),
                            &mut [] as &[T]
                        )
                    }
                }
            })
        }

        #[inline]
        fn next_chunk_mut<'a>(&mut self, count: usize) -> Option<(&'a mut [T], &'a mut [T])> {
            self.check(count).then(|| {
                
                unsafe {
                    let ptr = self.buffer.inner_mut().as_mut_ptr();
                    
                    // Same (head, tail) split as `next_chunk`, but with
                    // mutable slices.
                    if self.index + count >= self.buf_len() {
                        (
                            transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(
                                slice::from_raw_parts_mut(ptr.add(self.index), self.buf_len() - self.index)
                            ),
                            transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(
                                slice::from_raw_parts_mut(ptr, self.index + count - self.buf_len())
                            )
                        )
                    } else {
                        (
                            transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(
                                slice::from_raw_parts_mut(ptr.add(self.index), count)
                            ),
                            &mut [] as &mut [T]
                        )
                    }
                }
            })
        }
    )}

    pub(crate) use { public_impl, private_impl };
}