use crate::iterators::sync_iterators::detached::Detached;

/// A workable region of the ring buffer, returned as a pair of mutable slices.
///
/// The region may wrap past the end of the underlying storage, so it is split
/// into `(head, tail)`: `head` starts at the iterator's current index, and
/// `tail` (empty when the region is contiguous) continues from the start of
/// the buffer.
pub type WorkableSlice<'a, T> = (&'a mut [T], &'a mut [T]);
10#[allow(private_bounds)]
12pub trait MRBIterator: PrivateMRBIterator<PItem = Self::Item> {
13 type Item;
14
15 #[inline]
17 fn detach(self) -> Detached<Self> where Self: Sized {
18 Detached::from_iter(self)
19 }
20
21 unsafe fn advance(&mut self, count: usize);
26
27 fn available(&mut self) -> usize;
29
30 fn wait_for(&mut self, count: usize) {
32 while self.available() < count {}
33 }
34
35 fn index(&self) -> usize;
37
38 fn buf_len(&self) -> usize;
40
41 fn is_prod_alive(&self) -> bool;
43 fn is_work_alive(&self) -> bool;
45 fn is_cons_alive(&self) -> bool;
47 fn prod_index(&self) -> usize;
49 fn work_index(&self) -> usize;
51 fn cons_index(&self) -> usize;
53
54 #[inline]
62 fn get_workable<'a>(&mut self) -> Option<&'a mut Self::Item> {
63 self.next_ref_mut()
64 }
65
66 #[inline]
73 fn get_workable_slice_exact<'a>(&mut self, count: usize) -> Option<WorkableSlice<'a, <Self as MRBIterator>::Item>> {
74 self.next_chunk_mut(count)
75 }
76
77 #[inline]
84 fn get_workable_slice_avail<'a>(&mut self) -> Option<WorkableSlice<'a, <Self as MRBIterator>::Item>> {
85 match self.available() {
86 0 => None,
87 avail => self.get_workable_slice_exact(avail)
88 }
89 }
90
91 #[inline]
99 fn get_workable_slice_multiple_of<'a>(&mut self, rhs: usize) -> Option<WorkableSlice<'a, <Self as MRBIterator>::Item>> {
100 let avail = self.available();
101
102 match avail - avail % rhs {
103 0 => None,
104 avail => self.get_workable_slice_exact(avail)
105 }
106 }
107}
108
/// Crate-internal counterpart of [`MRBIterator`]: the low-level primitives
/// each concrete iterator implements (typically via the macros in
/// `iter_macros` below).
pub(crate) trait PrivateMRBIterator {
    /// Element type stored in the buffer; tied to `MRBIterator::Item` by the
    /// supertrait bound on `MRBIterator`.
    type PItem;

    /// Returns the locally cached availability count.
    fn cached_avail(&mut self) -> usize;
    /// Overwrites the locally cached availability count.
    fn set_cached_avail(&mut self, avail: usize);
    /// Sets the local index without publishing it to the shared atomic.
    ///
    /// # Safety
    /// Callers must keep the local index consistent with the shared state;
    /// an arbitrary index can expose elements not owned by this iterator.
    unsafe fn set_local_index(&mut self, index: usize);

    /// Advances the local index by `count`, wrapping at the buffer end.
    ///
    /// # Safety
    /// `count` must not exceed the number of available elements (the macro
    /// implementation uses unchecked arithmetic on this assumption).
    unsafe fn advance_local(&mut self, count: usize);

    /// Publishes `index` through this iterator's shared atomic index.
    fn set_atomic_index(&self, index: usize);

    /// Index of the successor in the pipeline.
    /// NOTE(review): exact semantics are defined per implementor — confirm there.
    fn succ_index(&self) -> usize;

    /// Returns `true` when at least `count` elements are available,
    /// consulting the cached count before recomputing.
    fn check(&mut self, count: usize) -> bool;

    /// Moves the next element out of the buffer and advances past it,
    /// if one is available.
    fn next(&mut self) -> Option<Self::PItem>;

    /// Returns a duplicate of the next element and advances past it
    /// (duplication semantics live in `UnsafeSyncCell::inner_duplicate`).
    fn next_duplicate(&mut self) -> Option<Self::PItem>;

    /// Returns a shared reference to the next element without advancing.
    fn next_ref<'a>(&mut self) -> Option<&'a Self::PItem>;

    /// Returns a mutable reference to the next element without advancing.
    fn next_ref_mut<'a>(&mut self) -> Option<&'a mut Self::PItem>;

    /// Returns a raw pointer to the next slot without advancing;
    /// presumably used to initialise slots in place — confirm at call sites.
    fn next_ref_mut_init(&mut self) -> Option<*mut Self::PItem>;

    /// Returns `count` elements as a wrap-aware `(head, tail)` pair of
    /// shared slices, or `None` if fewer than `count` are available.
    fn next_chunk<'a>(&mut self, count: usize) -> Option<(&'a [Self::PItem], &'a [Self::PItem])>;

    /// Mutable variant of [`Self::next_chunk`].
    fn next_chunk_mut<'a>(&mut self, count: usize) -> Option<(&'a mut [Self::PItem], &'a mut [Self::PItem])>;
}
146
/// Helper macros shared by the concrete iterator types.
///
/// Every concrete iterator stores the same fields — `buffer`, `index` and
/// `cached_avail` — so the `MRBIterator` / `PrivateMRBIterator` method bodies
/// are identical across them. These macros expand to those method items and
/// must be invoked inside an `impl` block where `T`, `self.buffer`,
/// `self.index` and `self.cached_avail` are in scope.
pub(crate) mod iter_macros {
    /// Expands to the public `MRBIterator` methods: liveness/index queries
    /// delegated to the shared buffer, plus `advance`, `index` and `buf_len`.
    macro_rules! public_impl { () => (
        #[inline]
        fn is_prod_alive(&self) -> bool {
            self.buffer.prod_alive()
        }

        #[inline]
        fn is_work_alive(&self) -> bool {
            self.buffer.work_alive()
        }

        #[inline]
        fn is_cons_alive(&self) -> bool {
            self.buffer.cons_alive()
        }

        #[inline]
        fn prod_index(&self) -> usize {
            self.buffer.prod_index()
        }

        #[inline]
        fn work_index(&self) -> usize {
            self.buffer.work_index()
        }

        #[inline]
        fn cons_index(&self) -> usize {
            self.buffer.cons_index()
        }

        #[inline]
        unsafe fn advance(&mut self, count: usize) {
            // Move the local index first, then publish it through the shared
            // atomic so other iterators observe the progress.
            self.advance_local(count);

            self.set_atomic_index(self.index);
        }

        #[inline]
        fn index(&self) -> usize {
            self.index
        }

        #[inline]
        fn buf_len(&self) -> usize {
            self.buffer.inner_len()
        }
    )}

    /// Expands to the `PrivateMRBIterator` methods: cached-availability
    /// bookkeeping, element access, and wrap-aware chunked access.
    macro_rules! private_impl { () => (
        #[inline]
        fn cached_avail(&mut self) -> usize {
            self.cached_avail
        }

        #[inline]
        fn set_cached_avail(&mut self, avail: usize) {
            self.cached_avail = avail;
        }

        #[inline]
        unsafe fn set_local_index(&mut self, index: usize) {
            self.index = index;
        }

        #[inline]
        unsafe fn advance_local(&mut self, count: usize) {
            // SAFETY (caller contract): `count` must be bounded by the number
            // of available elements, so the add cannot overflow.
            self.index = self.index.unchecked_add(count);

            // Manual wrap instead of `%`: one compare-and-subtract suffices
            // because a bounded `count` can pass the end at most once.
            if self.index >= self.buf_len() {
                self.index = self.index.unchecked_sub(self.buf_len());
            }

            self.cached_avail = self.cached_avail.saturating_sub(count);
        }

        #[inline]
        fn check(&mut self, count: usize) -> bool {
            // Fast path on the cached count; otherwise fall back to
            // `available()`, which presumably refreshes the cache from the
            // shared indices — defined per iterator, confirm there.
            self.cached_avail >= count || self.available() >= count
        }

        #[inline]
        fn next(&mut self) -> Option<T> {
            // Move the value out of the current slot, then advance past it.
            self.check(1).then(|| unsafe {
                let ret = self.buffer.inner()[self.index].take_inner();

                self.advance(1);

                ret
            })
        }

        #[inline]
        fn next_duplicate(&mut self) -> Option<T> {
            // Like `next`, but duplicates the value instead of moving it out
            // (semantics live in `UnsafeSyncCell::inner_duplicate`).
            self.check(1).then(|| unsafe {
                let ret = self.buffer.inner()[self.index].inner_duplicate();

                self.advance(1);

                ret
            })
        }

        #[inline]
        fn next_ref<'a>(&mut self) -> Option<&'a T> {
            // Borrow the current slot without advancing.
            unsafe { self.check(1).then(|| self.buffer.inner()[self.index].inner_ref()) }
        }

        #[inline]
        fn next_ref_mut<'a>(&mut self) -> Option<&'a mut T> {
            // Mutably borrow the current slot without advancing.
            unsafe { self.check(1).then(|| self.buffer.inner()[self.index].inner_ref_mut()) }
        }

        #[inline]
        fn next_ref_mut_init(&mut self) -> Option<*mut T> {
            // Raw pointer to the current slot, without advancing.
            self.check(1).then(|| self.buffer.inner()[self.index].as_mut_ptr())
        }

        #[inline]
        fn next_chunk<'a>(&mut self, count: usize) -> Option<(&'a [T], &'a [T])> {
            self.check(count).then(|| {

                unsafe {
                    let ptr = self.buffer.inner().as_ptr();

                    // NOTE(review): the transmutes assume `UnsafeSyncCell<T>`
                    // has the same layout as `T` (repr(transparent)) — confirm
                    // at its definition.
                    if self.index + count >= self.buf_len() {
                        // Region wraps (or ends exactly at the buffer end, in
                        // which case the second slice is empty): split into a
                        // tail-of-buffer part and a start-of-buffer part.
                        (
                            transmute::<&[UnsafeSyncCell<T>], &[T]>(
                                slice::from_raw_parts(ptr.add(self.index), self.buf_len() - self.index)
                            ),
                            transmute::<&[UnsafeSyncCell<T>], &[T]>(
                                slice::from_raw_parts(ptr, self.index + count - self.buf_len())
                            )
                        )
                    } else {
                        // Contiguous region: the second slice is always empty.
                        (
                            transmute::<&[UnsafeSyncCell<T>], &[T]>(
                                slice::from_raw_parts(ptr.add(self.index), count)
                            ),
                            &mut [] as &[T]
                        )
                    }
                }
            })
        }

        #[inline]
        fn next_chunk_mut<'a>(&mut self, count: usize) -> Option<(&'a mut [T], &'a mut [T])> {
            // Mutable twin of `next_chunk`; same wrap-around split and the
            // same layout assumption on `UnsafeSyncCell<T>`.
            self.check(count).then(|| {

                unsafe {
                    let ptr = self.buffer.inner_mut().as_mut_ptr();

                    if self.index + count >= self.buf_len() {
                        (
                            transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(
                                slice::from_raw_parts_mut(ptr.add(self.index), self.buf_len() - self.index)
                            ),
                            transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(
                                slice::from_raw_parts_mut(ptr, self.index + count - self.buf_len())
                            )
                        )
                    } else {
                        (
                            transmute::<&mut [UnsafeSyncCell<T>], &mut [T]>(
                                slice::from_raw_parts_mut(ptr.add(self.index), count)
                            ),
                            &mut [] as &mut [T]
                        )
                    }
                }
            })
        }
    )}

    pub(crate) use { public_impl, private_impl };
}