cl_generic_vec/iter/raw_cursor.rs

#![allow(clippy::cast_sign_loss)]

use crate::{SimpleVec, Storage};
use core::{marker::PhantomData, ops::Range, ptr::NonNull};

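/// A raw double-ended cursor over a sub-range of a [`SimpleVec`].
///
/// `read_front`/`read_back` delimit the elements that have not yet been read,
/// while `write_front`/`write_back` mark the unfilled slots that retained
/// elements are compacted into. While the cursor is alive the vector's length
/// is truncated; [`RawCursor::finish`] (also run on drop) moves the tail back
/// next to the front and restores a consistent length.
///
/// A minimal drain-style sketch (illustrative only; `RawCursor::new` is
/// crate-internal):
///
/// ```ignore
/// // `vec` is some `SimpleVec` with at least 4 elements
/// let mut cursor = RawCursor::new(&mut vec, 1..4);
/// while !cursor.is_empty() {
///     // take ownership of the front element; it is removed from the vector
///     let item = unsafe { cursor.take_front() };
///     drop(item);
/// }
/// // moves the untouched tail down and restores the vector's length
/// cursor.finish();
/// ```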
pub struct RawCursor<'a, S: ?Sized + Storage> {
    vec: NonNull<SimpleVec<S>>,
    old_vec_len: usize,
    write_front: *mut S::Item,
    read_front: *mut S::Item,
    read_back: *mut S::Item,
    write_back: *mut S::Item,
    mark: PhantomData<&'a mut SimpleVec<S>>,
}

unsafe impl<S: ?Sized + Storage + Send> Send for RawCursor<'_, S> where S::Item: Send {}
unsafe impl<S: ?Sized + Storage + Sync> Sync for RawCursor<'_, S> where S::Item: Sync {}

impl<S: ?Sized + Storage> Drop for RawCursor<'_, S> {
    fn drop(&mut self) { self.finish() }
}

impl<'a, S: ?Sized + Storage> RawCursor<'a, S> {
    pub(crate) const IS_ZS: bool = core::mem::size_of::<S::Item>() == 0;
    const ZS_PTR: *mut S::Item = NonNull::<S::Item>::dangling().as_ptr();

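    /// Creates a cursor over `vec[start..end]`.
    ///
    /// The vector's length is temporarily truncated to `start` while the
    /// cursor is alive, so that leaking the cursor cannot expose the elements
    /// in the drained range.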
    #[inline]
    pub(crate) fn new(vec: &'a mut SimpleVec<S>, Range { start, end }: Range<usize>) -> Self {
        unsafe {
            let mut raw_vec = NonNull::from(vec);
            let vec = raw_vec.as_mut();
            let old_vec_len = vec.len();

            if Self::IS_ZS {
                vec.set_len_unchecked(start);

                Self {
                    vec: raw_vec,
                    old_vec_len,
                    write_front: start as _,
                    read_front: start as _,
                    read_back: end as _,
                    write_back: end as _,
                    mark: PhantomData,
                }
            } else {
                let range = vec[start..end].as_mut_ptr_range();

                vec.set_len_unchecked(start);

                Self {
                    vec: raw_vec,
                    old_vec_len,
                    write_front: range.start,
                    read_front: range.start,
                    read_back: range.end,
                    write_back: range.end,
                    mark: PhantomData,
                }
            }
        }
    }

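    /// Finishes the cursor: any unread elements are kept, the tail beyond the
    /// cursor is moved down next to the front, and the vector's length is
    /// restored to cover both halves. Called automatically on drop.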
    pub fn finish(&mut self) {
        unsafe {
            if self.old_vec_len == 0 {
                return
            }

            self.skip_n_front(self.len());

            if Self::IS_ZS {
                let front_len = self.write_front as usize;
                let back_len = self.old_vec_len.wrapping_sub(self.write_back as usize);
                let len = front_len + back_len;
                self.vec.as_mut().set_len_unchecked(len);
            } else {
                let start = self.vec.as_mut().as_mut_ptr();
                let range = start..start.add(self.old_vec_len);

                let front_len = self.write_front.offset_from(range.start) as usize;
                let back_len = range.end.offset_from(self.write_back) as usize;
                let len = front_len + back_len;

                if self.write_front != self.write_back {
                    self.write_front.copy_from(self.write_back, back_len);
                }

                self.vec.as_mut().set_len_unchecked(len);
            }
        }
    }

    pub(crate) fn at_back_of_vec(&self) -> bool {
        unsafe {
            let vec = self.vec.as_ref();
            let end = vec.as_ptr().add(self.old_vec_len);
            end == self.write_back && end == self.write_front
        }
    }

    pub(crate) unsafe fn vec_mut(&mut self) -> &mut SimpleVec<S> { unsafe { self.vec.as_mut() } }

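    /// Returns the number of elements that have not yet been read from either
    /// end of the cursor.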
    #[inline]
    pub fn len(&self) -> usize {
        if Self::IS_ZS {
            (self.read_back as usize).wrapping_sub(self.read_front as usize)
        } else {
            unsafe { self.read_back.offset_from(self.read_front) as usize }
        }
    }

    #[inline]
    pub fn is_empty(&self) -> bool { self.read_back == self.read_front }

    #[inline]
    pub fn is_write_empty(&self) -> bool { self.write_back == self.write_front }

    pub fn is_write_front_empty(&self) -> bool {
        self.is_write_empty() || (self.write_front == self.read_front && !self.is_empty())
    }

    pub fn is_write_back_empty(&self) -> bool {
        self.is_write_empty() || (self.write_back == self.read_back && !self.is_empty())
    }

    pub fn write_len(&self) -> usize {
        if Self::IS_ZS {
            (self.write_back as usize).wrapping_sub(self.write_front as usize)
        } else if self.is_write_empty() {
            0
        } else {
            unsafe { self.write_back.offset_from(self.write_front) as usize }
        }
    }

    pub fn write_front_len(&self) -> usize {
        if self.is_empty() {
            self.write_len()
        } else if Self::IS_ZS {
            (self.read_front as usize).wrapping_sub(self.write_front as usize)
        } else if self.is_write_empty() {
            0
        } else {
            unsafe { self.read_front.offset_from(self.write_front) as usize }
        }
    }

    pub fn write_back_len(&self) -> usize {
        if self.is_empty() {
            self.write_len()
        } else if Self::IS_ZS {
            (self.write_back as usize).wrapping_sub(self.read_back as usize)
        } else if self.is_write_empty() {
            0
        } else {
            unsafe { self.write_back.offset_from(self.read_back) as usize }
        }
    }

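    /// Returns a reference to the element at the front of the cursor.
    ///
    /// # Safety
    ///
    /// The cursor must not be empty.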
    #[inline]
    pub unsafe fn front(&self) -> &S::Item {
        if Self::IS_ZS {
            unsafe { &*Self::ZS_PTR }
        } else {
            unsafe { &*self.read_front }
        }
    }

    #[inline]
    pub unsafe fn front_mut(&mut self) -> &mut S::Item {
        if Self::IS_ZS {
            unsafe { &mut *Self::ZS_PTR }
        } else {
            unsafe { &mut *self.read_front }
        }
    }

    #[inline]
    pub unsafe fn back(&self) -> &S::Item {
        if Self::IS_ZS {
            unsafe { &*Self::ZS_PTR }
        } else {
            unsafe { &*self.read_back.sub(1) }
        }
    }

    #[inline]
    pub unsafe fn back_mut(&mut self) -> &mut S::Item {
        if Self::IS_ZS {
            unsafe { &mut *Self::ZS_PTR }
        } else {
            unsafe { &mut *self.read_back.sub(1) }
        }
    }

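    /// Removes and returns the element at the front of the cursor.
    ///
    /// # Safety
    ///
    /// The cursor must not be empty.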
    #[inline]
    pub unsafe fn take_front(&mut self) -> S::Item {
        debug_assert!(!self.is_empty(), "Cannot take from an empty `RawCursor`");

        unsafe {
            if Self::IS_ZS {
                self.read_front = (self.read_front as usize).wrapping_add(1) as _;
                Self::ZS_PTR.read()
            } else {
                let read_front = self.read_front;
                self.read_front = self.read_front.add(1);
                read_front.read()
            }
        }
    }

    #[inline]
    pub unsafe fn take_back(&mut self) -> S::Item {
        debug_assert!(!self.is_empty(), "Cannot take from an empty `RawCursor`");

        unsafe {
            if Self::IS_ZS {
                self.read_back = (self.read_back as usize).wrapping_sub(1) as _;
                Self::ZS_PTR.read()
            } else {
                self.read_back = self.read_back.sub(1);
                self.read_back.read()
            }
        }
    }

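    /// Drops the element at the front of the cursor in place.
    ///
    /// # Safety
    ///
    /// The cursor must not be empty.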
    #[inline]
    pub unsafe fn drop_front(&mut self) {
        debug_assert!(!self.is_empty(), "Cannot drop an element from an empty `RawCursor`");

        unsafe {
            if Self::IS_ZS {
                self.read_front = (self.read_front as usize).wrapping_add(1) as _;
                Self::ZS_PTR.drop_in_place();
            } else {
                let read_front = self.read_front;
                self.read_front = self.read_front.add(1);
                read_front.drop_in_place();
            }
        }
    }

    #[inline]
    pub unsafe fn drop_back(&mut self) {
        debug_assert!(!self.is_empty(), "Cannot drop an element from an empty `RawCursor`");

        unsafe {
            if Self::IS_ZS {
                self.read_back = (self.read_back as usize).wrapping_sub(1) as _;
                Self::ZS_PTR.drop_in_place();
            } else {
                self.read_back = self.read_back.sub(1);
                self.read_back.drop_in_place();
            }
        }
    }

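    /// Drops the first `n` unread elements of the cursor in place.
    ///
    /// # Safety
    ///
    /// The cursor must contain at least `n` unread elements.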
    #[inline]
    pub unsafe fn drop_n_front(&mut self, n: usize) {
        debug_assert!(
            self.len() >= n,
            "Cannot drop {} elements from a `RawCursor` of length {}",
            n,
            self.len()
        );

        unsafe {
            let ptr = if Self::IS_ZS {
                self.read_front = (self.read_front as usize).wrapping_add(n) as _;
                Self::ZS_PTR
            } else {
                let read_front = self.read_front;
                self.read_front = self.read_front.add(n);
                read_front
            };

            core::ptr::slice_from_raw_parts_mut(ptr, n).drop_in_place();
        }
    }

    #[inline]
    pub unsafe fn drop_n_back(&mut self, n: usize) {
        debug_assert!(
            self.len() >= n,
            "Cannot drop {} elements from a `RawCursor` of length {}",
            n,
            self.len()
        );

        unsafe {
            let ptr = if Self::IS_ZS {
                self.read_back = (self.read_back as usize).wrapping_sub(n) as _;
                Self::ZS_PTR
            } else {
                self.read_back = self.read_back.sub(n);
                self.read_back
            };

            core::ptr::slice_from_raw_parts_mut(ptr, n).drop_in_place();
        }
    }

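    /// Writes `value` into the next unfilled slot at the front of the cursor.
    ///
    /// # Safety
    ///
    /// There must be at least one unfilled slot at the front of the cursor,
    /// i.e. `is_write_front_empty` must return `false`.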
    #[inline]
    pub unsafe fn write_front(&mut self, value: S::Item) {
        debug_assert!(
            !self.is_write_front_empty(),
            "Cannot write to an empty `RawCursor` or when there are no unfilled slots at the front of the `RawCursor`"
        );

        unsafe {
            if Self::IS_ZS {
                core::mem::forget(value);
                self.write_front = (self.write_front as usize).wrapping_add(1) as _;
            } else {
                self.write_front.write(value);
                self.write_front = self.write_front.add(1);
            }
        }
    }

    #[inline]
    pub unsafe fn write_back(&mut self, value: S::Item) {
        debug_assert!(
            !self.is_write_back_empty(),
            "Cannot write to an empty `RawCursor` or when there are no unfilled slots at the back of the `RawCursor`"
        );

        unsafe {
            if Self::IS_ZS {
                core::mem::forget(value);
                self.write_back = (self.write_back as usize).wrapping_sub(1) as _;
            } else {
                self.write_back = self.write_back.sub(1);
                self.write_back.write(value);
            }
        }
    }

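    /// Copies `slice` into the unfilled slots at the front of the cursor.
    ///
    /// # Safety
    ///
    /// There must be at least `slice.len()` unfilled slots at the front of
    /// the cursor. The elements are copied bitwise rather than cloned, so the
    /// caller must ensure that duplicating them this way is sound.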
    pub unsafe fn write_slice_front(&mut self, slice: &[S::Item]) {
        unsafe {
            let write_front_len = self.write_front_len();
            debug_assert!(
                write_front_len >= slice.len(),
                "Cannot write {} elements, only {} slots remaining",
                slice.len(),
                write_front_len
            );

            if Self::IS_ZS {
                self.write_front = (self.write_front as usize).wrapping_add(slice.len()) as _;
            } else {
                self.write_front.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
                self.write_front = self.write_front.add(slice.len());
            }
        }
    }

    pub unsafe fn write_slice_back(&mut self, slice: &[S::Item]) {
        let write_back_len = self.write_back_len();
        debug_assert!(
            write_back_len >= slice.len(),
            "Cannot write {} elements, only {} slots remaining",
            slice.len(),
            write_back_len
        );

        unsafe {
            if Self::IS_ZS {
                self.write_back = (self.write_back as usize).wrapping_sub(slice.len()) as _;
            } else {
                self.write_back = self.write_back.sub(slice.len());
                self.write_back.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            }
        }
    }

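    /// Keeps the element at the front of the cursor, moving it into the next
    /// unfilled front slot if needed; it will remain in the vector after
    /// `finish`.
    ///
    /// # Safety
    ///
    /// The cursor must not be empty.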
    #[inline]
    pub unsafe fn skip_front(&mut self) {
        debug_assert!(!self.is_empty(), "Cannot skip elements from an empty `RawCursor`");

        unsafe {
            if Self::IS_ZS {
                self.skip_n_front(1);
            } else {
                if self.write_front as *const S::Item != self.read_front {
                    self.write_front.copy_from_nonoverlapping(self.read_front, 1);
                }
                self.read_front = self.read_front.add(1);
                self.write_front = self.write_front.add(1);
            }
        }
    }

    #[inline]
    pub unsafe fn skip_back(&mut self) {
        debug_assert!(!self.is_empty(), "Cannot skip elements from an empty `RawCursor`");

        unsafe {
            if Self::IS_ZS {
                self.skip_n_back(1);
            } else {
                self.read_back = self.read_back.sub(1);
                self.write_back = self.write_back.sub(1);
                if self.write_back as *const S::Item != self.read_back {
                    self.write_back.copy_from_nonoverlapping(self.read_back, 1);
                }
            }
        }
    }

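    /// Keeps the next `n` unread elements at the front of the cursor, moving
    /// them into the unfilled front slots.
    ///
    /// # Safety
    ///
    /// The cursor must contain at least `n` unread elements.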
    #[inline]
    pub unsafe fn skip_n_front(&mut self, n: usize) {
        debug_assert!(
            self.len() >= n,
            "Cannot skip {} elements from a `RawCursor` of length {}",
            n,
            self.len()
        );

        unsafe {
            if Self::IS_ZS {
                self.read_front = (self.read_front as usize).wrapping_add(n) as _;
                self.write_front = (self.write_front as usize).wrapping_add(n) as _;
            } else {
                if self.write_front as *const S::Item != self.read_front {
                    self.write_front.copy_from(self.read_front, n);
                }
                self.read_front = self.read_front.add(n);
                self.write_front = self.write_front.add(n);
            }
        }
    }

    #[inline]
    pub unsafe fn skip_n_back(&mut self, n: usize) {
        debug_assert!(
            self.len() >= n,
            "Cannot skip {} elements from a `RawCursor` of length {}",
            n,
            self.len()
        );

        unsafe {
            if Self::IS_ZS {
                self.read_back = (self.read_back as usize).wrapping_sub(n) as _;
                self.write_back = (self.write_back as usize).wrapping_sub(n) as _;
            } else {
                self.read_back = self.read_back.sub(n);
                self.write_back = self.write_back.sub(n);
                if self.write_back as *const S::Item != self.read_back {
                    self.write_back.copy_from(self.read_back, n);
                }
            }
        }
    }

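    /// Grows the unfilled region of an empty cursor so that at least `space`
    /// slots are available for writing, reserving additional capacity in the
    /// underlying vector if necessary.
    ///
    /// # Panics
    ///
    /// Panics if the cursor is not empty.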
    pub fn reserve(&mut self, space: usize) {
        assert!(self.is_empty(), "You can only call `reserve` on an empty `RawCursor`");
        unsafe {
            if Self::IS_ZS {
                let write_space = (self.write_back as usize).wrapping_sub(self.write_front as usize);

                if let Some(increase_by) = space.checked_sub(write_space) {
                    self.write_back = (self.write_back as usize).wrapping_add(increase_by) as _;
                    self.old_vec_len += increase_by;
                }
            } else {
                let write_space = self.write_back.offset_from(self.write_front) as usize;

                if write_space >= space {
                    return
                }

                let capacity = self.vec.as_ref().capacity();
                let start = self.vec.as_mut().as_mut_ptr();
                let range = start..start.add(self.old_vec_len);

                let front_len = self.write_front.offset_from(range.start) as usize;
                let back_len = range.end.offset_from(self.write_back) as usize;
                let len = front_len + back_len;

                if len + space > capacity {
                    let wf = self.write_front.offset_from(range.start) as usize;
                    let wb = self.write_back.offset_from(range.start) as usize;
                    let rf = self.read_front.offset_from(range.start) as usize;
                    let rb = self.read_back.offset_from(range.start) as usize;

                    let vec = self.vec.as_mut();
                    vec.storage.reserve(len + space);

                    let start = vec.as_mut_ptr();
                    self.write_front = start.add(wf);
                    self.write_back = start.add(wb);
                    self.read_front = start.add(rf);
                    self.read_back = start.add(rb);
                }

                let increase_by = space.wrapping_sub(write_space);
                let new_write_back = self.write_back.add(increase_by);
                new_write_back.copy_from(self.write_back, back_len);
                self.write_back = new_write_back;
                self.old_vec_len += increase_by;
            }
        }
    }
}