/// A `Vec`-like container stored behind a single pointer. The allocation
/// holds a two-`usize` header (capacity, then length) immediately before the
/// element data: `self.0` points at the first element, and the header lives
/// at offsets -2 and -1 (in `usize`s) from it. A null pointer is the empty,
/// unallocated state.
pub struct PointerVec<T>(*mut T);

impl<T> PointerVec<T> {
    pub const fn new() -> Self {
        Self(core::ptr::null_mut())
    }
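
    // Added helper (not in the original): the layout of the backing buffer,
    // i.e. the two-`usize` header followed by `cap` elements. A `u8` array
    // layout would have alignment 1, which is unsound for aligned `T`; this
    // sketch uses the larger of the two alignments and assumes
    // `align_of::<T>() <= size_of::<[usize; 2]>()`, so the data placed right
    // after the header stays aligned.
    fn buf_layout(cap: usize) -> std::alloc::Layout {
        let size = std::mem::size_of::<[usize; 2]>() + std::mem::size_of::<T>() * cap;
        let align = std::mem::align_of::<usize>().max(std::mem::align_of::<T>());
        std::alloc::Layout::from_size_align(size, align).unwrap()
    }

    // Added helper (not in the original): moves the contents into a freshly
    // allocated buffer of capacity `new_cap` (which must be at least `len`)
    // and frees the old buffer, if any. Shared by `with_capacity`,
    // `realloc`, `shrink_to`, and `drain`.
    unsafe fn move_to_new_buffer(&mut self, new_cap: usize) {
        let len = self.len();
        debug_assert!(new_cap >= len);
        let layout = Self::buf_layout(new_cap);
        let ptr = std::alloc::alloc(layout) as *mut usize;
        if ptr.is_null() {
            std::alloc::handle_alloc_error(layout);
        }
        // Header: capacity at offset 0, length at offset 1, data after that.
        *ptr = new_cap;
        *ptr.add(1) = len;
        let new_data = ptr.add(2) as *mut T;
        if !self.0.is_null() {
            core::ptr::copy_nonoverlapping(self.0, new_data, len);
            std::alloc::dealloc(
                (self.0 as *mut usize).sub(2) as *mut u8,
                Self::buf_layout(self.capacity()),
            );
        }
        self.0 = new_data;
    }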

    pub fn with_capacity(capacity: usize) -> Self {
        if capacity == 0 {
            return Self::new();
        }
        let mut v = Self::new();
        unsafe { v.move_to_new_buffer(capacity) };
        v
    }

    pub fn len(&self) -> usize {
        if self.0.is_null() {
            return 0;
        }
        // The length is the header word directly before the data.
        unsafe { *(self.0 as *mut usize).sub(1) }
    }

    pub fn capacity(&self) -> usize {
        if self.0.is_null() {
            return 0;
        }
        // The capacity is the first header word, two `usize`s before the data.
        unsafe { *(self.0 as *mut usize).sub(2) }
    }

    pub fn reserve(&mut self, additional: usize) {
        // Panic if `len + additional` would overflow `usize`.
        if usize::MAX - additional < self.len() {
            panic!("new capacity cannot exceed usize::MAX")
        }
        while (self.capacity() - self.len()) < additional {
            self.realloc();
        }
    }

    pub fn reserve_exact(&mut self, additional: usize) {
        // `reserve_exact` is a non-binding request; this implementation does
        // not track exact capacities, so defer to `reserve`.
        self.reserve(additional)
    }

    pub fn try_reserve(&mut self, additional: usize) -> Result<(), ()> {
        if usize::MAX - additional < self.len() {
            return Err(());
        }
        self.reserve(additional);
        Ok(())
    }

    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), ()> {
        if usize::MAX - additional < self.len() {
            return Err(());
        }
        self.reserve_exact(additional);
        Ok(())
    }

    pub fn shrink_to_fit(&mut self) {
        self.shrink_to(self.len());
    }

    pub fn shrink_to(&mut self, min_capacity: usize) {
        let new_cap = self.len().max(min_capacity);
        if !self.0.is_null() && new_cap < self.capacity() {
            // Move into a right-sized buffer and free the old one.
            unsafe { self.move_to_new_buffer(new_cap) };
        }
    }

    pub fn into_boxed_slice(self) -> Box<[T]> {
        // The header sits in front of the data, so the data pointer cannot be
        // handed to `Box::from_raw` directly; copy the elements into an
        // exact-sized allocation that the Box is allowed to free.
        unsafe {
            let len = self.len();
            let layout = std::alloc::Layout::array::<T>(len).unwrap();
            let data = if len == 0 {
                core::ptr::NonNull::<T>::dangling().as_ptr()
            } else {
                let data = std::alloc::alloc(layout) as *mut T;
                if data.is_null() {
                    std::alloc::handle_alloc_error(layout);
                }
                core::ptr::copy_nonoverlapping(self.0, data, len);
                data
            };
            let mut me = self;
            me.set_len(0); // elements were moved out; only free the buffer
            drop(me);
            Box::from_raw(std::slice::from_raw_parts_mut(data, len))
        }
    }

    pub fn truncate(&mut self, len: usize) {
        if len >= self.len() {
            return;
        }
        unsafe {
            let remaining_len = self.len() - len;
            let s = std::slice::from_raw_parts_mut(self.0.add(len), remaining_len);
            // Shorten first, so a panicking destructor cannot observe the
            // tail that is being dropped, then drop the tail in place.
            *((self.0 as *mut usize).sub(1)) = len;
            core::ptr::drop_in_place(s as *mut [T]);
        }
    }

    pub fn as_slice(&self) -> &[T] {
        self
    }

    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self
    }

    pub fn as_ptr(&self) -> *const T {
        if self.0.is_null() {
            // Match `Vec`: never hand out null, use a well-aligned dangling
            // pointer for the empty state.
            return core::ptr::NonNull::dangling().as_ptr();
        }
        self.0
    }

    pub fn as_mut_ptr(&mut self) -> *mut T {
        if self.0.is_null() {
            return core::ptr::NonNull::dangling().as_ptr();
        }
        self.0
    }

    pub unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= self.capacity());
        if self.0.is_null() {
            // No buffer means no header to write; only length 0 is valid here.
            debug_assert!(new_len == 0);
            return;
        }
        *((self.0 as *mut usize).sub(1)) = new_len;
    }

    pub fn swap_remove(&mut self, index: usize) -> T {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("swap_remove index (is {index}) should be < len (is {len})");
        }

        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            // Read out the requested element, then fill the hole with the
            // last element instead of shifting the whole tail.
            let value = core::ptr::read(self.as_ptr().add(index));
            let base_ptr = self.as_mut_ptr();
            core::ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1);
            self.set_len(len - 1);
            value
        }
    }

    pub fn insert(&mut self, index: usize, element: T) {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("insertion index (is {index}) should be <= len (is {len})");
        }

        let len = self.len();
        if index > len {
            assert_failed(index, len);
        }

        if len == self.capacity() {
            self.reserve(1);
        }

        unsafe {
            let p = self.as_mut_ptr().add(index);
            // Shift everything from `index` one slot to the right, then
            // write the new element into the gap.
            core::ptr::copy(p, p.add(1), len - index);
            core::ptr::write(p, element);
            self.set_len(len + 1);
        }
    }

    pub fn remove(&mut self, index: usize) -> T {
        #[cold]
        #[inline(never)]
        #[track_caller]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("removal index (is {index}) should be < len (is {len})");
        }

        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            let ptr = self.as_mut_ptr().add(index);
            // Read the element out, then shift the tail left to fill the gap.
            let ret = core::ptr::read(ptr);
            core::ptr::copy(ptr.add(1), ptr, len - index - 1);
            self.set_len(len - 1);
            ret
        }
    }

    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        self.retain_mut(|elem| f(elem));
    }

    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let original_len = self.len();
        // Hide the elements from safe code while we work; the guard below
        // restores a consistent length even if `f` panics.
        unsafe { self.set_len(0) };

        struct BackshiftOnDrop<'a, T> {
            v: &'a mut PointerVec<T>,
            processed_len: usize,
            deleted_cnt: usize,
            original_len: usize,
        }

        impl<T> Drop for BackshiftOnDrop<'_, T> {
            fn drop(&mut self) {
                if self.deleted_cnt > 0 {
                    // Shift the unprocessed tail down over the holes left by
                    // the deleted elements.
                    unsafe {
                        core::ptr::copy(
                            self.v.as_ptr().add(self.processed_len),
                            self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt),
                            self.original_len - self.processed_len,
                        );
                    }
                }
                unsafe {
                    self.v.set_len(self.original_len - self.deleted_cnt);
                }
            }
        }

        let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len };

        fn process_loop<F, T, const DELETED: bool>(
            original_len: usize,
            f: &mut F,
            g: &mut BackshiftOnDrop<'_, T>,
        ) where
            F: FnMut(&mut T) -> bool,
        {
            while g.processed_len != original_len {
                let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) };
                if !f(cur) {
                    g.processed_len += 1;
                    g.deleted_cnt += 1;
                    unsafe { core::ptr::drop_in_place(cur) };
                    if DELETED {
                        continue;
                    } else {
                        break;
                    }
                }
                if DELETED {
                    // A hole exists earlier in the buffer; move the kept
                    // element back into it.
                    unsafe {
                        let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt);
                        core::ptr::copy_nonoverlapping(cur, hole_slot, 1);
                    }
                }
                g.processed_len += 1;
            }
        }

        // Stage 1: nothing deleted yet, so kept elements stay in place.
        process_loop::<F, T, false>(original_len, &mut f, &mut g);
        // Stage 2: backshift the kept elements into the holes.
        process_loop::<F, T, true>(original_len, &mut f, &mut g);

        drop(g);
    }

    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b))
    }

    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        let len = self.len();
        if len <= 1 {
            return;
        }

        // Drop guard: if `same_bucket` panics, shift the not-yet-examined
        // tail down over the gap so the vector stays valid.
        struct FillGapOnDrop<'a, T> {
            // Index of the first element not yet checked.
            read: usize,
            // Index one past the last element kept so far.
            write: usize,
            vec: &'a mut PointerVec<T>,
        }

        impl<'a, T> Drop for FillGapOnDrop<'a, T> {
            fn drop(&mut self) {
                unsafe {
                    let ptr = self.vec.as_mut_ptr();
                    let len = self.vec.len();

                    let items_left = len.wrapping_sub(self.read);
                    let dropped_ptr = ptr.add(self.write);
                    let valid_ptr = ptr.add(self.read);
                    core::ptr::copy(valid_ptr, dropped_ptr, items_left);

                    let dropped = self.read.wrapping_sub(self.write);
                    self.vec.set_len(len - dropped);
                }
            }
        }

        let mut gap = FillGapOnDrop { read: 1, write: 1, vec: self };
        let ptr = gap.vec.as_mut_ptr();

        unsafe {
            while gap.read < len {
                let read_ptr = ptr.add(gap.read);
                let prev_ptr = ptr.add(gap.write.wrapping_sub(1));

                if same_bucket(&mut *read_ptr, &mut *prev_ptr) {
                    // Duplicate: advance past it and drop it in place.
                    gap.read += 1;
                    core::ptr::drop_in_place(read_ptr);
                } else {
                    // Kept: move it down next to the previous kept element.
                    let write_ptr = ptr.add(gap.write);
                    core::ptr::copy(read_ptr, write_ptr, 1);
                    gap.write += 1;
                    gap.read += 1;
                }
            }

            gap.vec.set_len(gap.write);
            // The happy path finished; disarm the guard.
            core::mem::forget(gap);
        }
    }

    fn realloc(&mut self) {
        // Grow by doubling, starting from 24 slots on the first allocation.
        // `move_to_new_buffer` copies the existing elements over and frees
        // the old buffer.
        let new_cap = (self.capacity() * 2).max(24);
        unsafe { self.move_to_new_buffer(new_cap) };
    }

    pub fn push(&mut self, value: T) {
        let l = self.len();
        if l == self.capacity() {
            self.realloc();
        }
        unsafe {
            // `write` avoids dropping the uninitialized "old" contents of the
            // slot; publish the new length only after the write.
            self.0.add(l).write(value);
            self.set_len(l + 1);
        }
    }

    pub fn pop(&mut self) -> Option<T> {
        let l = self.len();
        if l == 0 {
            None
        } else {
            unsafe {
                self.set_len(l - 1);
                Some(core::ptr::read(self.as_ptr().add(self.len())))
            }
        }
    }

    pub fn append(&mut self, other: &mut Self) {
        unsafe {
            self.append_elements(other.as_slice() as _);
            // The elements were moved out of `other`; just forget them there.
            other.set_len(0);
        }
    }

    unsafe fn append_elements(&mut self, other: *const [T]) {
        let count = (*other).len();
        self.reserve(count);
        let len = self.len();
        core::ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count);
        self.set_len(len + count);
    }

    /// Removes the given range from the vector and returns an iterator over
    /// the removed elements. Unlike `Vec::drain`, this implementation drains
    /// eagerly: the range is copied into a detached buffer and the tail is
    /// shifted down before the iterator is returned.
    pub fn drain<R>(&mut self, range: R) -> Iter<T>
    where
        R: core::ops::RangeBounds<usize>,
    {
        let len = self.len();

        let start = match range.start_bound() {
            core::ops::Bound::Included(&start) => start,
            core::ops::Bound::Excluded(&start) => start
                .checked_add(1)
                .unwrap_or_else(|| panic!("attempted to index slice from after maximum usize")),
            core::ops::Bound::Unbounded => 0,
        };
        let end = match range.end_bound() {
            core::ops::Bound::Included(&end) => end
                .checked_add(1)
                .unwrap_or_else(|| panic!("attempted to index slice up to maximum usize")),
            core::ops::Bound::Excluded(&end) => end,
            core::ops::Bound::Unbounded => len,
        };

        if start > end {
            panic!("slice index start is larger than end");
        }
        if end > len {
            panic!("slice end index is out of range for slice");
        }

        unsafe {
            // Copy the drained elements into a detached buffer...
            let mut drained = Self::new();
            if end > start {
                drained.move_to_new_buffer(end - start);
                core::ptr::copy_nonoverlapping(self.as_ptr().add(start), drained.0, end - start);
                drained.set_len(end - start);
            }
            // ...then close the gap the range leaves behind.
            core::ptr::copy(self.as_ptr().add(end), self.as_mut_ptr().add(start), len - end);
            self.set_len(len - (end - start));
            drained.into_iter()
        }
    }

    pub fn clear(&mut self) {
        let elems: *mut [T] = self.as_mut_slice();

        unsafe {
            // Zero the length first so a panicking destructor cannot observe
            // the elements being dropped.
            self.set_len(0);
            core::ptr::drop_in_place(elems);
        }
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    pub fn split_off(&mut self, at: usize) -> Self {
        #[cold]
        #[inline(never)]
        fn assert_failed(at: usize, len: usize) -> ! {
            panic!("`at` split index (is {at}) should be <= len (is {len})");
        }

        if at > self.len() {
            assert_failed(at, self.len());
        }

        if at == 0 {
            // Hand the whole buffer to the result and keep an empty one of
            // the same capacity, mirroring `Vec::split_off`.
            return std::mem::replace(self, Self::with_capacity(self.capacity()));
        }

        let other_len = self.len() - at;
        let mut other = Self::with_capacity(other_len);

        unsafe {
            self.set_len(at);
            other.set_len(other_len);
            std::ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
        }
        other
    }

    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
    where
        F: FnMut() -> T,
    {
        let len = self.len();
        if new_len > len {
            let l = new_len - len;
            self.reserve(l);
            for _ in 0..l {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }

    pub fn leak<'a>(self) -> &'a mut [T] {
        let mut me = std::mem::ManuallyDrop::new(self);
        unsafe { std::slice::from_raw_parts_mut(me.as_mut_ptr(), me.len()) }
    }

    pub fn spare_capacity_mut(&mut self) -> &mut [std::mem::MaybeUninit<T>] {
        unsafe {
            std::slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.len()) as *mut std::mem::MaybeUninit<T>,
                self.capacity() - self.len(),
            )
        }
    }

    pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [std::mem::MaybeUninit<T>]) {
        let (init, spare, _) = unsafe { self.split_at_spare_mut_with_len() };
        (init, spare)
    }

    unsafe fn split_at_spare_mut_with_len(
        &mut self,
    ) -> (&mut [T], &mut [std::mem::MaybeUninit<T>], &mut usize) {
        if self.0.is_null() {
            // The returned `&mut usize` aliases the length header, so a
            // buffer (and therefore a header) must exist.
            self.realloc();
        }
        let ptr = self.as_mut_ptr();
        let spare_ptr = ptr.add(self.len());
        let spare_ptr = spare_ptr.cast::<std::mem::MaybeUninit<T>>();
        let spare_len = self.capacity() - self.len();

        let initialized = std::slice::from_raw_parts_mut(ptr, self.len());
        let spare = std::slice::from_raw_parts_mut(spare_ptr, spare_len);

        (initialized, spare, (self.0 as *mut usize).sub(1).as_mut().unwrap())
    }
}
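
// Added (not in the original): `Default` is the conventional companion of
// `new()` and lets `PointerVec` participate in `#[derive(Default)]` types.
impl<T> Default for PointerVec<T> {
    fn default() -> Self {
        Self::new()
    }
}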

impl<T: Clone> PointerVec<T> {
    pub fn resize(&mut self, new_len: usize, value: T) {
        let len = self.len();

        if new_len > len {
            let l = new_len - len;
            self.reserve(l);
            unsafe {
                let p = self.as_mut_ptr().add(len);
                for i in 0..l {
                    p.add(i).write(value.clone())
                }
                self.set_len(new_len);
            }
        } else {
            self.truncate(new_len);
        }
    }

    pub fn extend_from_slice(&mut self, other: &[T]) {
        self.reserve(other.len());
        unsafe {
            let l = self.len();
            for (i, v) in other.iter().enumerate() {
                self.0.add(l + i).write(v.clone());
            }
            self.set_len(l + other.len())
        }
    }

    pub fn extend_from_within<R>(&mut self, src: R)
    where
        R: std::ops::RangeBounds<usize>,
    {
        let start = match src.start_bound() {
            // An excluded start begins one past the bound.
            std::ops::Bound::Excluded(v) => *v + 1,
            std::ops::Bound::Included(v) => *v,
            std::ops::Bound::Unbounded => 0,
        };
        let end = match src.end_bound() {
            std::ops::Bound::Excluded(v) => *v,
            std::ops::Bound::Included(v) => *v + 1,
            std::ops::Bound::Unbounded => self.len(),
        };
        assert!(start <= end && end <= self.len(), "range out of bounds");

        // Reserve before taking any pointer into the buffer: `reserve` may
        // reallocate and would leave a pre-existing slice dangling.
        let count = end - start;
        self.reserve(count);
        unsafe {
            let len = self.len();
            for i in 0..count {
                let value = (*self.0.add(start + i)).clone();
                self.0.add(len + i).write(value);
                // Bump the length as we go so already-written clones are
                // dropped if a later `clone` panics.
                self.set_len(len + i + 1);
            }
        }
    }
}

impl<T> Drop for PointerVec<T> {
    fn drop(&mut self) {
        if self.0.is_null() {
            return;
        }
        unsafe {
            let s = self.as_mut_slice() as *mut [T];
            std::ptr::drop_in_place(s);
            // The allocation starts at the header, two `usize`s before the
            // data pointer, so deallocate from there.
            std::alloc::dealloc(
                (self.0 as *mut usize).sub(2) as *mut u8,
                Self::buf_layout(self.capacity()),
            );
        }
    }
}

impl<T> std::ops::Deref for PointerVec<T> {
    type Target = [T];

    fn deref(&self) -> &[T] {
        unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len()) }
    }
}

impl<T> std::ops::DerefMut for PointerVec<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { std::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
    }
}

impl<T: Clone> Clone for PointerVec<T> {
    fn clone(&self) -> Self {
        let mut a = Self::new();
        a.reserve(self.len());
        unsafe {
            for i in 0..self.len() {
                a.0.add(i).write(self[i].clone());
                // Track the length as we go so already-cloned elements are
                // freed if a later `clone` panics.
                a.set_len(i + 1);
            }
        }
        a
    }
}

impl<T: std::hash::Hash> std::hash::Hash for PointerVec<T> {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        std::hash::Hash::hash(&**self, state)
    }
}

impl<T, I: std::slice::SliceIndex<[T]>> std::ops::Index<I> for PointerVec<T> {
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        std::ops::Index::index(&**self, index)
    }
}

impl<T, I: std::slice::SliceIndex<[T]>> std::ops::IndexMut<I> for PointerVec<T> {
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        std::ops::IndexMut::index_mut(&mut **self, index)
    }
}

impl<A> std::iter::FromIterator<A> for PointerVec<A> {
    fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
        let mut a = Self::new();
        iter.into_iter().for_each(|v| {
            a.push(v);
        });
        a
    }
}

impl<'a, T> IntoIterator for &'a PointerVec<T> {
    type Item = &'a T;
    type IntoIter = std::slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().iter()
    }
}

impl<'a, T> IntoIterator for &'a mut PointerVec<T> {
    type Item = &'a mut T;
    type IntoIter = std::slice::IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.as_mut_slice().iter_mut()
    }
}

impl<T> IntoIterator for PointerVec<T> {
    type IntoIter = Iter<T>;
    type Item = T;

    fn into_iter(self) -> Self::IntoIter {
        let i = Iter {
            v: self.0,
            len: self.len(),
            count: 0,
        };
        // The iterator now owns the buffer; don't run `PointerVec::drop`.
        std::mem::forget(self);
        i
    }
}

pub struct Iter<T> {
    v: *mut T,
    len: usize,
    count: usize,
}

impl<T> Iterator for Iter<T> {
    type Item = T;

    fn count(self) -> usize
    where
        Self: Sized,
    {
        // Dropping `self` at the end of this call also drops any elements
        // that were never yielded.
        self.len - self.count
    }

    fn next(&mut self) -> Option<Self::Item> {
        if self.count >= self.len {
            return None;
        }
        unsafe {
            let v = self.v.add(self.count).read();
            self.count += 1;
            Some(v)
        }
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // `nth` must consume the elements it skips; advancing through `next`
        // ensures each one is read (and therefore dropped) exactly once.
        for _ in 0..n {
            self.next()?;
        }
        self.next()
    }
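
    // Added (not in the original): exact size information so consumers like
    // `collect` can preallocate.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.len - self.count;
        (remaining, Some(remaining))
    }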
}

impl<T> Drop for Iter<T> {
    fn drop(&mut self) {
        if self.v.is_null() {
            return;
        }
        unsafe {
            // Drop the elements that were never yielded...
            let remaining =
                std::slice::from_raw_parts_mut(self.v.add(self.count), self.len - self.count);
            std::ptr::drop_in_place(remaining);
            // ...then let `PointerVec`'s Drop free the buffer (length 0, so
            // the elements are not dropped a second time).
            let mut v = PointerVec(self.v);
            v.set_len(0);
            drop(v);
        }
    }
}
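
// A small smoke-test module (not in the original) exercising the delicate
// paths: growth that must move elements, insert/remove shifting, the eager
// drain, and the consuming iterator's drop behaviour.
#[cfg(test)]
mod tests {
    use super::PointerVec;

    #[test]
    fn push_pop_grow() {
        let mut v = PointerVec::new();
        for i in 0..100 {
            v.push(i); // crosses the initial 24-slot capacity several times
        }
        assert_eq!(v.len(), 100);
        assert_eq!(v.pop(), Some(99));
        assert_eq!(&v.as_slice()[..3], &[0, 1, 2]);
    }

    #[test]
    fn insert_remove_shift() {
        let mut v: PointerVec<i32> = [1, 2, 4].into_iter().collect();
        v.insert(2, 3);
        assert_eq!(v.as_slice(), &[1, 2, 3, 4]);
        assert_eq!(v.remove(0), 1);
        assert_eq!(v.as_slice(), &[2, 3, 4]);
    }

    #[test]
    fn drain_is_eager() {
        let mut v: PointerVec<i32> = (0..6).collect();
        let drained: Vec<i32> = v.drain(1..3).collect();
        assert_eq!(drained, [1, 2]);
        assert_eq!(v.as_slice(), &[0, 3, 4, 5]);
    }

    #[test]
    fn into_iter_drops_unyielded() {
        let v: PointerVec<String> = ["a", "b", "c"].iter().map(|s| s.to_string()).collect();
        let mut it = v.into_iter();
        assert_eq!(it.next().as_deref(), Some("a"));
        drop(it); // "b" and "c" must be dropped here, not leaked
    }
}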