use crate::pair::Pair;
use crate::slice::HeaderSlice;
use crate::utils;
use alloc::alloc::{alloc, dealloc, handle_alloc_error, realloc, Layout};
use alloc::borrow::{Borrow, BorrowMut};
use alloc::boxed::Box;
use core::cmp::Ordering;
use core::fmt::{self, Debug};
use core::hash::{self, Hash};
use core::iter;
use core::mem::{self, MaybeUninit};
use core::ops::{Add, AddAssign, Deref, DerefMut};
use core::ptr::{self, NonNull};
15
/// A growable vector that stores a header value `H` in the same allocation
/// as its element buffer — the owned, resizable counterpart of
/// `HeaderSlice<H, T>`.
pub struct HeaderVec<H, T> {
    // Single allocation holding the header followed by `cap` element slots;
    // slots past `len` are uninitialized, hence `MaybeUninit`.
    ptr: NonNull<Pair<H, MaybeUninit<T>>>,
    // Number of initialized elements; invariant: `len <= cap`.
    len: usize,
    // Number of allocated element slots.
    cap: usize,
}
21
22const MIN_CAP: usize = 8;
23
24impl<H, T> HeaderVec<H, T> {
25 pub fn capacity(&self) -> usize {
27 if mem::size_of::<T>() == 0 {
28 usize::MAX
29 } else {
30 self.cap
31 }
32 }
33
    /// Raw fat pointer to this vector viewed as a `HeaderSlice` covering the
    /// `len` initialized elements (not the full capacity).
    pub fn as_ptr(&self) -> NonNull<HeaderSlice<H, T>> {
        crate::pair::pair_as_slice_ptr(self.ptr.cast::<Pair<H, T>>(), self.len)
    }
38
    /// The raw `(pointer, len, cap)` triple without giving up ownership.
    pub fn as_raw_parts(&mut self) -> (NonNull<Pair<H, MaybeUninit<T>>>, usize, usize) {
        (self.ptr, self.len, self.cap)
    }
47
48 pub fn into_raw_parts(mut self) -> (NonNull<Pair<H, MaybeUninit<T>>>, usize, usize) {
51 let parts = self.as_raw_parts();
52 mem::forget(self);
53 parts
54 }
55
    /// Reassemble a vector from parts produced by `into_raw_parts`.
    ///
    /// # Safety
    /// `ptr`, `len`, and `cap` must describe a live allocation in this
    /// crate's header+slice layout (e.g. as returned by `into_raw_parts`),
    /// with the first `len` slots initialized and `len <= cap`.
    pub unsafe fn from_raw_parts(
        ptr: NonNull<Pair<H, MaybeUninit<T>>>,
        len: usize,
        cap: usize,
    ) -> Self {
        Self { ptr, len, cap }
    }
65
    /// View the entire allocation — all `capacity()` slots, initialized or
    /// not — as a mutable `HeaderSlice` of `MaybeUninit<T>`.
    fn inner_mut(&mut self) -> &mut HeaderSlice<H, MaybeUninit<T>> {
        let ptr = crate::pair::pair_as_slice_ptr(self.ptr, self.capacity());
        // SAFETY: `self.ptr` always points at a live allocation with
        // `capacity()` slots; `MaybeUninit` makes referencing the
        // uninitialized tail sound.
        unsafe { &mut *ptr.as_ptr() }
    }
71
    /// Memory layout of an allocation holding the header plus `cap`
    /// element slots.
    fn get_layout(cap: usize) -> Layout {
        HeaderSlice::<H, T>::layout_for_len(cap)
    }
76
77 unsafe fn realloc_exact(&mut self, count: usize) {
80 if mem::size_of::<T>() == 0 {
81 return;
82 }
83 if count == self.cap {
84 return;
85 }
86 let old_layout = Self::get_layout(self.cap);
87 let new_layout = Self::get_layout(count);
88 let bytes_ptr = realloc(self.ptr.as_ptr() as *mut u8, old_layout, new_layout.size());
89 let ptr = utils::set_ptr_value_mut(self.ptr.as_ptr(), bytes_ptr);
90 self.ptr = NonNull::new(ptr).unwrap();
91 self.cap = count;
92 }
93
94 fn grow(&mut self, target_len: usize) {
96 let target_cap = (target_len * 2).max(self.cap);
97 unsafe { self.realloc_exact(target_cap) }
98 }
99
100 unsafe fn shrink(&mut self, target_len: usize) {
103 let target_cap = (target_len * 2).max(MIN_CAP).min(self.cap);
104 self.realloc_exact(target_cap);
105 }
106
    /// Adjust capacity for a prospective length `len`: shrink when it is
    /// below the current length, grow when it exceeds the current capacity,
    /// and do nothing in between.
    ///
    /// # Safety
    /// When `len < self.len`, elements at `len` and beyond must already be
    /// moved out or dropped (see `shrink`).
    unsafe fn realloc_for(&mut self, len: usize) {
        if len < self.len {
            self.shrink(len);
        } else if len > self.capacity() {
            self.grow(len);
        }
    }
116
    /// Append `val` to the end of the body, growing the buffer if full.
    pub fn push(&mut self, val: T) {
        let new_len = self.len + 1;
        if new_len > self.cap {
            self.grow(new_len);
        }
        let index = self.len;
        // Plain assignment is fine here: the old slot content is an
        // uninitialized `MaybeUninit`, which has no destructor to run.
        self.inner_mut().body[index] = MaybeUninit::new(val);
        self.len = new_len;
    }
127
    /// Remove and return the last element, or `None` when empty.
    ///
    /// May shrink the allocation once the length drops low enough.
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }
        let new_len = self.len - 1;
        // SAFETY: slot `new_len` is within `len`, hence initialized; the
        // value is moved out by `ptr::read` and the shorter `len` below
        // marks the slot as uninitialized again.
        let val = unsafe { ptr::read(self.inner_mut().body[new_len].as_ptr()) };
        // SAFETY: the only slot that may be cut off is `new_len`, whose
        // value was just moved out.
        unsafe { self.shrink(new_len) };
        self.len = new_len;
        Some(val)
    }
139
    /// Remove and return the element at `index`, shifting the tail left.
    /// Returns `None` when `index` is out of bounds.
    pub fn remove(&mut self, index: usize) -> Option<T> {
        if index >= self.len {
            return None;
        }
        let target_ptr = &mut self.inner_mut().body[index] as *mut MaybeUninit<T>;
        // SAFETY: `index < len`, so the slot is initialized; ownership of
        // the value moves into `val`.
        let val = unsafe { ptr::read(target_ptr) };
        let copy_len = self.len - index - 1;
        let copy_src = unsafe { target_ptr.add(1) };
        // SAFETY: shifts the initialized tail one slot left; `ptr::copy`
        // permits the overlapping ranges.
        unsafe { ptr::copy(copy_src, target_ptr, copy_len) };
        unsafe { self.shrink(self.len - 1) };
        self.len -= 1;
        Some(unsafe { val.assume_init() })
    }
155
    /// Remove and return the element at `index` in O(1) by moving the last
    /// element into its place (does not preserve element order).
    /// Returns `None` when `index` is out of bounds.
    pub fn swap_remove(&mut self, index: usize) -> Option<T> {
        if index >= self.len {
            return None;
        }

        // Cannot fail: `index < len` implies `len >= 1`.
        let last = self.pop().unwrap();

        // `pop` already shortened `len`; if `index` was the last slot,
        // the popped value is exactly the element being removed.
        if index == self.len {
            return Some(last);
        }

        Some(mem::replace(&mut self.body[index], last))
    }
172
173 pub fn insert(&mut self, index: usize, val: T) {
177 assert!(index <= self.len);
178 if index == self.len {
179 self.push(val);
180 return;
181 }
182
183 self.grow(self.len + 1);
184 let target_ptr = unsafe { self.inner_mut().body.as_mut_ptr().add(index) };
186 let copy_len = self.len - index;
187 let copy_dest = unsafe { target_ptr.add(1) };
188 unsafe { ptr::copy(target_ptr, copy_dest, copy_len) };
189 unsafe {
190 ptr::write(target_ptr, MaybeUninit::new(val));
191 };
192 self.len += 1;
193 }
194
195 pub fn with_capacity(head: H, cap: usize) -> Self {
197 let layout = Self::get_layout(cap);
198 let bytes_ptr = unsafe { alloc(layout) };
199 let mut ptr = NonNull::new(bytes_ptr as *mut Pair<H, MaybeUninit<T>>).unwrap();
200 unsafe { ptr::write(&mut ptr.as_mut().0 as *mut H, head) }
201 Self { ptr, len: 0, cap }
202 }
203
    /// Create an empty vector with header `head` and the default
    /// (`MIN_CAP`) capacity.
    pub fn new(head: H) -> Self {
        Self::with_capacity(head, MIN_CAP)
    }
208
    /// Drop every element at index `new_len` and beyond, possibly shrinking
    /// the allocation.
    ///
    /// # Panics
    /// Panics if `new_len > len`.
    pub fn truncate(&mut self, new_len: usize) {
        assert!(new_len <= self.len);
        if new_len == self.len {
            return;
        }

        // SAFETY: `[new_len..len]` holds initialized elements; they are
        // dropped in place before the buffer is cut down past them.
        unsafe {
            ptr::drop_in_place(&mut self.body[new_len..]);
        }
        unsafe { self.shrink(new_len) };
        self.len = new_len;
    }
223
224 pub fn resize_with(&mut self, new_len: usize, mut f: impl FnMut() -> T) {
228 if new_len < self.len {
229 self.truncate(new_len);
230 } else {
231 for _ in self.len..new_len {
232 self.push(f());
233 }
234 }
235 }
236
237 pub fn from_iter<I: IntoIterator<Item = T>>(head: H, iter: I) -> Self {
239 let iter = iter.into_iter();
240 let (lower, _) = iter.size_hint();
241 let mut this = Self::with_capacity(head, lower);
242 this.extend(iter);
243 this
244 }
245
    /// Reduce the allocation to exactly `len` element slots.
    pub fn shrink_to_fit(&mut self) {
        // SAFETY: `len` slots still cover every initialized element.
        unsafe { self.realloc_exact(self.len) }
    }
250
    /// Convert into a boxed `HeaderSlice`, giving up any spare capacity.
    pub fn into_box(mut self) -> Box<HeaderSlice<H, T>> {
        // Make the allocation's layout match what `Box` will assume when
        // it eventually frees the memory.
        self.shrink_to_fit();
        // SAFETY: after shrinking, the fat pointer from `as_ptr` (len
        // elements) describes the whole allocation.
        let b = unsafe { Box::from_raw(self.as_ptr().as_ptr()) };
        // The box now owns the allocation; skip our own destructor.
        mem::forget(self);
        b
    }
258
    /// Take ownership of a boxed `HeaderSlice`, reusing its allocation as
    /// a vector with `cap == len`.
    pub fn from_box(src: Box<HeaderSlice<H, T>>) -> Self {
        let len = src.body.len();
        // `Box::into_raw` releases ownership without freeing; the cast to
        // the `MaybeUninit` element form is a type-level change only.
        let ptr = NonNull::new(Box::into_raw(src) as *mut Pair<H, MaybeUninit<T>>).unwrap();
        Self { ptr, len, cap: len }
    }
265
266 pub fn reserve(&mut self, additional: usize) {
268 unsafe { self.realloc_for(self.len + additional) };
269 }
270
271 pub fn reserve_exact(&mut self, additional: usize) {
273 let new_cap = self.len + additional;
274 if new_cap <= self.cap {
275 return;
276 }
277 unsafe { self.realloc_exact(new_cap) };
278 }
279
    /// Free the backing allocation. Does NOT drop the header or elements.
    ///
    /// # Safety
    /// Must be called at most once, and `self` must not be used or dropped
    /// normally afterwards (its `Drop` would free the memory again).
    unsafe fn dealloc(&mut self) {
        dealloc(self.ptr.as_ptr() as *mut u8, Self::get_layout(self.cap));
    }
284
    /// Reinterpret as a vector of possibly-uninitialized header/elements.
    ///
    /// Purely type-level: `MaybeUninit<X>` shares `X`'s layout, so the
    /// transmute only forgets the "initialized" guarantee (and disables
    /// content drops in the target's destructor).
    fn into_uninit(self) -> HeaderVec<MaybeUninit<H>, MaybeUninit<T>> {
        unsafe { mem::transmute::<Self, HeaderVec<MaybeUninit<H>, MaybeUninit<T>>>(self) }
    }
288
    /// Consume the vector, iterating over its element values.
    ///
    /// The header is dropped here (the `H` half of the returned pair is
    /// discarded immediately).
    pub fn into_values(self) -> IntoValuesIter<H, T> {
        self.into_header_values().1
    }
293
    /// Split into the header value and an owning iterator over elements.
    pub fn into_header_values(self) -> (H, IntoValuesIter<H, T>) {
        let uninit = self.into_uninit();

        // SAFETY: the header slot is initialized; `transmute_copy` moves it
        // out by bitwise copy, and since the backing vec is now typed with
        // `MaybeUninit<H>` it will not drop the header a second time.
        let head = unsafe { mem::transmute_copy::<MaybeUninit<H>, H>(&uninit.head) };
        let values = IntoValuesIter {
            inner: uninit,
            index: 0,
        };
        (head, values)
    }
305
    /// Drop all elements and release the element storage.
    pub fn clear(&mut self) {
        self.clear_in_place();
        // SAFETY: `len` is now 0, so no initialized slot is lost by
        // reallocating down to zero element slots.
        unsafe { self.realloc_exact(0) }
    }
311
    /// Drop all elements but keep the current allocation for reuse.
    pub fn clear_in_place(&mut self) {
        // SAFETY: `body` (via Deref) covers exactly the `len` initialized
        // elements; after dropping them the length is reset.
        unsafe {
            ptr::drop_in_place(&mut self.body);
        }
        self.len = 0;
    }
319
    /// Free the allocation without dropping the header or any element.
    ///
    /// # Safety
    /// Leaks the header and elements unless the caller has already moved
    /// them out; the storage itself becomes invalid after this call.
    pub unsafe fn dealloc_without_dropping(mut self) {
        self.dealloc();
        // Skip `Drop`, which would drop contents and free a second time.
        mem::forget(self);
    }
324
    /// Build a vector by bitwise-copying `len` elements from `src`.
    ///
    /// # Safety
    /// `src` must point at `len` consecutive initialized `T`s. The copy is
    /// bitwise, so for non-`Copy` `T` the caller must ensure the originals
    /// are not also dropped (cf. the `header_vec!` macro, which forgets its
    /// source array).
    pub unsafe fn copy_from_ptr_unsafe(head: H, src: *mut T, len: usize) -> Self {
        let mut this = Self::with_capacity(head, len);
        let dest = this.body.as_mut_ptr();
        ptr::copy_nonoverlapping(src, dest, len);
        this.len = len;
        this
    }
334
    /// Reinterpret the vector under different header/element types.
    ///
    /// # Safety
    /// `H2`/`T2` must be layout-compatible with `H`/`T`, and the stored bit
    /// patterns must be valid for the target types (used below for the
    /// `MaybeUninit` <-> initialized conversions).
    unsafe fn cast<H2, T2>(self) -> HeaderVec<H2, T2> {
        let v = HeaderVec {
            ptr: self.ptr.cast(),
            len: self.len,
            cap: self.cap,
        };
        // Ownership moved into `v`; don't run `Drop` under the old typing.
        mem::forget(self);
        v
    }
344}
345
impl<H, T> HeaderVec<H, MaybeUninit<T>> {
    /// Create a vector whose `len` element slots are left uninitialized.
    pub fn new_uninit_values(head: H, len: usize) -> Self {
        let mut this = Self::with_capacity(head, len);
        // Exposing uninitialized slots is sound: the element type itself
        // is `MaybeUninit<T>`.
        this.len = len;
        this
    }

    /// Assert that every element slot has been initialized.
    ///
    /// # Safety
    /// All `len` elements must actually hold valid `T`s.
    pub unsafe fn assume_init_values(self) -> HeaderVec<H, T> {
        self.cast()
    }
}
357
impl<H, T> HeaderVec<MaybeUninit<H>, MaybeUninit<T>> {
    /// Assert that the header and every element are initialized.
    ///
    /// # Safety
    /// The header slot and all `len` element slots must hold valid values.
    pub unsafe fn assume_init(self) -> HeaderVec<H, T> {
        self.cast()
    }
}
363
impl<H, T> HeaderVec<MaybeUninit<H>, T> {
    /// Assert that the header is initialized.
    ///
    /// # Safety
    /// The header slot must hold a valid `H`.
    pub unsafe fn assume_init_head(self) -> HeaderVec<H, T> {
        self.cast()
    }
}
369
370impl<H, T: Copy> HeaderVec<H, T> {
371 pub fn copy_from_slice(head: H, src: &[T]) -> Self {
373 unsafe { Self::copy_from_ptr_unsafe(head, src.as_ptr() as *mut T, src.len()) }
374 }
375
376 pub fn extend_from_slice(&mut self, src: &[T]) {
378 let new_len = self.len + src.len();
379 if new_len > self.cap {
380 self.grow(new_len);
381 }
382 let old_len = self.len;
383 let uninit_slice = &mut self.inner_mut().body[old_len..];
384 unsafe {
385 ptr::copy(
386 src.as_ptr() as *mut MaybeUninit<T>,
387 uninit_slice.as_mut_ptr(),
388 src.len(),
389 )
390 }
391 self.len = new_len;
392 }
393}
394
395impl<H, T: Clone> HeaderVec<H, T> {
396 pub fn resize(&mut self, new_len: usize, mut val: T) {
399 if new_len < self.len {
400 self.truncate(new_len);
401 } else if new_len > self.len {
402 for _ in self.len..new_len - 1 {
403 let next_val = val.clone();
404 self.push(val);
405 val = next_val;
406 }
407 self.push(val);
408 }
409 }
410}
411
412impl<H, T: Default> HeaderVec<H, T> {
413 pub fn resize_default(&mut self, new_len: usize) {
416 self.resize_with(new_len, Default::default)
417 }
418}
419
420impl<H, T: Ord> HeaderVec<H, T> {
421 pub fn insert_sorted(&mut self, val: T) {
424 let index = self.body.binary_search(&val).unwrap_or_else(|x| x);
425 self.insert(index, val);
426 }
427
428 pub fn insert_or_replace_sorted(&mut self, val: T) -> Option<T> {
434 match self.body.binary_search(&val) {
435 Ok(i) => Some(mem::replace(&mut self.body[i], val)),
436 Err(i) => {
437 self.insert(i, val);
438 None
439 }
440 }
441 }
442}
443
impl<H, T> Deref for HeaderVec<H, T> {
    type Target = HeaderSlice<H, T>;
    /// View the header plus the `len` initialized elements.
    fn deref(&self) -> &Self::Target {
        // SAFETY: `as_ptr` builds a fat pointer covering exactly the
        // initialized prefix of the allocation.
        unsafe { &*self.as_ptr().as_ptr() }
    }
}
450
impl<H, T> DerefMut for HeaderVec<H, T> {
    /// Mutable view of the header plus the `len` initialized elements.
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same fat pointer as `deref`, and `&mut self` guarantees
        // unique access.
        unsafe { &mut *self.as_ptr().as_ptr() }
    }
}
456
457impl<H, T> AsRef<HeaderSlice<H, T>> for HeaderVec<H, T> {
458 fn as_ref(&self) -> &HeaderSlice<H, T> {
459 self.deref()
460 }
461}
462
463impl<H, T> AsMut<HeaderSlice<H, T>> for HeaderVec<H, T> {
464 fn as_mut(&mut self) -> &mut HeaderSlice<H, T> {
465 self.deref_mut()
466 }
467}
468
469impl<H, T> Borrow<HeaderSlice<H, T>> for HeaderVec<H, T> {
470 fn borrow(&self) -> &HeaderSlice<H, T> {
471 self.deref()
472 }
473}
474
475impl<H, T> BorrowMut<HeaderSlice<H, T>> for HeaderVec<H, T> {
476 fn borrow_mut(&mut self) -> &mut HeaderSlice<H, T> {
477 self.deref_mut()
478 }
479}
480
impl<H, T> Drop for HeaderVec<H, T> {
    fn drop(&mut self) {
        // Drop the header and all initialized elements first, then free
        // the allocation — the order matters, since `dealloc` invalidates
        // the memory `drop_in_place` walks over.
        unsafe {
            ptr::drop_in_place(self.deref_mut());
            self.dealloc();
        }
    }
}
489
490impl<H: Clone, T: Clone> Clone for HeaderVec<H, T> {
491 fn clone(&self) -> Self {
492 Self::from_iter(self.head.clone(), self.body.iter().cloned())
493 }
494}
495
496impl<H, T> Extend<T> for HeaderVec<H, T> {
497 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
498 for x in iter {
499 self.push(x);
500 }
501 }
502}
503
504impl<H, T, I: IntoIterator<Item = T>> AddAssign<I> for HeaderVec<H, T> {
505 fn add_assign(&mut self, rhs: I) {
506 self.extend(rhs);
507 }
508}
509
510impl<H, T, I: IntoIterator<Item = T>> Add<I> for HeaderVec<H, T> {
511 type Output = Self;
512 fn add(mut self, rhs: I) -> Self {
513 self += rhs;
514 self
515 }
516}
517
518impl<H, T, Rhs: ?Sized> PartialEq<Rhs> for HeaderVec<H, T>
519where
520 H: PartialEq,
521 T: PartialEq,
522 Rhs: Borrow<HeaderSlice<H, T>>,
523{
524 fn eq(&self, rhs: &Rhs) -> bool {
525 self.deref() == rhs.borrow()
526 }
527}
528
529impl<H: Eq, T: Eq> Eq for HeaderVec<H, T> {}
530
531impl<H, T, Rhs: ?Sized> PartialOrd<Rhs> for HeaderVec<H, T>
532where
533 H: PartialOrd,
534 T: PartialOrd,
535 Rhs: Borrow<HeaderSlice<H, T>>,
536{
537 fn partial_cmp(&self, rhs: &Rhs) -> Option<Ordering> {
538 self.deref().partial_cmp(rhs.borrow())
539 }
540}
541
542impl<H: Ord, T: Ord> Ord for HeaderVec<H, T> {
543 fn cmp(&self, rhs: &Self) -> Ordering {
544 self.deref().cmp(rhs.deref())
545 }
546}
547
548impl<H: Hash, T: Hash> Hash for HeaderVec<H, T> {
549 fn hash<S: hash::Hasher>(&self, state: &mut S) {
550 self.deref().hash(state)
551 }
552}
553
554impl<H: Debug, T: Debug> Debug for HeaderVec<H, T> {
555 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
556 let hslice: &HeaderSlice<H, T> = self.deref();
557 hslice.fmt(f)
558 }
559}
560
561impl<H: Default, T> iter::FromIterator<T> for HeaderVec<H, T> {
562 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
563 Self::from_iter(H::default(), iter)
564 }
565}
566
567impl<H, T> From<Box<HeaderSlice<H, T>>> for HeaderVec<H, T> {
568 fn from(src: Box<HeaderSlice<H, T>>) -> Self {
569 Self::from_box(src)
570 }
571}
572
573impl<H, T> From<HeaderVec<H, T>> for Box<HeaderSlice<H, T>> {
574 fn from(src: HeaderVec<H, T>) -> Self {
575 src.into_box()
576 }
577}
578
579impl<H: Default, T> Default for HeaderVec<H, T> {
580 fn default() -> Self {
581 Self::new(H::default())
582 }
583}
584
/// Owning iterator over the element values of a `HeaderVec`, produced by
/// `into_values` / `into_header_values`.
pub struct IntoValuesIter<H, T> {
    // Backing storage retyped to `MaybeUninit` so already-yielded slots
    // (and the moved-out header) are not dropped a second time.
    inner: HeaderVec<MaybeUninit<H>, MaybeUninit<T>>,
    // Index of the next element to yield; slots before it have been
    // moved out, slots from it onward are still initialized.
    index: usize,
}
589
impl<H, T> IntoValuesIter<H, T> {
    /// Raw pointer to the not-yet-yielded elements, retyped as initialized
    /// `T`s (slots at `index..len` are still initialized by construction).
    fn valid_slice_ptr(this: *mut Self) -> *mut [T] {
        let body = unsafe { &mut (*this).inner.body };
        let index = unsafe { (*this).index };
        &mut body[index..] as *mut [MaybeUninit<T>] as *mut [T]
    }

    /// Shared view of the remaining (still-owned) elements.
    fn valid_slice(&self) -> &[T] {
        // SAFETY: read-only access to slots that are still initialized.
        unsafe { &*Self::valid_slice_ptr(self as *const Self as *mut Self) }
    }
    /// Mutable view of the remaining (still-owned) elements.
    fn valid_slice_mut(&mut self) -> &mut [T] {
        // SAFETY: `&mut self` guarantees unique access to those slots.
        unsafe { &mut *Self::valid_slice_ptr(self as *mut Self) }
    }
}
606
impl<H, T> Iterator for IntoValuesIter<H, T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        if self.index >= self.inner.len() {
            return None;
        }

        // SAFETY: slot `index` is still initialized; `transmute_copy`
        // moves the value out bitwise, and advancing `index` marks the
        // slot as no longer owned so it will not be dropped again.
        let val: T = unsafe { mem::transmute_copy(&self.inner.body[self.index]) };
        self.index += 1;
        Some(val)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: the number of remaining elements is known.
        let len = self.inner.len() - self.index;
        (len, Some(len))
    }
}
624
625impl<H, T> ExactSizeIterator for IntoValuesIter<H, T> {}
626
impl<H, T> Drop for IntoValuesIter<H, T> {
    fn drop(&mut self) {
        // Drop only the elements that were never yielded; the backing
        // `HeaderVec<MaybeUninit<_>, MaybeUninit<_>>` then frees the
        // allocation without touching any slot contents.
        unsafe {
            ptr::drop_in_place(self.valid_slice_mut());
        }
    }
}
634
635impl<H, T: Clone> Clone for IntoValuesIter<H, T> {
636 fn clone(&self) -> Self {
637 let iter = self.valid_slice().iter().cloned().map(MaybeUninit::new);
639 let new_vec = HeaderVec::from_iter(MaybeUninit::uninit(), iter);
640 Self {
641 inner: new_vec,
642 index: 0,
643 }
644 }
645}
646
/// Construct a `HeaderVec` from a header expression plus either a list of
/// elements (`header_vec![h; a, b, c]`) or a repeated value
/// (`header_vec![h; v; n]`, which requires `T: Clone`).
#[macro_export]
macro_rules! header_vec {
    ($h:expr; $($v:expr),* $(,)?) => {{
        let mut src = [$($v),*];
        #[allow(unused_unsafe)]
        let v = unsafe {
            // Bitwise-copies the elements out of `src`...
            $crate::vec::HeaderVec::copy_from_ptr_unsafe($h, src.as_mut_ptr(), src.len())
        };
        // ...so `src` must be forgotten to avoid dropping them twice.
        core::mem::forget(src);
        v
    }};
    ($h:expr; $v:expr; $len:expr) => {{
        let mut v = $crate::vec::HeaderVec::with_capacity($h, $len);
        v.resize($len, $v);
        v
    }};
}