1#![no_std]
2
3extern crate alloc;
4
5use core::{
6 cmp,
7 fmt::Debug,
8 marker::PhantomData,
9 mem::{self, ManuallyDrop},
10 ops::{Deref, DerefMut, Index, IndexMut},
11 ptr,
12 slice::SliceIndex,
13};
14
/// Bookkeeping prefix stored at the very start of the single allocation,
/// directly before the `T` elements: the user-supplied header plus the
/// vector's capacity and length.
// NOTE(review): `header()`/`header_mut()` obtain this by casting the
// allocation pointer, and `layout()` aligns the allocation to
// max(align_of::<H>(), align_of::<T>()), so this struct's layout must stay
// compatible with that scheme.
struct HeaderVecHeader<H> {
    head: H,
    capacity: usize,
    len: usize,
}
20
/// A vector that stores a user-provided header `H` inline with its `T`
/// elements in one heap allocation.
///
/// `ptr` points at the start of the allocation (where the
/// `HeaderVecHeader<H>` lives); the elements begin `Self::offset()` `T`-sized
/// slots after it. `PhantomData<H>` records logical ownership of the header
/// for variance/drop-check purposes without storing an extra `H`.
pub struct HeaderVec<H, T> {
    ptr: *mut T,
    _phantom: PhantomData<H>,
}
47
impl<H, T> HeaderVec<H, T> {
    /// Creates a `HeaderVec` holding `head` with room for one element.
    pub fn new(head: H) -> Self {
        Self::with_capacity(1, head)
    }

    /// Creates a `HeaderVec` holding `head` with room for `capacity` elements.
    ///
    /// # Panics
    /// Panics if `capacity` is zero — a non-empty allocation is always kept so
    /// the header has somewhere to live.
    pub fn with_capacity(capacity: usize, head: H) -> Self {
        assert!(capacity > 0, "HeaderVec capacity cannot be 0");
        let layout = Self::layout(capacity);
        // One allocation holds the header followed by the element storage.
        let ptr = unsafe { alloc::alloc::alloc(layout) } as *mut T;

        if ptr.is_null() {
            alloc::alloc::handle_alloc_error(layout);
        }

        let mut this = Self {
            ptr,
            _phantom: PhantomData,
        };

        // The allocation is uninitialized, so the header's `head` field must
        // be written with `ptr::write`; a plain assignment would try to drop
        // whatever garbage is currently there.
        let header = this.header_mut();
        unsafe { core::ptr::write(&mut header.head, head) };
        header.capacity = capacity;
        header.len = 0;

        this
    }

    /// Number of elements currently stored (read from the inline header).
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.header().len
    }

    /// `true` when no elements are stored.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Number of elements the current allocation can hold without growing.
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.header().capacity
    }

    /// Borrows the elements as a slice.
    #[inline(always)]
    pub fn as_slice(&self) -> &[T] {
        unsafe { core::slice::from_raw_parts(self.start_ptr(), self.len()) }
    }

    /// Mutably borrows the elements as a slice.
    #[inline(always)]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { core::slice::from_raw_parts_mut(self.start_ptr_mut(), self.len()) }
    }

    /// Type-erased address of the allocation; useful as a stable identity
    /// token until the buffer is moved by a reallocation.
    #[inline(always)]
    pub fn ptr(&self) -> *const () {
        self.ptr as *const ()
    }

    /// `true` if `ptr` is the address of this vector's current allocation.
    #[inline(always)]
    pub fn is(&self, ptr: *const ()) -> bool {
        self.ptr as *const () == ptr
    }

    /// Creates a non-owning alias of this vector.
    ///
    /// # Safety
    /// The returned weak handle does not own the allocation. The caller must
    /// ensure it is never used after this `HeaderVec` is dropped, or after the
    /// buffer has moved (see `push`'s return value and `update`).
    #[inline(always)]
    pub unsafe fn weak(&self) -> HeaderVecWeak<H, T> {
        HeaderVecWeak {
            // ManuallyDrop prevents the alias from deallocating on drop.
            header_vec: ManuallyDrop::new(Self {
                ptr: self.ptr,
                _phantom: PhantomData,
            }),
        }
    }

    /// Adopts the allocation pointer from `weak`, repairing `self` after the
    /// buffer has been moved through another handle.
    ///
    /// # Safety
    /// `weak` must refer to the current, live allocation of this vector's data.
    #[inline(always)]
    pub unsafe fn update(&mut self, weak: HeaderVecWeak<H, T>) {
        self.ptr = weak.ptr;
    }

    /// Doubles the capacity, reallocating the single header+elements buffer.
    ///
    /// Returns `Some(old_address)` when the allocation moved (so callers
    /// holding weak refs / raw pointers can repair them), otherwise `None`.
    #[cold]
    fn resize_insert(&mut self) -> Option<*const ()> {
        let old_capacity = self.capacity();
        let new_capacity = old_capacity * 2;
        // Written before realloc on purpose: realloc copies the old contents
        // (header included), so the new capacity travels to the new buffer.
        self.header_mut().capacity = new_capacity;
        let ptr = unsafe {
            alloc::alloc::realloc(
                self.ptr as *mut u8,
                Self::layout(old_capacity),
                Self::elems_to_mem_bytes(new_capacity),
            ) as *mut T
        };
        if ptr.is_null() {
            alloc::alloc::handle_alloc_error(Self::layout(new_capacity));
        }
        // Report the stale address only when realloc actually moved the data.
        let previous_pointer = if ptr != self.ptr {
            Some(self.ptr as *const ())
        } else {
            None
        };
        self.ptr = ptr;

        previous_pointer
    }

    /// Appends `item`, growing the buffer (doubling) when full.
    ///
    /// Returns `Some(old_address)` when growth moved the allocation
    /// (invalidating outstanding weak refs / raw pointers), else `None`.
    pub fn push(&mut self, item: T) -> Option<*const ()> {
        let old_len = self.len();
        let new_len = old_len + 1;
        let old_capacity = self.capacity();
        let previous_pointer = if new_len > old_capacity {
            self.resize_insert()
        } else {
            None
        };
        unsafe {
            core::ptr::write(self.start_ptr_mut().add(old_len), item);
        }
        self.header_mut().len = new_len;
        previous_pointer
    }

    /// Keeps only the elements for which `f` returns `true`, in place and in
    /// order; rejected elements are dropped immediately.
    // NOTE(review): if `f` panics part-way through, `len` is not updated,
    // so already-dropped elements may be dropped again by `Drop` — confirm
    // callers do not rely on panic safety here.
    pub fn retain(&mut self, mut f: impl FnMut(&T) -> bool) {
        // `head` is the write cursor for retained elements.
        let mut head = 0;
        let original_len = self.len();
        let start_ptr = self.start_ptr_mut();
        for index in 0..original_len {
            unsafe {
                if f(&*start_ptr.add(index)) {
                    // Compact: move the kept element down over the gap.
                    if head != index {
                        ptr::copy_nonoverlapping(start_ptr.add(index), start_ptr.add(head), 1);
                    }
                    head += 1;
                } else {
                    ptr::drop_in_place(start_ptr.add(index));
                }
            }
        }
        self.header_mut().len = head;
    }

    /// Number of `T`-sized slots the header occupies at the front of the
    /// allocation: `size_of::<HeaderVecHeader<H>>()` rounded up to a multiple
    /// of `size_of::<T>()`.
    // NOTE(review): divides by `size_of::<T>()`, so zero-sized `T` is not
    // supported (see the expect message in `layout`).
    #[inline(always)]
    fn offset() -> usize {
        (mem::size_of::<HeaderVecHeader<H>>() + mem::size_of::<T>() - 1) / mem::size_of::<T>()
    }

    /// Total `T`-sized slots needed for the header plus `capacity` elements.
    #[inline(always)]
    fn elems_to_mem_elems(capacity: usize) -> usize {
        Self::offset() + capacity
    }

    /// Total allocation size in bytes for `capacity` elements.
    #[inline(always)]
    fn elems_to_mem_bytes(capacity: usize) -> usize {
        Self::elems_to_mem_elems(capacity) * mem::size_of::<T>()
    }

    /// Memory layout for the combined header+elements allocation, aligned to
    /// the stricter of `H`'s and `T`'s alignment.
    #[inline(always)]
    fn layout(capacity: usize) -> alloc::alloc::Layout {
        alloc::alloc::Layout::from_size_align(
            Self::elems_to_mem_bytes(capacity),
            cmp::max(mem::align_of::<H>(), mem::align_of::<T>()),
        )
        .expect("unable to produce memory layout with Hrc key type (is it a zero sized type? they are not permitted)")
    }

    /// Pointer to the first element (just past the header slots).
    #[inline(always)]
    fn start_ptr(&self) -> *const T {
        unsafe { self.ptr.add(Self::offset()) }
    }

    /// Mutable pointer to the first element (just past the header slots).
    #[inline(always)]
    fn start_ptr_mut(&mut self) -> *mut T {
        unsafe { self.ptr.add(Self::offset()) }
    }

    /// The bookkeeping header, which lives at the very start of the allocation.
    #[inline(always)]
    fn header(&self) -> &HeaderVecHeader<H> {
        unsafe { &*(self.ptr as *const HeaderVecHeader<H>) }
    }

    /// Mutable access to the bookkeeping header.
    #[inline(always)]
    fn header_mut(&mut self) -> &mut HeaderVecHeader<H> {
        unsafe { &mut *(self.ptr as *mut HeaderVecHeader<H>) }
    }
}
299
impl<H, T> Drop for HeaderVec<H, T> {
    fn drop(&mut self) {
        unsafe {
            // Order matters: drop the header payload first, then each
            // initialized element, and only afterwards free the single
            // backing allocation (whose layout depends on `capacity`).
            ptr::drop_in_place(&mut self.header_mut().head);
            for ix in 0..self.len() {
                ptr::drop_in_place(self.start_ptr_mut().add(ix));
            }
            alloc::alloc::dealloc(self.ptr as *mut u8, Self::layout(self.capacity()));
        }
    }
}
311
312impl<H, T> Deref for HeaderVec<H, T> {
313 type Target = H;
314
315 #[inline(always)]
316 fn deref(&self) -> &Self::Target {
317 &self.header().head
318 }
319}
320
321impl<H, T> DerefMut for HeaderVec<H, T> {
322 #[inline(always)]
323 fn deref_mut(&mut self) -> &mut Self::Target {
324 &mut self.header_mut().head
325 }
326}
327
328impl<H, T, I> Index<I> for HeaderVec<H, T>
329where
330 I: SliceIndex<[T]>,
331{
332 type Output = I::Output;
333
334 #[inline(always)]
335 fn index(&self, index: I) -> &I::Output {
336 self.as_slice().index(index)
337 }
338}
339
340impl<H, T, I> IndexMut<I> for HeaderVec<H, T>
341where
342 I: SliceIndex<[T]>,
343{
344 #[inline(always)]
345 fn index_mut(&mut self, index: I) -> &mut I::Output {
346 self.as_mut_slice().index_mut(index)
347 }
348}
349
350impl<H, T> PartialEq for HeaderVec<H, T>
351where
352 H: PartialEq,
353 T: PartialEq,
354{
355 fn eq(&self, other: &Self) -> bool {
356 self.header().head == other.header().head && self.as_slice() == other.as_slice()
357 }
358}
359
360impl<H, T> Clone for HeaderVec<H, T>
361where
362 H: Clone,
363 T: Clone,
364{
365 fn clone(&self) -> Self {
366 let mut new_vec = Self::with_capacity(self.len(), self.header().head.clone());
367 for e in self.as_slice() {
368 new_vec.push(e.clone());
369 }
370 new_vec
371 }
372}
373
374impl<H, T> Debug for HeaderVec<H, T>
375where
376 H: Debug,
377 T: Debug,
378{
379 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
380 f.debug_struct("HeaderVec")
381 .field("header", &self.header().head)
382 .field("vec", &self.as_slice())
383 .finish()
384 }
385}
386
/// Non-owning alias of a `HeaderVec`, created via `HeaderVec::weak`.
///
/// `ManuallyDrop` prevents the inner `HeaderVec`'s `Drop` from running, so
/// dropping a weak handle never deallocates; validity is the responsibility
/// of whoever called the unsafe `weak()` constructor.
pub struct HeaderVecWeak<H, T> {
    header_vec: ManuallyDrop<HeaderVec<H, T>>,
}
390
391impl<H, T> Deref for HeaderVecWeak<H, T> {
392 type Target = HeaderVec<H, T>;
393
394 fn deref(&self) -> &Self::Target {
395 &self.header_vec
396 }
397}
398
399impl<H, T> DerefMut for HeaderVecWeak<H, T> {
400 fn deref_mut(&mut self) -> &mut Self::Target {
401 &mut self.header_vec
402 }
403}
404
405impl<H, T> Debug for HeaderVecWeak<H, T> {
406 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
407 f.debug_struct("HeaderVecWeak").finish()
408 }
409}