// unsized_stack/raw.rs

1/*
2 * Created on Sun Jul 02 2023
3 *
4 * Copyright (c) storycraft. Licensed under the MIT Licence.
5 */
6
7use core::{
8    alloc::Layout,
9    marker::PhantomData,
10    ptr::{self, NonNull},
11};
12use std::{
13    alloc::{alloc, dealloc, realloc},
14    fmt::Debug,
15    mem,
16};
17
18use crate::fat_ptr::{self, FatPtr};
19
// Zero-sized marker type whose `align(16)` provides both `DEFAULT_ALIGN` and
// the initial (size 0, align 16) buffer layout used before any allocation.
#[repr(align(16))]
struct DefaultBuffer;
22
/// Backing storage for a stack of unsized values of type `T`.
///
/// Values are stored contiguously (with alignment padding) in a single
/// growable byte buffer, while `table` records each value's offset and
/// pointer metadata so fat pointers to the stored values can be rebuilt.
pub struct RawUnsizedStack<T: ?Sized> {
    // Pointer to the byte buffer; dangling until the first allocation.
    buf: NonNull<u8>,
    // Layout of the current buffer allocation (size 0 before first alloc).
    buf_layout: Layout,
    // Number of bytes of `buf` currently occupied by pushed values.
    buf_occupied: usize,

    // One entry per pushed value, in push order.
    table: Vec<TableItem>,
    _phantom: PhantomData<T>,
}
31
32impl<T: ?Sized> RawUnsizedStack<T> {
33    pub const DEFAULT_ALIGN: usize = mem::align_of::<DefaultBuffer>();
34
35    pub const fn new() -> Self {
36        fat_ptr::check_valid::<T>();
37
38        Self {
39            buf: NonNull::dangling(),
40            buf_layout: Layout::new::<DefaultBuffer>(),
41            buf_occupied: 0,
42            table: Vec::new(),
43            _phantom: PhantomData,
44        }
45    }
46
47    pub const fn bytes_occupied(&self) -> usize {
48        self.buf_occupied
49    }
50
51    pub const fn buf_layout(&self) -> Layout {
52        self.buf_layout
53    }
54
55    pub const fn buf_ptr(&self) -> NonNull<u8> {
56        self.buf.cast()
57    }
58
59    pub fn table(&self) -> &[TableItem] {
60        &self.table
61    }
62
63    pub fn reserve_for_push(&mut self, item_layout: Layout) -> Offset {
64        if item_layout.size() == 0 {
65            return Offset::Zst(item_layout.align());
66        }
67
68        let offset = {
69            let padding = ((self.buf_occupied + item_layout.align() - 1)
70                & !(item_layout.align() - 1))
71                - self.buf_occupied;
72
73            self.buf_occupied + padding
74        };
75
76        let new_buf_layout = Layout::from_size_align(
77            (offset + item_layout.size())
78                .next_power_of_two()
79                .max(self.buf_layout.size()),
80            item_layout.align().max(self.buf_layout.align()),
81        )
82        .unwrap();
83
84        if new_buf_layout.align() != self.buf_layout.align() {
85            self.buf = {
86                // Safety: allocate new memory and validate with [`NonNull`]
87                let new_buf = NonNull::new(unsafe { alloc(new_buf_layout) }).unwrap();
88
89                if self.buf_layout.size() != 0 {
90                    // Safety: copy storage data into new valid memory and deallocate old one
91                    unsafe {
92                        ptr::copy_nonoverlapping(
93                            self.buf.as_ptr(),
94                            new_buf.as_ptr(),
95                            self.buf_layout.size(),
96                        );
97                        dealloc(self.buf.as_ptr(), self.buf_layout);
98                    }
99                }
100
101                new_buf
102            };
103
104            self.buf_layout = new_buf_layout;
105        } else if new_buf_layout.size() != self.buf_layout.size() {
106            self.buf = if self.buf_layout.size() == 0 {
107                // Safety: allocate new memory and validate with [`NonNull`]
108                NonNull::new(unsafe { alloc(new_buf_layout) }).unwrap()
109            } else {
110                // Safety: reallocate existing memory and validate with [`NonNull`]
111                NonNull::new(unsafe {
112                    realloc(self.buf.as_ptr(), self.buf_layout, new_buf_layout.size())
113                })
114                .unwrap()
115            };
116
117            self.buf_layout = new_buf_layout;
118        }
119
120        Offset::Data(offset)
121    }
122
123    pub fn push<I>(&mut self, item: I, coercion: fn(&I) -> &T) {
124        let (item_layout, item_ptr) = {
125            let coercion_ref = coercion(&item);
126            (
127                Layout::for_value(coercion_ref),
128                fat_ptr::decompose(coercion_ref as *const _),
129            )
130        };
131
132        let offset = self.reserve_for_push(item_layout);
133
134        if let Offset::Data(offset) = offset {
135            self.buf_occupied = offset + item_layout.size();
136
137            // Safety: original variable copied to internal storage and forgotten. (Variable moved manually)
138            unsafe {
139                ptr::copy_nonoverlapping(
140                    item_ptr.ptr() as *const u8,
141                    self.buf.as_ptr().wrapping_add(offset),
142                    item_layout.size(),
143                );
144            }
145            mem::forget(item);
146        }
147        self.table.push(TableItem::new(offset, item_ptr.metadata()));
148    }
149
150    pub fn pop(&mut self) -> Option<()> {
151        let item = self.table.pop()?;
152        // Safety: Take out [`TableItem`] from table and drop its data
153        unsafe {
154            drop_item::<T>(self.buf.as_ptr(), item);
155        }
156
157        if let Offset::Data(offset) = item.offset {
158            self.buf_occupied = offset;
159        }
160
161        Some(())
162    }
163
164    pub fn ptr_from_table(
165        &self,
166        func: impl for<'b> FnOnce(&'b [TableItem]) -> Option<&'b TableItem>,
167    ) -> Option<*const T> {
168        Some(compose::<T>(self.buf.as_ptr(), *func(&self.table)?))
169    }
170
171    pub fn ref_from_table(
172        &self,
173        func: impl for<'b> FnOnce(&'b [TableItem]) -> Option<&'b TableItem>,
174    ) -> Option<&T> {
175        // Safety: pointer created with [`TableItem`] from table
176        Some(unsafe { &*self.ptr_from_table(func)? })
177    }
178
179    pub fn mut_from_table(
180        &mut self,
181        func: impl for<'b> FnOnce(&'b [TableItem]) -> Option<&'b TableItem>,
182    ) -> Option<&mut T> {
183        // Safety: Exclusive mutable reference, pointer created with [`TableItem`] from table
184        Some(unsafe { &mut *self.ptr_from_table(func)?.cast_mut() })
185    }
186
187    pub fn clear(&mut self) {
188        // Safety: Take out [`TableItem`] from table and drop its data
189        self.table.drain(..).for_each(|item| unsafe {
190            drop_item::<T>(self.buf.as_ptr(), item);
191        });
192        self.buf_occupied = 0;
193    }
194}
195
// Safety: `RawUnsizedStack` exclusively owns the values it stores, so moving
// the stack across threads is safe when the stored values are `Send`.
unsafe impl<T: ?Sized + Send> Send for RawUnsizedStack<T> {}

// Safety: shared access only hands out `&T` (via `ref_from_table` and
// friends), so sharing the stack across threads is safe when the stored
// values are `Sync`.
unsafe impl<T: ?Sized + Sync> Sync for RawUnsizedStack<T> {}
201
202impl<T: ?Sized> Drop for RawUnsizedStack<T> {
203    fn drop(&mut self) {
204        for item in self.table.iter().copied() {
205            // Safety: Drop every [`TableItem`] from table
206            unsafe {
207                drop_item::<T>(self.buf.as_ptr(), item);
208            }
209        }
210
211        if self.buf_layout.size() > 0 {
212            // Safety: buf is valid if its layout has size bigger than 0
213            unsafe {
214                dealloc(self.buf.as_ptr(), self.buf_layout);
215            }
216        }
217    }
218}
219
/// Drops, in place, the value described by `item`.
///
/// # Safety
/// `base` must be the buffer pointer that `item`'s offset refers to, `item`
/// must describe a live value that has not been dropped yet, and the value
/// must not be accessed afterwards.
pub(crate) unsafe fn drop_item<T: ?Sized>(base: *const u8, item: TableItem) {
    ptr::drop_in_place(compose::<T>(base, item).cast_mut());
}
223
/// Rebuilds a fat `*const T` for `item`, relative to buffer pointer `base`.
pub(crate) const fn compose<T: ?Sized>(base: *const u8, item: TableItem) -> *const T {
    fat_ptr::compose::<T>(item.to_fat_ptr(base))
}
227
/// Location of a stored value within the stack.
#[derive(Debug, Clone, Copy)]
pub enum Offset {
    /// Byte offset into the buffer where the value's data starts.
    Data(usize),
    /// Zero-sized value: carries only the required alignment, occupying no
    /// buffer space.
    Zst(usize),
}
233
/// Per-value bookkeeping entry: where the value lives plus the pointer
/// metadata needed to rebuild a fat pointer to it.
#[derive(Debug, Clone, Copy)]
pub struct TableItem {
    // Buffer offset (or alignment, for ZSTs) of the value.
    offset: Offset,
    // Metadata component of the fat pointer, as produced by
    // `fat_ptr::decompose`.
    metadata: *const (),
}
239
impl TableItem {
    /// Pairs a value's offset with its fat-pointer metadata.
    const fn new(offset: Offset, metadata: *const ()) -> Self {
        Self { offset, metadata }
    }

    /// Rebuilds the raw [`FatPtr`] for this entry, given the buffer base
    /// pointer.
    ///
    /// For zero-sized values the data pointer is a dangling-but-aligned
    /// address (`sptr::invalid(align)`), which is a valid pointer for
    /// zero-sized access.
    pub const fn to_fat_ptr(&self, base: *const u8) -> FatPtr {
        FatPtr::new(
            match self.offset {
                Offset::Data(offset) => base.wrapping_add(offset),
                Offset::Zst(align) => sptr::invalid(align),
            },
            self.metadata,
        )
    }
}
254}