// alien_signals/primitive.rs
/// Bitflag set describing the state of a reactive node — a manual
/// bitflags-style newtype over `u8`.
///
/// `Debug`, `Default`, and `Hash` are derived so the flags can be logged,
/// zero-initialized (`Flags::default() == Flags::NONE`), and used as map
/// keys; all are backward-compatible additions.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Flags(u8);
impl Flags {
    /// No flags set; equal to `Flags::default()`.
    pub const NONE: Self = Self(0);
    pub const MUTABLE: Self = Self(1 << 0);
    pub const WATCHING: Self = Self(1 << 1);
    pub const RECURSED_CHECK: Self = Self(1 << 2);
    pub const RECURSED: Self = Self(1 << 3);
    pub const DIRTY: Self = Self(1 << 4);
    pub const PENDING: Self = Self(1 << 5);
}
impl Flags {
    /// `true` when no flag is set.
    #[inline(always)]
    pub(crate) const fn is_zero(self) -> bool {
        self.0 == 0
    }
    /// `true` when at least one flag is set.
    #[inline(always)]
    pub(crate) const fn is_nonzero(self) -> bool {
        self.0 != 0
    }
}
impl std::ops::Not for Flags {
    type Output = Self;
    #[inline(always)]
    fn not(self) -> Self::Output {
        Self(!self.0)
    }
}
impl std::ops::BitAnd for Flags {
    type Output = Self;
    #[inline(always)]
    fn bitand(self, rhs: Self) -> Self::Output {
        Self(self.0 & rhs.0)
    }
}
impl std::ops::BitAndAssign for Flags {
    #[inline(always)]
    fn bitand_assign(&mut self, rhs: Self) {
        *self = *self & rhs
    }
}
impl std::ops::BitOr for Flags {
    type Output = Self;
    #[inline(always)]
    fn bitor(self, rhs: Self) -> Self::Output {
        Self(self.0 | rhs.0)
    }
}
impl std::ops::BitOrAssign for Flags {
    #[inline(always)]
    fn bitor_assign(&mut self, rhs: Self) {
        *self = *self | rhs
    }
}
55
/// LIFO stack — a thin wrapper over `Vec` exposing only push/pop.
pub(crate) struct Stack<T>(Vec<T>);

impl<T> Stack<T> {
    /// Creates an empty stack; `const` so it can initialize a `static`.
    pub(crate) const fn new() -> Self {
        Self(Vec::new())
    }

    /// Pushes `item` on top of the stack.
    #[inline]
    pub(crate) fn push(&mut self, item: T) {
        self.0.push(item);
    }

    /// Pops the most recently pushed element; `None` when the stack is empty.
    #[inline]
    pub(crate) fn pop(&mut self) -> Option<T> {
        self.0.pop()
    }
}

impl<T> Default for Stack<T> {
    fn default() -> Self {
        Self::new()
    }
}
78
/// FIFO queue — a thin wrapper over `VecDeque` exposing only what the
/// crate needs.
///
/// `pop`/`push`/`length` now carry `#[inline]` for consistency with the
/// sibling `Stack` wrapper (behavior unchanged).
pub(crate) struct Queue<T>(std::collections::VecDeque<T>);
impl<T> Default for Queue<T> {
    fn default() -> Self {
        Self::new()
    }
}
impl<T> Queue<T> {
    /// Creates an empty queue; `const` so it can initialize a `static`.
    pub(crate) const fn new() -> Self {
        Self(std::collections::VecDeque::new())
    }

    /// Removes and returns the oldest element; `None` when empty.
    #[inline]
    pub(crate) fn pop(&mut self) -> Option<T> {
        self.0.pop_front()
    }

    /// Appends `item` at the back of the queue.
    #[inline]
    pub(crate) fn push(&mut self, item: T) {
        self.0.push_back(item);
    }

    /// Number of queued elements.
    #[inline]
    pub(crate) fn length(&self) -> usize {
        self.0.len()
    }

    /// Mutable slice of all elements in queue order; may shift elements to
    /// make the ring buffer contiguous (`VecDeque::make_contiguous`).
    pub(crate) fn as_slice_mut(&mut self) -> &mut [T] {
        self.0.make_contiguous()
    }
}
107
108#[derive(Default, Clone, Copy, PartialEq, Eq)]
109pub(crate) struct Version(usize);
110impl Version {
111 pub(crate) const fn new() -> Self {
112 Self(0)
113 }
114
115 pub(crate) fn increment(&mut self) {
116 self.0 += 1;
117 }
118}
119
/// Type-erased value with a small-value optimization: values of at most
/// 16 bytes with alignment 1 and no drop glue are stored inline as raw
/// bytes; everything else goes behind an `Rc<dyn Any>`.
///
/// The derived `Clone` byte-copies the `Inline` variant; this is
/// memory-safe only because the inline path rejects types with drop glue
/// (`needs_drop`), so no double-drop can result.
#[derive(Clone)]
pub(crate) enum SmallAny {
    /// Raw bytes of a `T` with `size <= 16`, `align == 1`, `!needs_drop`.
    Inline([u8; 16]),
    /// Fallback for everything else; `Rc` keeps clones cheap.
    Heap(std::rc::Rc<dyn std::any::Any>),
}
impl SmallAny {
    /// Erases `value`, choosing inline storage when it is provably safe.
    ///
    /// Note that `align_of::<[u8; 16]>()` is 1 (array alignment equals
    /// element alignment), so the alignment test admits only align-1
    /// types — that is what keeps the raw `*const T` read in
    /// `downcast_ref_unchecked` aligned.
    pub(crate) fn new<T: std::any::Any + 'static>(value: T) -> Self {
        use std::mem::{align_of, needs_drop, size_of};
        if size_of::<T>() <= 16 && !needs_drop::<T>() && align_of::<T>() <= align_of::<[u8; 16]>() {
            let mut data = [0u8; 16];
            // SAFETY: size_of::<T>() <= 16 was just checked, so the copy
            // stays inside `data`; `value` is a live, initialized T.
            unsafe {
                let ptr = &value as *const T as *const u8;
                std::ptr::copy_nonoverlapping(ptr, data.as_mut_ptr(), size_of::<T>());
            }
            // The bytes now live in `data`; forget the original so it is
            // not dropped (a no-op anyway: this branch requires !needs_drop).
            std::mem::forget(value);
            Self::Inline(data)
        } else {
            Self::Heap(std::rc::Rc::new(value))
        }
    }

    /// Borrows the stored value as `&T` without any runtime type check.
    ///
    /// # Safety
    /// `T` must be exactly the type that was passed to [`SmallAny::new`]
    /// for this instance. A mismatched `T` reinterprets raw bytes
    /// (`Inline`) or hits `unwrap_unchecked` on a failed downcast
    /// (`Heap`) — both are undefined behavior.
    #[inline]
    pub(crate) unsafe fn downcast_ref_unchecked<T: std::any::Any + 'static>(&self) -> &T {
        match self {
            // SAFETY (caller contract): `data` holds a valid T, and T is
            // align-1 by construction, so the cast pointer is aligned.
            Self::Inline(data) => unsafe { &*(data.as_ptr() as *const T) },
            // SAFETY (caller contract): the erased type is exactly T.
            Self::Heap(rc_any) => unsafe { rc_any.downcast_ref::<T>().unwrap_unchecked() },
        }
    }
}
150
/// An `UnsafeCell` that can live in a `static` (plain `UnsafeCell` is
/// `!Sync`); mirrors the unstable `std::cell::SyncUnsafeCell`.
pub(crate) struct SyncUnsafeCell<T>(std::cell::UnsafeCell<T>);
// SAFETY: forwards the `Sync` claim to `T: Sync`. Note that this alone
// does not make the borrow helpers below race-free: they hand out
// `&T` / `&mut T` with no runtime checking, so soundness rests entirely
// on call sites upholding Rust's aliasing rules (see method notes).
unsafe impl<T: Sync> Sync for SyncUnsafeCell<T> {}
impl<T> SyncUnsafeCell<T> {
    /// Wraps `value`; `const` so it can initialize a `static`.
    pub(crate) const fn new(value: T) -> Self {
        Self(std::cell::UnsafeCell::new(value))
    }

    /// Runs `f` with a shared borrow of the contents.
    ///
    /// NOTE(review): no synchronization or borrow tracking is performed;
    /// callers must ensure no `&mut T` to this cell is live while `f`
    /// runs — presumably the crate only touches these statics from one
    /// thread; confirm against call sites.
    #[inline(always)]
    pub(crate) fn with_borrow<R>(&'static self, f: impl FnOnce(&T) -> R) -> R {
        // SAFETY: caller contract above — no aliasing `&mut T` is live.
        let borrow = unsafe { &*self.0.get() };
        f(borrow)
    }
    /// Runs `f` with an exclusive borrow of the contents.
    ///
    /// NOTE(review): creating this `&mut T` is UB if any other borrow of
    /// the same cell is live (including a reentrant `with_borrow` from
    /// inside `f`) — verify call sites never nest borrows of one cell.
    #[inline(always)]
    pub(crate) fn with_borrow_mut<R>(&'static self, f: impl FnOnce(&mut T) -> R) -> R {
        // SAFETY: caller contract above — exclusive access during `f`.
        let borrow_mut = unsafe { &mut *self.0.get() };
        f(borrow_mut)
    }
}
169
/// Bump arena that hands out stable `NonNull<T>` pointers.
///
/// Storage is a list of heap-allocated chunks of `CHUNK_SIZE` slots; a
/// chunk is never moved or freed until the arena is dropped, so pointers
/// returned by `alloc` stay valid as more chunks are added (the `chunks`
/// Vec may reallocate, but it stores only pointers to the chunks, not
/// the chunks themselves).
///
/// Invariants relied on by `alloc` and `Drop`:
/// - when any chunk exists, `current_chunk_index` indexes the last one;
/// - every chunk before the last is fully initialized;
/// - in the last chunk, exactly the first `next_slot_index` slots are
///   initialized.
pub(crate) struct ChunkedArena<T, const CHUNK_SIZE: usize> {
    // Owned allocations leaked via `Box::into_raw`; reclaimed in `Drop`.
    chunks: Vec<std::ptr::NonNull<[std::mem::MaybeUninit<T>; CHUNK_SIZE]>>,
    current_chunk_index: usize,
    next_slot_index: usize,
}
impl<T, const CHUNK_SIZE: usize> ChunkedArena<T, CHUNK_SIZE> {
    /// Allocates the first chunk for an arena created with `new_const`.
    /// `#[cold]`: expected to run at most once per arena.
    #[cold]
    fn make_first_chunk(&mut self) {
        if self.chunks.is_empty() {
            let chunk = Box::new([const { std::mem::MaybeUninit::uninit() }; CHUNK_SIZE]);
            // SAFETY: `Box::into_raw` never returns null.
            self.chunks
                .push(unsafe { std::ptr::NonNull::new_unchecked(Box::into_raw(chunk)) });
        }
    }

    /// Chunk-less arena usable in `const`/`static` context; the first
    /// chunk is allocated lazily on the first `alloc`.
    pub(crate) const fn new_const() -> Self {
        // CHUNK_SIZE == 0 would make every slot write in `alloc` land
        // out of bounds of a zero-length array; reject at compile/const time.
        assert!(CHUNK_SIZE > 0, "CHUNK_SIZE must be >= 1");
        Self {
            chunks: Vec::new(),
            current_chunk_index: 0,
            next_slot_index: 0,
        }
    }

    /// Arena with its first chunk pre-allocated.
    pub(crate) fn new() -> Self {
        let mut this = Self::new_const();
        this.make_first_chunk();
        this
    }

    /// Moves `value` into the arena and returns a pointer that stays
    /// valid (and never moves) until the arena itself is dropped.
    pub(crate) fn alloc(&mut self, value: T) -> std::ptr::NonNull<T> {
        if self.chunks.is_empty() {
            // Only reachable for a `new_const` arena before its first use.
            self.make_first_chunk();
        }

        if self.next_slot_index >= CHUNK_SIZE {
            // Current chunk is full: open a fresh one.
            let chunk = Box::new([const { std::mem::MaybeUninit::uninit() }; CHUNK_SIZE]);
            // SAFETY: `Box::into_raw` never returns null.
            self.chunks
                .push(unsafe { std::ptr::NonNull::new_unchecked(Box::into_raw(chunk)) });
            self.current_chunk_index += 1;
            self.next_slot_index = 0;
        }
        // SAFETY: a chunk was ensured above so `current_chunk_index` is in
        // bounds, and after the roll-over check `next_slot_index <
        // CHUNK_SIZE`, so `slot_ptr` points into the chunk array.
        let alloced_ptr: &mut T = unsafe {
            let chunk_ptr = self
                .chunks
                .get_unchecked_mut(self.current_chunk_index)
                .as_ptr();
            // A `*mut [MaybeUninit<T>; N]` to the array doubles as a
            // pointer to its first element.
            let chunk_head_ptr = chunk_ptr as *mut std::mem::MaybeUninit<T>;
            let slot_ptr = chunk_head_ptr.add(self.next_slot_index);
            std::mem::MaybeUninit::write(&mut *slot_ptr, value)
        };
        self.next_slot_index += 1;
        // SAFETY: derived from a valid `&mut T`, hence non-null.
        unsafe { std::ptr::NonNull::new_unchecked(alloced_ptr) }
    }
}
impl<T, const CHUNK_SIZE: usize> Default for ChunkedArena<T, CHUNK_SIZE> {
    fn default() -> Self {
        Self::new()
    }
}
impl<T, const CHUNK_SIZE: usize> Drop for ChunkedArena<T, CHUNK_SIZE> {
    /// Drops every value that was allocated, then frees the chunks.
    fn drop(&mut self) {
        for (i, chunk) in self.chunks.iter_mut().enumerate() {
            // Chunks before the current (last) one are fully initialized;
            // the last one is filled only up to `next_slot_index`.
            let initialized_count = if i == self.current_chunk_index {
                self.next_slot_index
            } else {
                CHUNK_SIZE
            };

            let chunk_ptr = chunk.as_ptr();
            let chunk_head_ptr = chunk_ptr as *mut std::mem::MaybeUninit<T>;
            for j in 0..initialized_count {
                // SAFETY: slots 0..initialized_count were written by
                // `alloc` and each is dropped exactly once, here.
                unsafe {
                    std::mem::MaybeUninit::assume_init_drop(&mut *chunk_head_ptr.add(j));
                }
            }
        }

        // Reclaim the chunk allocations themselves; they were leaked via
        // `Box::into_raw` in `make_first_chunk`/`alloc`.
        for chunk in self.chunks.drain(..) {
            // SAFETY: each pointer came from `Box::into_raw` and is
            // reconstituted (and freed) exactly once.
            unsafe {
                let _ = Box::from_raw(chunk.as_ptr());
            }
        }
    }
}