solar_data_structures/thin_slice.rs

use bumpalo::Bump;
use std::{alloc::Layout, fmt};
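/// A [`RawThinSlice`] with no header.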
pub type ThinSlice<T> = RawThinSlice<(), T>;
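/// A slice that stores its length, and an optional header `H`, inline in the
/// same allocation as its `T` elements, so that a reference to it is a single
/// thin pointer rather than a wide `&[T]`.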
pub struct RawThinSlice<H, T> {
    skel: ThinSliceSkeleton<H, T>,
    _opaque: OpaqueListContents,
}
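/// The fixed-size prefix of a [`RawThinSlice`] allocation; the `T` elements
/// are laid out in memory immediately after `data`.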
#[repr(C)]
struct ThinSliceSkeleton<H, T> {
    header: H,
    len: usize,
    data: [T; 0],
}
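// On nightly, an extern type keeps `RawThinSlice` from being constructed or
// moved by value while references to it stay thin pointers; the stable
// fallback is a plain unit type.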
#[cfg(not(feature = "nightly"))]
type OpaqueListContents = ();
#[cfg(feature = "nightly")]
unsafe extern "C" {
    type OpaqueListContents;
}
impl<H, T> RawThinSlice<H, T> {
    #[inline]
    pub fn header(&self) -> &H {
        &self.skel.header
    }

    #[inline]
    pub fn header_mut(&mut self) -> &mut H {
        &mut self.skel.header
    }

    #[inline]
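    /// Allocates a copy of `slice`, preceded by `header`, in `arena`.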
    #[expect(clippy::mut_from_ref)]
    pub(super) fn from_arena<'a>(arena: &'a Bump, header: H, slice: &[T]) -> &'a mut Self {
        let mem = Self::alloc_from_arena(arena, slice.len());
        unsafe { Self::init(mem, header, slice) }
    }

    #[inline]
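    /// Allocates a slice of `len` elements, initializing the `i`-th element
    /// with `f(i)` and preceding them with `header`, in `arena`.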
    #[expect(clippy::mut_from_ref)]
    pub(super) fn from_arena_with(
        arena: &Bump,
        header: H,
        len: usize,
        f: impl FnMut(usize) -> T,
    ) -> &mut Self {
        let mem = Self::alloc_from_arena(arena, len);
        unsafe { Self::init_with(mem, header, len, f) }
    }

    #[inline]
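    /// Allocates uninitialized space in `arena` for a `ThinSliceSkeleton<H, T>`
    /// followed by `len` `T`s.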
    fn alloc_from_arena(arena: &Bump, len: usize) -> *mut Self {
        let (layout, _offset) = Layout::new::<ThinSliceSkeleton<H, T>>()
            .extend(Layout::array::<T>(len).unwrap())
            .unwrap();
        arena.alloc_layout(layout).as_ptr() as *mut Self
    }

    #[inline]
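    /// # Safety
    ///
    /// `mem` must point to a fresh allocation obtained from
    /// [`Self::alloc_from_arena`] called with `slice.len()`.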
    unsafe fn init<'a>(mem: *mut Self, header: H, slice: &[T]) -> &'a mut Self {
        unsafe {
            (&raw mut (*mem).skel.header).write(header);
            (&raw mut (*mem).skel.len).write(slice.len());
            (&raw mut (*mem).skel.data)
                .cast::<T>()
                .copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            &mut *mem
        }
    }

    #[inline]
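    /// # Safety
    ///
    /// `mem` must point to a fresh allocation obtained from
    /// [`Self::alloc_from_arena`] called with the same `len`.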
    unsafe fn init_with<'a>(
        mem: *mut Self,
        header: H,
        len: usize,
        mut f: impl FnMut(usize) -> T,
    ) -> &'a mut Self {
        unsafe {
            (&raw mut (*mem).skel.header).write(header);
            (&raw mut (*mem).skel.len).write(len);
            for i in 0..len {
                (&raw mut (*mem).skel.data).cast::<T>().add(i).write(f(i));
            }
            &mut *mem
        }
    }

    #[inline(always)]
    pub fn len(&self) -> usize {
        self.skel.len
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.skel.len == 0
    }

    #[inline(always)]
    pub fn as_slice(&self) -> &[T] {
        self
    }

    #[inline(always)]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self
    }
}

unsafe impl<H: Send, T: Send> Send for RawThinSlice<H, T> {}
unsafe impl<H: Sync, T: Sync> Sync for RawThinSlice<H, T> {}
impl<H, T> std::ops::Deref for RawThinSlice<H, T> {
    type Target = [T];

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        let data_ptr = (&raw const self.skel.data).cast::<T>();
        unsafe { std::slice::from_raw_parts(data_ptr, self.len()) }
    }
}

impl<H, T> std::ops::DerefMut for RawThinSlice<H, T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        let data_ptr = (&raw mut self.skel.data).cast::<T>();
        unsafe { std::slice::from_raw_parts_mut(data_ptr, self.len()) }
    }
}
impl<T> Default for &RawThinSlice<(), T> {
    #[inline(always)]
    fn default() -> Self {
        assert!(align_of::<T>() <= align_of::<MaxAlign>());

        unsafe { &*((&raw const EMPTY) as *const RawThinSlice<(), T>) }
    }
}
impl<T> Default for &mut RawThinSlice<(), T> {
    #[inline(always)]
    fn default() -> Self {
        assert!(align_of::<T>() <= align_of::<MaxAlign>());

        unsafe { &mut *((&raw mut EMPTY) as *mut RawThinSlice<(), T>) }
    }
}
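// Backing storage for the `Default` impls above: one static empty skeleton,
// over-aligned via `MaxAlign` so that any element type with alignment at most
// 64 can reuse it.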
#[repr(align(64))]
struct MaxAlign;

static mut EMPTY: ThinSliceSkeleton<(), MaxAlign> =
    ThinSliceSkeleton { header: (), len: 0, data: [] };
impl<H, T: fmt::Debug> fmt::Debug for RawThinSlice<H, T> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}

impl<H, T: PartialEq> PartialEq for RawThinSlice<H, T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl<H, T: Eq> Eq for RawThinSlice<H, T> {}
impl<H, T: PartialOrd> PartialOrd for RawThinSlice<H, T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl<H, T: Ord> Ord for RawThinSlice<H, T> {
    #[inline]
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<H, T: std::hash::Hash> std::hash::Hash for RawThinSlice<H, T> {
    #[inline]
    fn hash<Hasher: std::hash::Hasher>(&self, state: &mut Hasher) {
        self.as_slice().hash(state)
    }
}
impl<'a, H, T> IntoIterator for &'a RawThinSlice<H, T> {
    type Item = &'a T;
    type IntoIter = std::slice::Iter<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, H, T> IntoIterator for &'a mut RawThinSlice<H, T> {
    type Item = &'a mut T;
    type IntoIter = std::slice::IterMut<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn empty() {
        let thin_slice = <&ThinSlice<u64>>::default();
        assert_eq!(thin_slice.len(), 0);
        assert_eq!(thin_slice.as_slice().len(), 0);
        assert!(thin_slice.is_empty());

        let thin_slice = <&mut ThinSlice<u64>>::default();
        assert_eq!(thin_slice.len(), 0);
        assert_eq!(thin_slice.as_slice().len(), 0);
        assert!(thin_slice.is_empty());
    }
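    // A minimal usage sketch, not part of the original test suite: builds thin
    // slices in a `bumpalo` arena via the crate-internal constructors and
    // checks the stored header, length, and elements.
    #[test]
    fn from_arena_roundtrip() {
        let arena = bumpalo::Bump::new();

        let s = RawThinSlice::from_arena(&arena, 7u8, &[1u64, 2, 3]);
        assert_eq!(*s.header(), 7);
        assert_eq!(s.len(), 3);
        assert_eq!(s.as_slice(), &[1, 2, 3]);

        let s = RawThinSlice::from_arena_with(&arena, (), 4, |i| i as u64);
        assert_eq!(s.as_slice(), &[0, 1, 2, 3]);
    }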
}