1use core::marker::PhantomData;
10use core::mem::{self, MaybeUninit};
11use core::ops::{Deref, DerefMut};
12use core::ptr;
13
14use crate::Buffer;
15use crate::meta::{self, Metadata};
16
/// Byte size of the metadata slot reserved at the front of the buffer for
/// unsized `T` (`Metadata` wraps a single `*const ()`, so this is one
/// pointer's worth of bytes).
const META_SIZE: usize = mem::size_of::<Metadata>();
19
20const fn assert_flat_buffer<T: ?Sized, B: Buffer>() {
25 if meta::is_fat_ptr::<T>() {
26 assert!(
27 B::CAPACITY >= META_SIZE,
28 "Flat: buffer must be at least pointer-sized for ?Sized types"
29 );
30 }
31}
32
/// An inline "flat" smart pointer: the (possibly unsized) value `T` is
/// stored directly inside the fixed-size buffer `B` instead of on the heap.
///
/// Layout (`repr(C)`): for unsized `T`, the first `META_SIZE` bytes hold the
/// pointer metadata (e.g. a vtable pointer) and the value follows; for sized
/// `T`, the value starts at offset 0 (see `new_raw` / `as_ptr`).
#[repr(C)]
pub struct Flat<T: ?Sized, B: Buffer> {
    // Raw storage only — never holds a live `B`, just the bytes of the
    // stored value (plus metadata when `T` is unsized).
    inner: MaybeUninit<B>,
    // Marks logical ownership of a `T` for drop-check and variance.
    _marker: PhantomData<T>,
}
54
impl<T: ?Sized, B: Buffer> Flat<T, B> {
    /// Number of bytes available for the stored value itself.
    ///
    /// For unsized `T` the leading `META_SIZE` bytes are reserved for
    /// pointer metadata, so the payload capacity shrinks by that amount
    /// (saturating at zero for buffers smaller than the metadata slot).
    pub const fn capacity() -> usize {
        if meta::is_fat_ptr::<T>() {
            B::CAPACITY.saturating_sub(META_SIZE)
        } else {
            B::CAPACITY
        }
    }

    /// Stores `val` inline, unsizing it to `T` using the metadata carried
    /// by `ptr`.
    ///
    /// `ptr` is only fed to `meta::extract_metadata` — it is never
    /// dereferenced, so it may dangle by the time this runs.
    ///
    /// # Panics
    /// If `V` does not fit in `Self::capacity()` bytes, or if `V` is more
    /// aligned than `usize`.
    ///
    /// # Safety
    /// `ptr` must have been derived from a `V` coerced to `T`, so that its
    /// metadata (e.g. vtable) actually describes `val`; otherwise later
    /// dereferences through this `Flat` are unsound.
    #[doc(hidden)]
    pub unsafe fn new_raw<V>(val: V, ptr: *const T) -> Self {
        // Evaluated at compile time: buffer must fit the metadata slot.
        const { assert_flat_buffer::<T, B>() }

        let size = mem::size_of::<V>();
        let align = mem::align_of::<V>();

        assert!(
            size <= Self::capacity(),
            "nexus_smartptr::Flat: value of type `{}` ({size} bytes) exceeds \
            capacity ({} bytes)",
            core::any::type_name::<V>(),
            Self::capacity(),
        );
        // The value is written at offset 0 or META_SIZE (pointer-sized), so
        // `usize` alignment suffices for both slots. NOTE(review): this
        // presumes the `B: Buffer` storage itself is at least word-aligned —
        // confirm against the `Buffer` trait's contract.
        assert!(
            align <= mem::align_of::<usize>(),
            "nexus_smartptr::Flat: alignment of `{}` ({align}) exceeds \
            buffer alignment ({})",
            core::any::type_name::<V>(),
            mem::align_of::<usize>(),
        );

        let metadata = meta::extract_metadata(ptr);

        let mut this: Self = Flat {
            inner: MaybeUninit::uninit(),
            _marker: PhantomData,
        };

        let base = this.inner.as_mut_ptr().cast::<u8>();
        // SAFETY: the size and alignment asserts above make both writes
        // in-bounds and aligned; `base` points at uninitialized storage
        // exclusively owned by `this`.
        unsafe {
            if meta::is_fat_ptr::<T>() {
                // Unsized layout: [metadata][value...]
                base.cast::<*const ()>().write(metadata.0);
                base.add(META_SIZE).cast::<V>().write(val);
            } else {
                // Sized layout: value occupies the buffer from offset 0.
                base.cast::<V>().write(val);
            }
        }

        this
    }

    /// Reconstructs the (possibly fat) `*const T` to the stored value from
    /// the buffer contents.
    #[inline(always)]
    fn as_ptr(&self) -> *const T {
        let base = self.inner.as_ptr().cast::<u8>();
        if meta::is_fat_ptr::<T>() {
            // SAFETY: `new_raw` wrote valid metadata at offset 0 and the
            // value immediately after it, matching this read layout.
            let metadata = Metadata(unsafe { base.cast::<*const ()>().read() });
            let data = unsafe { base.add(META_SIZE) }.cast::<()>();
            unsafe { meta::make_ptr(data, metadata) }
        } else {
            // Sized `T`: no metadata slot; value starts at the buffer base.
            unsafe { meta::make_ptr(base.cast::<()>(), Metadata::NULL) }
        }
    }

    /// Mutable counterpart of `as_ptr`; same layout invariants apply.
    #[inline(always)]
    fn as_mut_ptr(&mut self) -> *mut T {
        let base = self.inner.as_mut_ptr().cast::<u8>();
        if meta::is_fat_ptr::<T>() {
            // SAFETY: same constructor-established layout as in `as_ptr`.
            let metadata = Metadata(unsafe { base.cast::<*const ()>().read() });
            let data = unsafe { base.add(META_SIZE) }.cast::<()>();
            unsafe { meta::make_ptr_mut(data, metadata) }
        } else {
            unsafe { meta::make_ptr_mut(base.cast::<()>(), Metadata::NULL) }
        }
    }
}
160
161impl<T, B: Buffer> Flat<T, B> {
163 pub fn new(val: T) -> Self {
181 assert!(
182 mem::size_of::<T>() <= B::CAPACITY,
183 "nexus_smartptr::Flat: value of type `{}` ({} bytes) exceeds \
184 buffer capacity ({} bytes)",
185 core::any::type_name::<T>(),
186 mem::size_of::<T>(),
187 B::CAPACITY,
188 );
189 assert!(
190 mem::align_of::<T>() <= mem::align_of::<usize>(),
191 "nexus_smartptr::Flat: alignment of `{}` ({}) exceeds \
192 buffer alignment ({})",
193 core::any::type_name::<T>(),
194 mem::align_of::<T>(),
195 mem::align_of::<usize>(),
196 );
197
198 let mut this: Self = Flat {
199 inner: MaybeUninit::uninit(),
200 _marker: PhantomData,
201 };
202
203 unsafe {
206 this.inner.as_mut_ptr().cast::<T>().write(val);
207 }
208
209 this
210 }
211}
212
impl<T: ?Sized, B: Buffer> Deref for Flat<T, B> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        // SAFETY: every constructor initializes the buffer with a valid `T`,
        // so `as_ptr` yields a pointer valid for shared access for the
        // lifetime of `&self`.
        unsafe { &*self.as_ptr() }
    }
}
223
impl<T: ?Sized, B: Buffer> DerefMut for Flat<T, B> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: the buffer holds an initialized `T`, and `&mut self`
        // guarantees exclusive access for the returned borrow's lifetime.
        unsafe { &mut *self.as_mut_ptr() }
    }
}
231
impl<T: ?Sized, B: Buffer> Drop for Flat<T, B> {
    fn drop(&mut self) {
        // SAFETY: the buffer always contains an initialized `T` (written by
        // a constructor and never moved out), so it must be dropped in
        // place exactly once, here.
        unsafe {
            ptr::drop_in_place(self.as_mut_ptr());
        }
    }
}
241
// SAFETY: `Flat` owns its `T` by value; the `B` buffer is raw storage only
// and is never used as a live `B`. Sending/sharing a `Flat` is therefore
// exactly sending/sharing the contained `T`, so the bounds mirror `T`'s.
#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl<T: ?Sized + Send, B: Buffer> Send for Flat<T, B> {}
unsafe impl<T: ?Sized + Sync, B: Buffer> Sync for Flat<T, B> {}
248
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{B16, B32, B64};
    use core::fmt::Display;
    use std::sync::atomic::{AtomicUsize, Ordering};

    trait Greet {
        fn greet(&self) -> &str;
    }

    // Zero-sized implementor: exercises the ZST payload path.
    struct Hello;
    impl Greet for Hello {
        fn greet(&self) -> &str {
            "hello"
        }
    }

    // Non-zero-sized implementor.
    struct World(u64);
    impl Greet for World {
        fn greet(&self) -> &str {
            "world"
        }
    }

    // Helper constructing a Flat<dyn Greet, B> from a concrete value.
    // `ptr` is only used for metadata extraction and never dereferenced
    // after `val` moves into `new_raw`, so the dangling address is fine.
    fn make_flat_greet<V: Greet + 'static, B: Buffer>(val: V) -> Flat<dyn Greet, B> {
        let ptr: *const dyn Greet = &val as &dyn Greet;
        unsafe { Flat::new_raw(val, ptr) }
    }

    // `repr(C)` + metadata-in-buffer means a Flat is exactly its buffer.
    #[test]
    fn total_size_matches_buffer() {
        assert_eq!(mem::size_of::<Flat<dyn Greet, B16>>(), 16);
        assert_eq!(mem::size_of::<Flat<dyn Greet, B32>>(), 32);
        assert_eq!(mem::size_of::<Flat<dyn Greet, B64>>(), 64);
        assert_eq!(mem::size_of::<Flat<u64, B32>>(), 32);
    }

    #[test]
    fn sized_capacity_is_full_buffer() {
        assert_eq!(Flat::<u64, B32>::capacity(), 32);
        assert_eq!(Flat::<u64, B64>::capacity(), 64);
    }

    // NOTE(review): these expected values assume 8-byte pointers (64-bit
    // targets), since META_SIZE is pointer-sized.
    #[test]
    fn unsized_capacity_reserves_metadata() {
        assert_eq!(Flat::<dyn Greet, B16>::capacity(), 8);
        assert_eq!(Flat::<dyn Greet, B32>::capacity(), 24);
        assert_eq!(Flat::<dyn Greet, B64>::capacity(), 56);
    }

    #[test]
    fn sized_new() {
        let f: Flat<u64, B32> = Flat::new(42);
        assert_eq!(*f, 42);
    }

    #[test]
    fn sized_new_struct() {
        #[derive(Debug, PartialEq)]
        struct Pair(u64, u64);

        let f: Flat<Pair, B32> = Flat::new(Pair(1, 2));
        assert_eq!(*f, Pair(1, 2));
    }

    #[test]
    fn sized_deref_mut() {
        let mut f: Flat<u64, B16> = Flat::new(10);
        *f = 20;
        assert_eq!(*f, 20);
    }

    #[test]
    fn zst_inline() {
        let f: Flat<dyn Greet, B16> = make_flat_greet(Hello);
        assert_eq!(f.greet(), "hello");
    }

    #[test]
    fn non_zst_inline() {
        let f: Flat<dyn Greet, B32> = make_flat_greet(World(42));
        assert_eq!(f.greet(), "world");
    }

    // Mutation through the fat pointer must reach the inline payload.
    #[test]
    fn deref_mut_trait_object() {
        trait Increment {
            fn inc(&mut self);
            fn val(&self) -> u64;
        }

        struct Counter(u64);
        impl Increment for Counter {
            fn inc(&mut self) {
                self.0 += 1;
            }
            fn val(&self) -> u64 {
                self.0
            }
        }

        fn make<V: Increment + 'static, B: Buffer>(val: V) -> Flat<dyn Increment, B> {
            let ptr: *const dyn Increment = &val as &dyn Increment;
            unsafe { Flat::new_raw(val, ptr) }
        }

        let mut f: Flat<dyn Increment, B32> = make(Counter(0));
        f.inc();
        f.inc();
        assert_eq!(f.val(), 2);
    }

    // Drop must be dispatched through the stored vtable exactly once.
    #[test]
    fn drop_runs() {
        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        struct Dropper;
        impl Drop for Dropper {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::Relaxed);
            }
        }
        impl Greet for Dropper {
            fn greet(&self) -> &str {
                "dropping"
            }
        }

        DROP_COUNT.store(0, Ordering::Relaxed);
        {
            let f: Flat<dyn Greet, B16> = make_flat_greet(Dropper);
            assert_eq!(f.greet(), "dropping");
        }
        assert_eq!(DROP_COUNT.load(Ordering::Relaxed), 1);
    }

    // Same guarantee for the sized (no-vtable) path.
    #[test]
    fn drop_runs_sized() {
        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        struct Dropper(u64);
        impl Drop for Dropper {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::Relaxed);
            }
        }

        DROP_COUNT.store(0, Ordering::Relaxed);
        {
            let _f: Flat<Dropper, B16> = Flat::new(Dropper(99));
        }
        assert_eq!(DROP_COUNT.load(Ordering::Relaxed), 1);
    }

    // Message from `new_raw` (unsized path): "exceeds capacity".
    #[test]
    #[should_panic(expected = "exceeds capacity")]
    fn panics_on_overflow_unsized() {
        struct Big([u64; 8]);
        impl Greet for Big {
            fn greet(&self) -> &str {
                "big"
            }
        }
        let _: Flat<dyn Greet, B16> = make_flat_greet(Big([0; 8]));
    }

    // Message from `new` (sized path): "exceeds buffer capacity".
    #[test]
    #[should_panic(expected = "exceeds buffer capacity")]
    fn panics_on_overflow_sized() {
        let _: Flat<[u64; 8], B16> = Flat::new([0u64; 8]);
    }

    #[test]
    fn display_trait_object() {
        let val: u32 = 42;
        let ptr: *const dyn Display = &val as &dyn Display;
        let f: Flat<dyn Display, B32> = unsafe { Flat::new_raw(val, ptr) };
        assert_eq!(format!("{}", &*f), "42");
    }

    // Two Flats of the same trait type must keep distinct vtables.
    #[test]
    fn different_concrete_types_same_trait() {
        let f1: Flat<dyn Greet, B32> = make_flat_greet(Hello);
        let f2: Flat<dyn Greet, B32> = make_flat_greet(World(99));
        assert_eq!(f1.greet(), "hello");
        assert_eq!(f2.greet(), "world");
    }

    #[test]
    fn macro_construction() {
        let f: Flat<dyn Greet, B32> = crate::flat!(Hello);
        assert_eq!(f.greet(), "hello");
    }

    #[test]
    fn macro_display() {
        let f: Flat<dyn Display, B32> = crate::flat!(42_u32);
        assert_eq!(format!("{}", &*f), "42");
    }
}