extern crate alloc;

use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr;

use alloc::alloc::{Layout, alloc, dealloc, handle_alloc_error};

use crate::Buffer;
use crate::meta::{self, Metadata};

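/// Size in bytes of the metadata word stored for fat-pointer targets.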
const META_SIZE: usize = mem::size_of::<Metadata>();

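/// Size in bytes of the heap-pointer slot present in every `Flex`.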
const PTR_SIZE: usize = mem::size_of::<*mut u8>();

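/// Compile-time check that buffer `B` can hold the fixed `Flex` overhead:
/// a metadata word plus the heap pointer for fat-pointer targets, or just
/// the heap pointer for sized ones.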
const fn assert_flex_buffer<T: ?Sized, B: Buffer>() {
    if meta::is_fat_ptr::<T>() {
        assert!(
            B::CAPACITY >= META_SIZE + PTR_SIZE,
            "Flex: buffer too small for ?Sized overhead (metadata + heap pointer)"
        );
    } else {
        assert!(
            B::CAPACITY >= PTR_SIZE,
            "Flex: buffer too small for Sized overhead (heap pointer)"
        );
    }
}

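/// A box-like container that stores its value inline in the buffer `B` when
/// it fits, and falls back to a heap allocation otherwise.
///
/// Layout (`repr(C)`): an optional metadata word (only for fat-pointer
/// targets such as trait objects), then a heap pointer that doubles as the
/// inline/heap discriminant (null means inline), then the inline storage.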
#[repr(C)]
pub struct Flex<T: ?Sized, B: Buffer> {
    inner: MaybeUninit<B>,
    _marker: PhantomData<T>,
}

impl<T: ?Sized, B: Buffer> Flex<T, B> {
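    /// Byte offset of the heap-pointer slot: for fat-pointer targets the
    /// metadata word comes first, so the pointer sits just past it.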
    const PTR_OFFSET: usize = if meta::is_fat_ptr::<T>() {
        META_SIZE
    } else {
        0
    };

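    /// Byte offset of the inline value storage, directly after the pointer slot.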
    const VALUE_OFFSET: usize = Self::PTR_OFFSET + PTR_SIZE;

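    /// Number of bytes left for inline storage once the fixed overhead is
    /// paid. For example (values mirror the tests below and assume a 64-bit
    /// target):
    ///
    /// ```ignore
    /// assert_eq!(Flex::<u64, B32>::capacity(), 24);        // 32 - 8 (pointer slot)
    /// assert_eq!(Flex::<dyn Greet, B32>::capacity(), 16);  // 32 - 16 (metadata + pointer)
    /// ```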
    pub const fn capacity() -> usize {
        B::CAPACITY.saturating_sub(Self::VALUE_OFFSET)
    }

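    /// Returns `true` if the value is stored inline; a null heap pointer
    /// marks the inline representation.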
    pub fn is_inline(&self) -> bool {
        self.heap_ptr().is_null()
    }

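    /// Reads the heap-pointer slot; null means the value lives inline.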
    #[inline(always)]
    fn heap_ptr(&self) -> *mut u8 {
        let base = self.inner.as_ptr().cast::<u8>();
        // SAFETY: every constructor initializes the pointer slot.
        unsafe { base.add(Self::PTR_OFFSET).cast::<*mut u8>().read() }
    }

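    /// Builds a `Flex` from a value and an unsized raw pointer to it, from
    /// which the fat-pointer metadata (e.g. a vtable) is extracted. The
    /// `flex!` macro is the intended entry point and produces the pointer
    /// safely; a sketch of direct use (mirroring the tests below):
    ///
    /// ```ignore
    /// let val: u32 = 42;
    /// let ptr: *const dyn Display = &val as &dyn Display;
    /// let f: Flex<dyn Display, B32> = unsafe { Flex::new_raw(val, ptr) };
    /// ```
    ///
    /// # Safety
    ///
    /// `ptr` must be a valid `*const T` obtained by unsizing a reference to
    /// `val`, so that its metadata describes `val`'s concrete type `V`.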
    #[doc(hidden)]
    pub unsafe fn new_raw<V>(val: V, ptr: *const T) -> Self {
        const { assert_flex_buffer::<T, B>() }

        let size = mem::size_of::<V>();
        let align = mem::align_of::<V>();
        let metadata = meta::extract_metadata(ptr);

        // Store inline only if the value fits the remaining buffer space and
        // needs no stricter alignment than the usize-aligned buffer provides.
        if size <= Self::capacity() && align <= mem::align_of::<usize>() {
            Self::new_inline(val, metadata)
        } else {
            Self::new_heap(val, metadata)
        }
    }

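    /// Writes `val` into the buffer's value slot and nulls the heap pointer.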
    fn new_inline<V>(val: V, metadata: Metadata) -> Self {
        let mut this: Self = Flex {
            inner: MaybeUninit::uninit(),
            _marker: PhantomData,
        };
        let base = this.inner.as_mut_ptr().cast::<u8>();

        // SAFETY: `assert_flex_buffer` plus the caller's size/align checks
        // guarantee that all three writes stay within the buffer.
        unsafe {
            if meta::is_fat_ptr::<T>() {
                base.cast::<*const ()>().write(metadata.0);
            }
            // A null heap pointer marks the inline representation.
            base.add(Self::PTR_OFFSET)
                .cast::<*mut u8>()
                .write(ptr::null_mut());
            base.add(Self::VALUE_OFFSET).cast::<V>().write(val);
        }

        this
    }

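    /// Moves `val` to a fresh heap allocation (or uses a dangling pointer
    /// for zero-sized values) and records that pointer in the buffer.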
    fn new_heap<V>(val: V, metadata: Metadata) -> Self {
        let layout = Layout::new::<V>();
        let heap = if layout.size() == 0 {
            // Zero-sized values own no allocation; a dangling (non-null,
            // aligned) pointer stands in for one. The value must not be
            // dropped here, since `Drop` will still run `drop_in_place`
            // through this pointer.
            mem::forget(val);
            ptr::NonNull::<V>::dangling().as_ptr().cast::<u8>()
        } else {
            let p = unsafe { alloc(layout) };
            if p.is_null() {
                handle_alloc_error(layout);
            }
            // SAFETY: `p` is non-null and was allocated with `V`'s layout.
            unsafe {
                p.cast::<V>().write(val);
            }
            p
        };

        let mut this: Self = Flex {
            inner: MaybeUninit::uninit(),
            _marker: PhantomData,
        };
        let base = this.inner.as_mut_ptr().cast::<u8>();

        // SAFETY: `assert_flex_buffer` guarantees room for the metadata word
        // (if any) and the heap pointer.
        unsafe {
            if meta::is_fat_ptr::<T>() {
                base.cast::<*const ()>().write(metadata.0);
            }
            base.add(Self::PTR_OFFSET).cast::<*mut u8>().write(heap);
        }

        this
    }

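    /// Address of the stored value, wherever it currently lives.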
    #[inline(always)]
    fn data_ptr(&self) -> *const () {
        let hp = self.heap_ptr();
        if hp.is_null() {
            let base = self.inner.as_ptr().cast::<u8>();
            // SAFETY: VALUE_OFFSET never exceeds the buffer's extent.
            unsafe { base.add(Self::VALUE_OFFSET) }.cast::<()>()
        } else {
            hp.cast::<()>().cast_const()
        }
    }

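    /// Mutable address of the stored value, wherever it currently lives.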
    #[inline(always)]
    fn data_ptr_mut(&mut self) -> *mut () {
        let hp = self.heap_ptr();
        if hp.is_null() {
            let base = self.inner.as_mut_ptr().cast::<u8>();
            unsafe { base.add(Self::VALUE_OFFSET) }.cast::<()>()
        } else {
            hp.cast::<()>()
        }
    }

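    /// Reassembles a (possibly fat) `*const T` from the data address and the
    /// stored metadata word.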
    #[inline(always)]
    fn as_ptr(&self) -> *const T {
        let data = self.data_ptr();
        if meta::is_fat_ptr::<T>() {
            let base = self.inner.as_ptr().cast::<u8>();
            // SAFETY: the metadata word was initialized by the constructor.
            let metadata = Metadata(unsafe { base.cast::<*const ()>().read() });
            unsafe { meta::make_ptr(data, metadata) }
        } else {
            unsafe { meta::make_ptr(data, Metadata::NULL) }
        }
    }

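    /// Mutable counterpart of [`Self::as_ptr`].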
    #[inline(always)]
    fn as_mut_ptr(&mut self) -> *mut T {
        let data = self.data_ptr_mut();
        if meta::is_fat_ptr::<T>() {
            let base = self.inner.as_ptr().cast::<u8>();
            let metadata = Metadata(unsafe { base.cast::<*const ()>().read() });
            unsafe { meta::make_ptr_mut(data, metadata) }
        } else {
            unsafe { meta::make_ptr_mut(data, Metadata::NULL) }
        }
    }
}

impl<T, B: Buffer> Flex<T, B> {
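    /// Creates a `Flex` from a sized value, storing it inline when it fits
    /// and on the heap otherwise. Both paths in brief (mirroring the tests
    /// below; sizes assume a 64-bit target):
    ///
    /// ```ignore
    /// let small: Flex<u64, B32> = Flex::new(42);         // 8 <= 24: inline
    /// assert!(small.is_inline());
    ///
    /// let big: Flex<[u64; 4], B16> = Flex::new([0; 4]);  // 32 > 8: heap
    /// assert!(!big.is_inline());
    /// ```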
    pub fn new(val: T) -> Self {
        const { assert_flex_buffer::<T, B>() }

        let size = mem::size_of::<T>();
        let align = mem::align_of::<T>();
        // Same inline policy as `new_raw`: fit within the buffer and require
        // no more than usize alignment.
        if size <= Self::capacity() && align <= mem::align_of::<usize>() {
            Self::new_inline(val, Metadata::NULL)
        } else {
            Self::new_heap(val, Metadata::NULL)
        }
    }
}

impl<T: ?Sized, B: Buffer> Deref for Flex<T, B> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        // SAFETY: `as_ptr` always points at a valid, initialized `T`.
        unsafe { &*self.as_ptr() }
    }
}

impl<T: ?Sized, B: Buffer> DerefMut for Flex<T, B> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: `as_mut_ptr` always points at a valid, initialized `T`,
        // and `&mut self` guarantees exclusive access.
        unsafe { &mut *self.as_mut_ptr() }
    }
}

impl<T: ?Sized, B: Buffer> Drop for Flex<T, B> {
    fn drop(&mut self) {
        let hp = self.heap_ptr();
        if hp.is_null() {
            // SAFETY: the inline value is initialized and dropped exactly once.
            unsafe {
                ptr::drop_in_place(self.as_mut_ptr());
            }
        } else {
            let fat = self.as_mut_ptr();
            // Compute the layout before dropping: for trait objects the size
            // and alignment come from the vtable via the still-live value.
            let layout = Layout::for_value(unsafe { &*fat });
            unsafe {
                ptr::drop_in_place(fat);
            }
            // Zero-sized values used a dangling pointer and own no allocation.
            if layout.size() > 0 {
                unsafe {
                    dealloc(hp, layout);
                }
            }
        }
    }
}

// SAFETY: `Flex` uniquely owns its value, whether inline or on the heap, so
// transferring or sharing the container is exactly as safe as for `T` itself.
#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl<T: ?Sized + Send, B: Buffer> Send for Flex<T, B> {}
unsafe impl<T: ?Sized + Sync, B: Buffer> Sync for Flex<T, B> {}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{B16, B32, B64};
    use core::fmt::Display;
    use std::sync::atomic::{AtomicUsize, Ordering};

    trait Greet {
        fn greet(&self) -> &str;
    }

    struct Hello;
    impl Greet for Hello {
        fn greet(&self) -> &str {
            "hello"
        }
    }

    struct World(u64);
    impl Greet for World {
        fn greet(&self) -> &str {
            "world"
        }
    }

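    // Unsizes a reference to `val` to obtain a fat pointer carrying the
    // `dyn Greet` vtable, then forwards the value and pointer to `new_raw`.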
    fn make_flex_greet<V: Greet + 'static, B: Buffer>(val: V) -> Flex<dyn Greet, B> {
        let ptr: *const dyn Greet = &val as &dyn Greet;
        unsafe { Flex::new_raw(val, ptr) }
    }

    #[test]
    fn total_size_matches_buffer() {
        assert_eq!(mem::size_of::<Flex<dyn Greet, B16>>(), 16);
        assert_eq!(mem::size_of::<Flex<dyn Greet, B32>>(), 32);
        assert_eq!(mem::size_of::<Flex<dyn Greet, B64>>(), 64);
        assert_eq!(mem::size_of::<Flex<u64, B32>>(), 32);
    }

    #[test]
    fn capacity_unsized() {
        assert_eq!(Flex::<dyn Greet, B16>::capacity(), 0);
        assert_eq!(Flex::<dyn Greet, B32>::capacity(), 16);
        assert_eq!(Flex::<dyn Greet, B64>::capacity(), 48);
    }

    #[test]
    fn capacity_sized() {
        assert_eq!(Flex::<u64, B16>::capacity(), 8);
        assert_eq!(Flex::<u64, B32>::capacity(), 24);
    }

    #[test]
    fn sized_new_inline() {
        let f: Flex<u64, B32> = Flex::new(42);
        assert!(f.is_inline());
        assert_eq!(*f, 42);
    }

    #[test]
    fn sized_new_heap() {
        // 32 bytes of array exceed B16's 8-byte inline capacity for sized values.
        let f: Flex<[u64; 4], B16> = Flex::new([1, 2, 3, 4]);
        assert!(!f.is_inline());
        assert_eq!(*f, [1, 2, 3, 4]);
    }

    #[test]
    fn sized_deref_mut_inline() {
        let mut f: Flex<u64, B16> = Flex::new(10);
        assert!(f.is_inline());
        *f = 20;
        assert_eq!(*f, 20);
    }

    #[test]
    fn sized_deref_mut_heap() {
        let mut f: Flex<[u64; 4], B16> = Flex::new([0u64; 4]);
        assert!(!f.is_inline());
        f[0] = 99;
        assert_eq!(f[0], 99);
    }

    #[test]
    fn zst_always_inline() {
        let f: Flex<dyn Greet, B32> = make_flex_greet(Hello);
        assert!(f.is_inline());
        assert_eq!(f.greet(), "hello");
    }

    #[test]
    fn small_value_inline() {
        let f: Flex<dyn Greet, B32> = make_flex_greet(World(42));
        assert!(f.is_inline());
        assert_eq!(f.greet(), "world");
    }

    #[test]
    fn large_value_heap() {
        // 64 bytes: far over B32's 16-byte inline capacity for `dyn Greet`.
        struct Big([u64; 8]);
        impl Greet for Big {
            fn greet(&self) -> &str {
                "big"
            }
        }

        let f: Flex<dyn Greet, B32> = make_flex_greet(Big([0xAB; 8]));
        assert!(!f.is_inline());
        assert_eq!(f.greet(), "big");
    }

    #[test]
    fn b16_unsized_zero_capacity_goes_to_heap() {
        // B16 leaves zero inline bytes for `dyn Greet`, so even 8 bytes spill.
        let f: Flex<dyn Greet, B16> = make_flex_greet(World(7));
        assert!(!f.is_inline());
        assert_eq!(f.greet(), "world");
    }

    #[test]
    fn b16_unsized_zst_still_inline() {
        let f: Flex<dyn Greet, B16> = make_flex_greet(Hello);
        assert!(f.is_inline());
        assert_eq!(f.greet(), "hello");
    }

    #[test]
    fn deref_mut_inline() {
        trait Increment {
            fn inc(&mut self);
            fn val(&self) -> u64;
        }

        struct Counter(u64);
        impl Increment for Counter {
            fn inc(&mut self) {
                self.0 += 1;
            }
            fn val(&self) -> u64 {
                self.0
            }
        }

        fn make<V: Increment + 'static, B: Buffer>(val: V) -> Flex<dyn Increment, B> {
            let ptr: *const dyn Increment = &val as &dyn Increment;
            unsafe { Flex::new_raw(val, ptr) }
        }

        let mut f: Flex<dyn Increment, B32> = make(Counter(0));
        assert!(f.is_inline());
        f.inc();
        f.inc();
        assert_eq!(f.val(), 2);
    }

    #[test]
    fn deref_mut_heap() {
        trait Accumulate {
            fn push(&mut self, v: u64);
            fn sum(&self) -> u64;
        }

        struct BigAccum {
            data: [u64; 15],
            count: usize,
        }
        impl BigAccum {
            fn new() -> Self {
                BigAccum {
                    data: [0; 15],
                    count: 0,
                }
            }
        }
        impl Accumulate for BigAccum {
            fn push(&mut self, v: u64) {
                self.data[self.count] = v;
                self.count += 1;
            }
            fn sum(&self) -> u64 {
                self.data[..self.count].iter().sum()
            }
        }

        fn make<V: Accumulate + 'static, B: Buffer>(val: V) -> Flex<dyn Accumulate, B> {
            let ptr: *const dyn Accumulate = &val as &dyn Accumulate;
            unsafe { Flex::new_raw(val, ptr) }
        }

        let mut f: Flex<dyn Accumulate, B32> = make(BigAccum::new());
        assert!(!f.is_inline());
        f.push(10);
        f.push(20);
        assert_eq!(f.sum(), 30);
    }

    #[test]
    fn drop_inline() {
        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        struct Dropper;
        impl Drop for Dropper {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::Relaxed);
            }
        }
        impl Greet for Dropper {
            fn greet(&self) -> &str {
                "dropping"
            }
        }

        DROP_COUNT.store(0, Ordering::Relaxed);
        {
            let f: Flex<dyn Greet, B32> = make_flex_greet(Dropper);
            assert!(f.is_inline());
            assert_eq!(f.greet(), "dropping");
        }
        assert_eq!(DROP_COUNT.load(Ordering::Relaxed), 1);
    }

    #[test]
    fn drop_heap() {
        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        struct BigDropper([u64; 8]);
        impl Drop for BigDropper {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::Relaxed);
            }
        }
        impl Greet for BigDropper {
            fn greet(&self) -> &str {
                "big drop"
            }
        }

        DROP_COUNT.store(0, Ordering::Relaxed);
        {
            let f: Flex<dyn Greet, B32> = make_flex_greet(BigDropper([0; 8]));
            assert!(!f.is_inline());
            assert_eq!(f.greet(), "big drop");
        }
        assert_eq!(DROP_COUNT.load(Ordering::Relaxed), 1);
    }

    #[test]
    fn drop_heap_sized() {
        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        struct BigDropper([u64; 8]);
        impl Drop for BigDropper {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::Relaxed);
            }
        }

        DROP_COUNT.store(0, Ordering::Relaxed);
        {
            let f: Flex<BigDropper, B16> = Flex::new(BigDropper([0; 8]));
            assert!(!f.is_inline());
        }
        assert_eq!(DROP_COUNT.load(Ordering::Relaxed), 1);
    }

    #[test]
    fn display_trait_object_inline() {
        let val: u32 = 42;
        let ptr: *const dyn Display = &val as &dyn Display;
        let f: Flex<dyn Display, B32> = unsafe { Flex::new_raw(val, ptr) };
        assert!(f.is_inline());
        assert_eq!(format!("{}", &*f), "42");
    }

    #[test]
    fn exact_fit_is_inline() {
        // Exactly B32's 16-byte inline capacity for `dyn Greet`.
        struct Exact([usize; 2]);
        impl Greet for Exact {
            fn greet(&self) -> &str {
                "exact"
            }
        }

        let f: Flex<dyn Greet, B32> = make_flex_greet(Exact([1, 2]));
        assert!(f.is_inline());
        assert_eq!(f.greet(), "exact");
    }

    #[test]
    fn one_byte_over_goes_to_heap() {
        // 17 bytes of fields: one byte over the 16-byte inline limit
        // (padded to 24 under repr(C)).
        #[repr(C)]
        struct OneTooMany {
            _data: [usize; 2],
            _extra: u8,
        }
        impl Greet for OneTooMany {
            fn greet(&self) -> &str {
                "spilled"
            }
        }

        let f: Flex<dyn Greet, B32> = make_flex_greet(OneTooMany {
            _data: [0; 2],
            _extra: 0,
        });
        assert!(!f.is_inline());
        assert_eq!(f.greet(), "spilled");
    }

    #[test]
    fn macro_construction_inline() {
        let f: Flex<dyn Greet, B32> = crate::flex!(Hello);
        assert!(f.is_inline());
        assert_eq!(f.greet(), "hello");
    }

    #[test]
    fn macro_construction_heap() {
        struct Big([u64; 8]);
        impl Greet for Big {
            fn greet(&self) -> &str {
                "big"
            }
        }

        let f: Flex<dyn Greet, B32> = crate::flex!(Big([0; 8]));
        assert!(!f.is_inline());
        assert_eq!(f.greet(), "big");
    }
}