// smol_buf/buf16.rs

1use alloc::sync::Arc;
2use core::num::NonZeroU64;
3use core::{mem, ops, ptr, slice};
4
/// A small-data optimized byte buffer.
///
/// See [`Str16`](crate::Str16) for all the properties.
#[repr(transparent)]
pub struct Buf16(Buf16Inner);

// Maximum number of bytes stored inline (no heap or static backing);
// the 16th byte holds the variant tag plus the inline length.
pub(crate) const INLINE_CAP: usize = 15;
12
// Out-of-line (heap/static) representation: a raw pointer plus a length
// word whose top three bits carry the variant tag (see the `TAG_*`
// constants). `len_with_tag` is `NonZeroU64` so `Option<Buf16>` gets a
// niche and stays 16 bytes (checked by the const assertions below).
#[repr(C)]
#[derive(Clone, Copy, PartialEq)]
struct Buf16Inner {
    ptr: u64,
    len_with_tag: NonZeroU64,
}
19
// Inline representation: up to 15 payload bytes, with the variant tag and
// the payload length packed together into the final byte. `repr(C)` pins
// `buf` at offset 0 and `tag_and_len` at offset 15, overlapping the most
// significant byte of `Buf16Inner::len_with_tag`.
#[repr(C)]
#[derive(Clone, Copy)]
struct Buf16Inline {
    buf: [u8; INLINE_CAP],
    tag_and_len: u8,
}
26
// Compile-time layout checks: both representations are exactly 16 bytes,
// and the `NonZeroU64` niche makes `Option<Buf16>` cost nothing extra.
const _: () = {
    assert!(mem::size_of::<Buf16>() == 16);
    assert!(mem::align_of::<Buf16>() == 8);
    assert!(mem::size_of::<Option<Buf16>>() == 16);

    assert!(mem::size_of::<Buf16Inline>() == mem::size_of::<Buf16Inner>());
};
34
// Variant tags, stored in the top three bits of the tag byte (the final
// byte of the 16-byte representation). Exactly one bit is set per variant,
// so single-bit `&` tests suffice to classify a buffer.
const TAG_INLINE: u8 = 0b001 << 5;
const TAG_ARC: u8 = 0b010 << 5;
const TAG_STATIC: u8 = 0b100 << 5;
// Selects the low five bits of the tag byte, where the inline variant
// keeps its length (0..=15 fits comfortably).
const TAG_MASK: u8 = !(0b111 << 5);
// Clears the top three (tag) bits of the whole `len_with_tag` word,
// leaving the out-of-line length.
const TAG_MASK_FULL: u64 = !(0b111 << (64 - 3));
// Shift that moves a tag byte into the most significant byte of a `u64`.
const TAG_SHIFT: u8 = 64 - 8;
41
impl Buf16 {
    /// Constructs inline variant of `Buf16`.
    ///
    /// Panics if `input.len() > 15`.
    #[inline]
    pub const fn new_inline(input: &[u8]) -> Self {
        let len = input.len();
        assert!(len <= INLINE_CAP); // avoids checks in loop

        let mut buf = [0; INLINE_CAP];

        // Manual byte-copy loop: slice copy helpers and iterators are not
        // usable in a `const fn`.
        let mut i = 0;
        while i < len {
            buf[i] = input[i];
            i += 1
        }

        // Low bits carry the length (<= 15, fits under `TAG_MASK`), high
        // bits carry the inline tag.
        let tag_and_len = len as u8 | TAG_INLINE;
        // SAFETY: `Buf16Inline` and `Buf16` have the same size (see the
        // const assertions above), and `tag_and_len` is non-zero because
        // `TAG_INLINE` is set, so the `NonZeroU64` niche of
        // `Buf16Inner::len_with_tag` is satisfied.
        unsafe { mem::transmute(Buf16Inline { buf, tag_and_len }) }
    }

    /// Constructs a `Buf16` from a `'static` slice, borrowing it without
    /// copying unless it is short enough to store inline.
    #[inline]
    pub fn new_static(input: &'static [u8]) -> Self {
        let len = input.len();
        if len <= INLINE_CAP {
            Self::new_inline(input)
        } else {
            let ptr = input.as_ptr() as usize as u64;
            // `.to_le()` pins the byte order so that the tag byte lands in
            // the same memory position as `Buf16Inline::tag_and_len` on both
            // endiannesses; every read site applies `.to_le()` again to undo
            // the swap on big-endian targets.
            let len_with_tag = (len as u64 | ((TAG_STATIC as u64) << TAG_SHIFT)).to_le();
            // SAFETY: the `TAG_STATIC` bits are set, so the value is non-zero.
            let len_with_tag = unsafe { NonZeroU64::new_unchecked(len_with_tag) };
            Self(Buf16Inner { ptr, len_with_tag })
        }
    }

    /// Constructs a `Buf16` from an arbitrary slice, copying it either
    /// inline (`len <= 15`) or into a fresh `Arc<[u8]>` allocation.
    pub fn new(input: &[u8]) -> Self {
        let len = input.len();
        if len <= INLINE_CAP {
            Self::new_inline(input)
        } else {
            let arc = Arc::from(input);
            Self::from_arc(len, arc)
        }
    }

    /// Builds the heap-backed variant from an existing allocation.
    ///
    /// `len` is expected to equal `arc.len()` so [`Self::as_arc`] can later
    /// rebuild the identical fat pointer.
    #[inline]
    pub(crate) fn from_arc(len: usize, arc: Arc<[u8]>) -> Self {
        // `into_raw` leaks the strong reference; this `Buf16` now owns it
        // and releases it in `Drop` (via `as_arc`).
        let ptr = Arc::into_raw(arc) as *const u8 as usize as u64;
        let len_with_tag = (len as u64 | ((TAG_ARC as u64) << TAG_SHIFT)).to_le();
        // SAFETY: the `TAG_ARC` bits are set, so the value is non-zero.
        let len_with_tag = unsafe { NonZeroU64::new_unchecked(len_with_tag) };
        Self(Buf16Inner { ptr, len_with_tag })
    }

    /// Reconstructs the backing `Arc` for the heap variant; `None` for the
    /// inline and static variants.
    ///
    /// NOTE(review): the strong count is *not* incremented here — the
    /// returned `Arc` *is* this buffer's own reference. Dropping it without
    /// a matching `increment_strong_count` gives up the buffer's share
    /// (exactly what `Drop` relies on); `Clone` increments before keeping a
    /// copy. Callers must preserve this balance.
    #[inline]
    pub(crate) fn as_arc(&self) -> Option<Arc<[u8]>> {
        if self.tag_byte() & TAG_ARC == 0 {
            return None;
        }

        let ptr = self.0.ptr as usize as *const u8;
        // Strip the tag bits to recover the slice length.
        let len = (self.0.len_with_tag.get().to_le() & TAG_MASK_FULL) as usize;
        let arc_ptr = ptr::slice_from_raw_parts(ptr, len);
        // SAFETY: `ptr` and `len` were produced by `Arc::into_raw` in
        // `from_arc` with this exact length, so this rebuilds the original
        // fat pointer.
        Some(unsafe { Arc::from_raw(arc_ptr) })
    }

    /// Returns the final byte of the 16-byte representation, which holds
    /// the variant tag (and, for the inline variant, the length).
    #[inline(always)]
    fn tag_byte(&self) -> u8 {
        // SAFETY: both types have the same size (const-asserted above), and
        // every byte of a `Buf16` is a valid `u8` to read.
        unsafe { mem::transmute::<&Buf16, &Buf16Inline>(self) }.tag_and_len
    }

    /// Returns the length of the buffer in bytes.
    #[inline(always)]
    pub fn len(&self) -> usize {
        let tag_byte = self.tag_byte();
        if tag_byte & TAG_INLINE > 0 {
            // Inline: the length lives in the low bits of the tag byte.
            (tag_byte & TAG_MASK) as usize
        } else {
            // Heap/static: the length is the full word minus the tag bits.
            (self.0.len_with_tag.get().to_le() & TAG_MASK_FULL) as usize
        }
    }

    /// Returns `true` for the `Arc`-backed variant; inline and static data
    /// are not heap-allocated.
    #[inline(always)]
    pub fn is_heap_allocated(&self) -> bool {
        self.tag_byte() & TAG_ARC > 0
    }

    /// Returns `true` if the buffer holds no bytes.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the contents as a byte slice, regardless of variant.
    #[inline(always)]
    pub fn as_bytes(&self) -> &[u8] {
        let tag_byte = self.tag_byte();
        let (ptr, len) = if tag_byte & TAG_INLINE > 0 {
            // Inline: the payload starts at offset 0 of `self`
            // (`Buf16Inline::buf` is the first `repr(C)` field).
            (
                self as *const _ as *const u8,
                (tag_byte & TAG_MASK) as usize,
            )
        } else {
            // Heap/static: stored pointer plus untagged length.
            (
                self.0.ptr as usize as *const u8,
                (self.0.len_with_tag.get().to_le() & TAG_MASK_FULL) as usize,
            )
        };
        // SAFETY: inline data lies within `self` with `len <= 15`; the
        // out-of-line pointer/length pairs come from `new_static`/`from_arc`
        // and remain valid while `&self` lives (static data, or the `Arc`
        // reference this buffer still holds).
        unsafe { slice::from_raw_parts(ptr, len) }
    }
}
148
149impl Drop for Buf16 {
150    fn drop(&mut self) {
151        drop(self.as_arc());
152    }
153}
154
155impl Clone for Buf16 {
156    fn clone(&self) -> Self {
157        if let Some(arc) = self.as_arc() {
158            unsafe { Arc::increment_strong_count(Arc::into_raw(arc)) };
159        }
160
161        Self(self.0)
162    }
163}
164
165impl PartialEq for Buf16 {
166    fn eq(&self, other: &Self) -> bool {
167        self.0 == other.0 || self.as_bytes() == other.as_bytes()
168    }
169}
170
171impl Eq for Buf16 {}
172
173impl Default for Buf16 {
174    #[inline(always)]
175    fn default() -> Self {
176        Self::new_inline(&[])
177    }
178}
179
180impl ops::Deref for Buf16 {
181    type Target = [u8];
182
183    #[inline(always)]
184    fn deref(&self) -> &[u8] {
185        self.as_bytes()
186    }
187}
188
189// TODO: copy over more methods