use std::alloc;
use std::ptr::NonNull;

mod fixed;
mod growable;

pub use fixed::CompactBytesSlice;
pub use growable::CompactBytes;

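/// Tag stored in the most significant bit of the final byte of an
/// [`InlineBytes`]. When set, the value is inline and the remaining seven
/// bits of that byte hold the length.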
const INLINE_MASK: u8 = 0b1000_0000;

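/// Bytes stored inline on the stack, avoiding a heap allocation. The final
/// byte carries the inline tag bit and the length; `repr(C, align(8))` keeps
/// the type the same size as its heap-allocated counterparts, as the
/// assertions below verify.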
#[repr(C, align(8))]
#[derive(Copy, Clone)]
struct InlineBytes<const CAPACITY: usize> {
    buffer: [u8; CAPACITY],
    data: u8,
}

type InlineBytes23 = InlineBytes<23>;
static_assertions::assert_eq_size!(InlineBytes23, Vec<u8>);

type InlineBytes15 = InlineBytes<15>;
static_assertions::assert_eq_size!(InlineBytes15, Box<[u8]>);

impl<const CAPACITY: usize> InlineBytes<CAPACITY> {
    /// Expose the generic parameter as an associated constant.
    const CAPACITY: usize = CAPACITY;

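    /// Creates an `InlineBytes` by copying `slice` into a stack buffer.
    ///
    /// # Safety
    ///
    /// `slice.len()` must not exceed `CAPACITY`.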
    #[inline]
    pub unsafe fn new(slice: &[u8]) -> Self {
        debug_assert!(slice.len() <= CAPACITY);

        let len = slice.len();
        let mut buffer = [0u8; CAPACITY];

        // SAFETY: The caller guarantees `slice.len() <= CAPACITY`, and
        // `slice` cannot overlap `buffer`, which was just created on the
        // stack.
        unsafe {
            buffer
                .as_mut_ptr()
                .copy_from_nonoverlapping(slice.as_ptr(), len)
        };

        let data = INLINE_MASK | (len as u8);

        InlineBytes { buffer, data }
    }

    #[inline]
    pub const fn empty() -> Self {
        let buffer = [0u8; CAPACITY];

        // An empty buffer still carries the inline tag; its length bits are
        // all zero.
        #[allow(clippy::identity_op)]
        let data = INLINE_MASK | 0;

        InlineBytes { buffer, data }
    }

    pub fn len(&self) -> usize {
        (self.data & !INLINE_MASK) as usize
    }

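    /// Updates the stored length without touching the buffer contents.
    ///
    /// # Safety
    ///
    /// `new_len` must not exceed `CAPACITY`.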
    unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= CAPACITY);
        self.data = INLINE_MASK | (new_len as u8);
    }
}

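/// A growable heap allocation: a pointer plus length and capacity, the same
/// size as `Vec<u8>`.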
#[repr(C)]
struct HeapBytesGrowable {
    ptr: NonNull<u8>,
    len: usize,
    cap: usize,
}
static_assertions::assert_eq_size!(HeapBytesGrowable, Vec<u8>);

impl HeapBytesGrowable {
    /// The smallest allocation we make: two machine words.
    pub const MIN_ALLOCATION_SIZE: usize = std::mem::size_of::<usize>() * 2;
    /// The largest allocation we support: `isize::MAX` bytes.
    pub const MAX_ALLOCATION_SIZE: usize = usize::MAX >> 1;

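    /// Copies `slice` into a fresh heap allocation of at least
    /// `MIN_ALLOCATION_SIZE` bytes.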
    #[inline]
    pub fn new(slice: &[u8]) -> Self {
        let len = slice.len();
        let cap = len.max(Self::MIN_ALLOCATION_SIZE);

        debug_assert!(cap <= Self::MAX_ALLOCATION_SIZE, "allocation too large");
        let ptr = heap::alloc_ptr(cap);

        // SAFETY: `ptr` points at a fresh allocation of `cap >= len` bytes,
        // so it cannot overlap `slice`.
        unsafe { ptr.as_ptr().copy_from_nonoverlapping(slice.as_ptr(), len) };

        HeapBytesGrowable { ptr, len, cap }
    }

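    /// Creates an empty buffer with room for at least `capacity` bytes.
    ///
    /// Panics if `capacity` exceeds `MAX_ALLOCATION_SIZE`.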
    pub fn with_capacity(capacity: usize) -> Self {
        assert!(
            capacity <= Self::MAX_ALLOCATION_SIZE,
            "allocation too large"
        );

        let len = 0;
        let cap = capacity.max(Self::MIN_ALLOCATION_SIZE);
        let ptr = heap::alloc_ptr(cap);

        // Record the capacity we actually allocated, which may be larger
        // than the requested `capacity`.
        HeapBytesGrowable { ptr, len, cap }
    }

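    /// Copies `slice` into a new allocation sized by `amortized_growth` to
    /// leave room for at least `additional` more bytes.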
    pub fn with_additional(slice: &[u8], additional: usize) -> Self {
        let new_capacity = Self::amortized_growth(slice.len(), additional);
        let mut heap = Self::with_capacity(new_capacity);

        debug_assert!(heap.cap > slice.len());

        // SAFETY: `heap` was just allocated with room for more than
        // `slice.len()` bytes and cannot overlap `slice`.
        unsafe {
            std::ptr::copy_nonoverlapping(slice.as_ptr(), heap.ptr.as_ptr(), slice.len());
        };
        heap.len = slice.len();

        heap
    }

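    /// Sets the number of initialized bytes.
    ///
    /// # Safety
    ///
    /// The first `len` bytes of the allocation must be initialized, and
    /// `len` must not exceed the capacity.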
    pub unsafe fn set_len(&mut self, len: usize) {
        debug_assert!(len <= self.cap);
        self.len = len;
    }

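    /// Reallocates to hold at least `new_capacity` bytes, returning the new
    /// capacity on success. Fails if `new_capacity` is zero, smaller than
    /// the current length, or if the allocator returns null.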
    pub fn realloc(&mut self, new_capacity: usize) -> Result<usize, ()> {
        // We cannot shrink below the number of bytes currently in use.
        if new_capacity < self.len {
            return Err(());
        }
        // A zero-capacity allocation is never valid.
        if new_capacity == 0 {
            return Err(());
        }

        let new_capacity = new_capacity.max(Self::MIN_ALLOCATION_SIZE);

        // Nothing to do if the capacity would not change.
        if new_capacity == self.cap {
            return Ok(new_capacity);
        }

        let cur_layout = heap::layout(self.cap);
        let new_layout = heap::layout(new_capacity);

        // Guard against the layout reporting less room than we need.
        let new_size = new_layout.size();
        if new_size < new_capacity {
            return Err(());
        }

        // SAFETY: `self.ptr` was allocated with `cur_layout`, and
        // `heap::layout` already validated `new_size` as a legal, non-zero
        // allocation size.
        let raw_ptr = unsafe { alloc::realloc(self.ptr.as_ptr(), cur_layout, new_size) };
        let ptr = NonNull::new(raw_ptr).ok_or(())?;

        self.ptr = ptr;
        self.cap = new_capacity;

        Ok(new_capacity)
    }

    #[inline]
    fn dealloc(&mut self) {
        heap::dealloc_ptr(self.ptr, self.cap);
    }

    /// Returns a new capacity that is the larger of the required
    /// `cur_len + additional` and `1.5 * cur_len`, amortizing the cost of
    /// repeated reallocations.
    #[inline(always)]
    pub fn amortized_growth(cur_len: usize, additional: usize) -> usize {
        let required = cur_len.saturating_add(additional);
        let amortized = cur_len.saturating_mul(3) / 2;
        amortized.max(required)
    }
}

impl Drop for HeapBytesGrowable {
    fn drop(&mut self) {
        self.dealloc()
    }
}

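/// A fixed-size heap allocation: a pointer plus length, the same size as
/// `Box<[u8]>`.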
#[repr(C)]
struct HeapBytesFixed {
    ptr: NonNull<u8>,
    len: usize,
}
static_assertions::assert_eq_size!(HeapBytesFixed, Box<[u8]>);

impl HeapBytesFixed {
    /// The largest allocation we support: `isize::MAX` bytes.
    pub const MAX_ALLOCATION_SIZE: usize = usize::MAX >> 1;

    #[inline]
    pub fn new(slice: &[u8]) -> Self {
        let len = slice.len();
        debug_assert!(len <= Self::MAX_ALLOCATION_SIZE, "allocation too large");

        let ptr = heap::alloc_ptr(len);
        // SAFETY: `ptr` points at a fresh allocation of `len` bytes, so it
        // cannot overlap `slice`.
        unsafe { ptr.as_ptr().copy_from_nonoverlapping(slice.as_ptr(), len) };

        HeapBytesFixed { ptr, len }
    }

    #[inline]
    fn dealloc(&mut self) {
        heap::dealloc_ptr(self.ptr, self.len);
    }
}

mod heap {
    use std::alloc;
    use std::ptr::NonNull;

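    /// Allocates `capacity` bytes on the heap, panicking if the allocator
    /// fails.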
    #[inline]
    pub(crate) fn alloc_ptr(capacity: usize) -> NonNull<u8> {
        let layout = layout(capacity);
        debug_assert!(layout.size() > 0);

        // SAFETY: callers never request a zero-size allocation, which
        // `layout` and the assertion above check in debug builds.
        let ptr = unsafe { alloc::alloc(layout) };

        NonNull::new(ptr).expect("failed to allocate HeapBytes")
    }

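    /// Deallocates `capacity` bytes at `ptr`, which must have been returned
    /// by [`alloc_ptr`] with the same `capacity`.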
    #[inline]
    pub(crate) fn dealloc_ptr(ptr: NonNull<u8>, capacity: usize) {
        let layout = layout(capacity);

        // SAFETY: `ptr` was allocated via `alloc_ptr` with this same
        // `layout`.
        unsafe { alloc::dealloc(ptr.as_ptr(), layout) };
    }

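    /// Returns the [`alloc::Layout`] for an allocation of `capacity` bytes.
    /// A capacity of zero is not a valid allocation request.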
    #[inline(always)]
    pub(crate) fn layout(capacity: usize) -> alloc::Layout {
        debug_assert!(capacity > 0, "tried to allocate a HeapBytes with 0 capacity");
        alloc::Layout::array::<u8>(capacity).expect("valid capacity")
    }
}
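
// A minimal test sketch; the cases below are illustrative assumptions that
// exercise only the invariants visible in this file, not the crate's full
// test suite.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn inline_round_trips_len() {
        // SAFETY: 5 <= 23, satisfying `InlineBytes::new`'s contract.
        let inline = unsafe { InlineBytes23::new(b"hello") };
        assert_eq!(inline.len(), 5);
        assert_eq!(InlineBytes23::empty().len(), 0);
    }

    #[test]
    fn growable_respects_minimum_allocation() {
        let heap = HeapBytesGrowable::new(b"abc");
        assert_eq!(heap.len, 3);
        assert!(heap.cap >= HeapBytesGrowable::MIN_ALLOCATION_SIZE);
    }

    #[test]
    fn amortized_growth_is_at_least_required() {
        // required = 14 dominates amortized = 6.
        assert_eq!(HeapBytesGrowable::amortized_growth(4, 10), 14);
        // amortized = 150 dominates required = 101.
        assert_eq!(HeapBytesGrowable::amortized_growth(100, 1), 150);
    }
}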