use core::{marker::PhantomData, mem::MaybeUninit};

use crate::{
    backvec::BackVec, Offset, Primitive, UnionVectorOffset, WriteAsOffset, WriteAsUnionVector,
};

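/// A builder for serializing flatbuffers.
///
/// The buffer is built back-to-front: each new object is written in front
/// of everything serialized so far, and offsets are measured from the back
/// of the buffer. This lets tables refer to already-written children using
/// the forward-pointing offsets the flatbuffers format requires. The
/// optional `vtable-cache`, `string-cache` and `bytes-cache` features
/// deduplicate repeated data.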
#[derive(Debug)]
pub struct Builder {
    pub(crate) inner: BackVec,

    #[cfg(feature = "vtable-cache")]
    vtable_cache: crate::builder_cache::Cache<crate::builder_cache::VTable>,

    #[cfg(feature = "string-cache")]
    pub(crate) string_cache: crate::builder_cache::Cache<crate::builder_cache::ByteVec>,
    #[cfg(feature = "bytes-cache")]
    pub(crate) bytes_cache: crate::builder_cache::Cache<crate::builder_cache::ByteVec>,

    /// Padding bytes (tracked modulo the largest alignment seen so far)
    /// that still need to be written for the buffer to come out correctly
    /// aligned. See `prepare_write` for how they are flushed lazily.
    delayed_bytes: usize,
    /// The largest alignment requirement seen so far, minus one.
    alignment_mask: usize,

    /// In debug builds: bytes promised by the last `prepare_write` that
    /// have not been written yet.
    #[cfg(debug_assertions)]
    missing_bytes: usize,
}

impl Default for Builder {
    fn default() -> Self {
        Self::with_capacity(0)
    }
}

impl Builder {
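    /// Creates a new, empty `Builder`.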
    pub fn new() -> Self {
        Self::with_capacity(0)
    }

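    /// Returns the number of bytes written so far.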
    pub fn len(&self) -> usize {
        self.inner.len()
    }

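    /// Returns `true` if no bytes have been written yet.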
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

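    /// Creates a new, empty `Builder` with at least the given capacity
    /// pre-allocated.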
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            inner: BackVec::with_capacity(capacity),

            delayed_bytes: 0,
            alignment_mask: 0,

            #[cfg(feature = "vtable-cache")]
            vtable_cache: crate::builder_cache::Cache::default(),

            #[cfg(feature = "string-cache")]
            string_cache: crate::builder_cache::Cache::default(),

            #[cfg(feature = "bytes-cache")]
            bytes_cache: crate::builder_cache::Cache::default(),

            #[cfg(debug_assertions)]
            missing_bytes: 0,
        }
    }

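    /// Serializes a string and returns an offset to it (the `string-cache`
    /// feature, when enabled, is used to deduplicate identical strings).
    ///
    /// A minimal sketch of the intended usage, assuming this crate is
    /// published as `planus` with `Builder` re-exported at the root:
    ///
    /// ```
    /// let mut builder = planus::Builder::new();
    /// let greeting = builder.create_string("hello");
    /// ```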
    pub fn create_string(&mut self, v: impl WriteAsOffset<str>) -> Offset<str> {
        v.prepare(self)
    }

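    /// Serializes a vector and returns an offset to it.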
    pub fn create_vector<T>(&mut self, v: impl WriteAsOffset<[T]>) -> Offset<[T]> {
        v.prepare(self)
    }

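    /// Serializes a union vector and returns an offset to it.
    ///
    /// In the flatbuffers format a union vector is stored as two parallel
    /// vectors, one of type tags and one of values, which is why it gets
    /// its own offset type.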
    pub fn create_union_vector<T>(
        &mut self,
        v: impl WriteAsUnionVector<T>,
    ) -> UnionVectorOffset<T> {
        v.prepare(self)
    }

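    /// Resets the builder to an empty state so it can be reused, clearing
    /// the buffer and all feature-gated caches.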
    pub fn clear(&mut self) {
        self.inner.clear();
        #[cfg(feature = "vtable-cache")]
        self.vtable_cache.clear();
        #[cfg(feature = "string-cache")]
        self.string_cache.clear();
        #[cfg(feature = "bytes-cache")]
        self.bytes_cache.clear();
        self.delayed_bytes = 0;
        self.alignment_mask = 0;
        #[cfg(debug_assertions)]
        {
            self.missing_bytes = 0;
        }
    }

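    /// Reserves room for `size` bytes with alignment `alignment_mask + 1`
    /// and returns the buffer position (measured from the back of the
    /// buffer) the value will occupy once written.
    ///
    /// Because the buffer is built back-to-front, the absolute position of
    /// a value is unknown until serialization finishes, so alignment
    /// padding cannot always be emitted eagerly. Instead the outstanding
    /// padding is tracked in `delayed_bytes` (modulo the largest alignment
    /// seen so far) and written out as zeros only once a write actually
    /// requires that alignment.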
    pub(crate) fn prepare_write(&mut self, size: usize, alignment_mask: usize) -> usize {
        // The mask must be one less than a power of two.
        debug_assert!((alignment_mask + 1) & alignment_mask == 0);
        #[cfg(debug_assertions)]
        debug_assert_eq!(self.missing_bytes, 0);

        // Update the padding owed for the new total size, then emit only as
        // much of it as this write's alignment actually requires.
        let delayed_bytes = self.delayed_bytes.wrapping_sub(size) & self.alignment_mask;
        let needed_padding = delayed_bytes & alignment_mask;
        self.delayed_bytes = delayed_bytes.wrapping_sub(needed_padding);
        self.alignment_mask |= alignment_mask;
        self.inner.reserve(size.wrapping_add(needed_padding));
        self.inner.extend_with_zeros(needed_padding);

        debug_assert_eq!(self.delayed_bytes & alignment_mask, 0);

        #[cfg(debug_assertions)]
        {
            self.missing_bytes = size;
        }

        self.len() + size
    }

    /// Returns an offset to the most recently written object, i.e. the
    /// current front of the buffer.
    #[doc(hidden)]
    pub fn current_offset<T: ?Sized>(&self) -> Offset<T> {
        Offset {
            offset: self.inner.len() as u32,
            phantom: PhantomData,
        }
    }

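    /// Writes a vtable and returns its buffer position.
    ///
    /// With the `vtable-cache` feature enabled, an identical, previously
    /// written vtable is reused instead of being serialized again.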
    pub(crate) fn write_vtable(&mut self, vtable: &[u8]) -> usize {
        const VTABLE_ALIGNMENT: usize = 2;
        const VTABLE_ALIGNMENT_MASK: usize = VTABLE_ALIGNMENT - 1;

        #[cfg(feature = "vtable-cache")]
        let hash = {
            let hash = self.vtable_cache.hash(vtable);
            if let Some(offset) = self.vtable_cache.get(self.inner.as_slice(), hash, vtable) {
                return offset.into();
            }
            hash
        };

        let offset = self.prepare_write(vtable.len(), VTABLE_ALIGNMENT_MASK);
        self.write(vtable);
        #[cfg(feature = "vtable-cache")]
        self.vtable_cache
            .insert(hash, offset.try_into().unwrap(), self.inner.as_slice());
        offset
    }

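    /// Writes raw bytes into space reserved by a preceding call to
    /// `prepare_write`; in debug builds the reservation is checked to be
    /// filled exactly.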
    pub(crate) fn write(&mut self, buffer: &[u8]) {
        #[cfg(debug_assertions)]
        {
            self.missing_bytes = self.missing_bytes.checked_sub(buffer.len()).unwrap();
        }
        self.inner.extend_from_slice(buffer);
    }

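    /// Reserves aligned space for `size` bytes and lets `f` initialize it;
    /// `f` also receives the buffer position the bytes will occupy.
    ///
    /// # Safety
    ///
    /// `f` must initialize every byte of the slice it is handed (this is
    /// the presumed contract implied by the `MaybeUninit` slice).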
    #[doc(hidden)]
    pub unsafe fn write_with(
        &mut self,
        size: usize,
        alignment_mask: usize,
        f: impl FnOnce(u32, &mut [MaybeUninit<u8>]),
    ) {
        let offset = self.prepare_write(size, alignment_mask) as u32;
        self.inner.extend_write(size, |bytes| f(offset, bytes));
        #[cfg(debug_assertions)]
        {
            self.missing_bytes = self.missing_bytes.checked_sub(size).unwrap();
        }
    }

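    /// Writes the root offset (and the file identifier, if one is given),
    /// flushes any outstanding alignment padding, and returns the finished
    /// buffer.
    ///
    /// A minimal end-to-end sketch, assuming this crate is published as
    /// `planus`; a string stands in for a generated root table type purely
    /// to keep the example self-contained:
    ///
    /// ```
    /// let mut builder = planus::Builder::new();
    /// // Any `WriteAsOffset` value can act as the root object.
    /// let root = builder.create_string("hello");
    /// let bytes: &[u8] = builder.finish(root, None);
    /// assert!(!bytes.is_empty());
    /// ```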
    pub fn finish<T>(
        &mut self,
        root: impl WriteAsOffset<T>,
        file_identifier: Option<[u8; 4]>,
    ) -> &[u8] {
        // Serialize the root object (and, transitively, everything it
        // references) before writing the header in front of it.
        let root = root.prepare(self);

        if let Some(file_identifier) = file_identifier {
            let offset = self.prepare_write(
                8,
                <Offset<T> as Primitive>::ALIGNMENT_MASK.max(self.alignment_mask),
            ) as u32;
            // The buffer grows back-to-front, so the file identifier is
            // written first to land at bytes 4..8, leaving the root offset
            // in front of it at bytes 0..4 as the flatbuffers layout
            // requires.
            self.write(&file_identifier);
            self.write(&(offset - root.offset).to_le_bytes());
        } else {
            let offset = self.prepare_write(
                4,
                <Offset<T> as Primitive>::ALIGNMENT_MASK.max(self.alignment_mask),
            ) as u32;
            self.write(&(offset - root.offset).to_le_bytes());
        }
        debug_assert_eq!(self.delayed_bytes, 0);
        self.as_slice()
    }

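    /// Returns the bytes written so far.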
    pub fn as_slice(&self) -> &[u8] {
        self.inner.as_slice()
    }
}

#[cfg(test)]
mod tests {
    use rand::Rng;

    use super::*;

    #[test]
    fn test_buffer_random() {
        let mut slice = [0; 128];
        let mut rng = rand::rng();
        let mut back_offsets: alloc::vec::Vec<(usize, usize, usize)> = alloc::vec::Vec::new();

        for _ in 0..50 {
            let mut builder = Builder::new();
            back_offsets.clear();

            for byte in 1..50 {
                // Write a recognizable run of bytes with a random size and
                // alignment, and remember where it should end up.
                let size: usize = rng.random::<u32>() as usize % slice.len();
                let slice = &mut slice[..size];
                for p in &mut *slice {
                    *p = byte;
                }
                let alignment: usize = 1 << (rng.random::<u32>() % 5);
                let alignment_mask = alignment - 1;
                let offset = builder.prepare_write(size, alignment_mask);
                let len_before = builder.inner.len();
                builder.write(slice);
                assert_eq!(offset, builder.len());
                assert!(builder.inner.len() < len_before + slice.len() + alignment);
                back_offsets.push((builder.inner.len(), size, alignment));
            }
            let random_padding: usize = rng.random::<u32>() as usize % slice.len();
            let slice = &mut slice[..random_padding];
            for p in &mut *slice {
                *p = rng.random();
            }
            builder.prepare_write(random_padding, 1);
            builder.write(slice);
            let buffer = builder.finish(builder.current_offset::<()>(), None);

            // Every run must end up at a front-relative offset that
            // respects its alignment and still contain its fill byte.
            for (i, (back_offset, size, alignment)) in back_offsets.iter().enumerate() {
                let byte = (i + 1) as u8;
                let offset = buffer.len() - back_offset;
                assert_eq!(offset % alignment, 0);
                assert!(buffer[offset..offset + size].iter().all(|&b| b == byte));
            }
        }
    }

    #[test]
    fn test_buffer_align() {
        let mut builder = Builder::new();
        builder.prepare_write(3, 0);
        builder.write(b"MNO");
        assert_eq!(builder.delayed_bytes, 0);
        builder.prepare_write(4, 1);
        builder.write(b"IJKL");
        assert_eq!(builder.delayed_bytes, 0);
        builder.prepare_write(8, 3);
        builder.write(b"ABCDEFGH");
        assert_eq!(builder.delayed_bytes, 0);
        builder.prepare_write(7, 0);
        builder.write(b"0123456");
        assert_eq!(
            builder.finish(builder.current_offset::<()>(), None),
            b"\x05\x00\x00\x00\x000123456ABCDEFGHIJKLMNO"
        );

        builder.clear();
        builder.prepare_write(4, 3);
        builder.write(b"IJKL");
        assert_eq!(builder.delayed_bytes, 0);
        builder.prepare_write(1, 0);
        builder.write(b"X");
        assert_eq!(builder.delayed_bytes, 3);
        builder.prepare_write(1, 0);
        builder.write(b"Y");
        assert_eq!(builder.delayed_bytes, 2);
        builder.prepare_write(8, 7);
        builder.write(b"ABCDEFGH");
        assert_eq!(builder.delayed_bytes, 0);
        assert_eq!(
            builder.finish(builder.current_offset::<()>(), None),
            b"\x08\x00\x00\x00\x00\x00\x00\x00ABCDEFGH\x00\x00YXIJKL"
        );
    }
}
372}