#[cfg(feature = "alloc")]
use core::{marker::PhantomData, ptr::NonNull};
use core::{mem::{self, MaybeUninit}, num::NonZero};

use crate::{Flat, emitter::Pos, list::Segment};

/// A growable, aligned byte buffer that backs flat encoding.
///
/// # Safety
///
/// Implementors must keep the backing storage aligned to at least
/// `Self::ALIGN`, and `len`/`capacity` must accurately describe the
/// initialized and reserved byte ranges.
pub unsafe trait Buf: Sized {
    /// Minimum alignment of the backing storage, in bytes.
    const ALIGN: usize;

    /// Returns an empty buffer.
    fn empty() -> Self;

    /// Returns a pointer to the first byte of the buffer.
    fn as_ptr(&self) -> *const u8;

    /// Returns a mutable pointer to the first byte of the buffer.
    fn as_mut_ptr(&mut self) -> *mut u8;

    /// Returns the initialized bytes as a slice.
    fn as_bytes(&self) -> &[u8];

    /// Returns the number of initialized bytes.
    fn len(&self) -> u32;

    /// Returns `true` if the buffer holds no bytes.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns how many bytes the buffer can hold without growing.
    fn capacity(&self) -> u32;

    /// Resizes the buffer to `new_len` bytes, filling any new bytes with `fill`.
    fn resize(&mut self, new_len: u32, fill: u8);

    /// Ensures there is room for at least `additional` more bytes.
    fn reserve(&mut self, additional: u32);

    /// Appends `data` to the end of the buffer.
    fn extend_from_slice(&mut self, data: &[u8]);

    /// Zero-pads the buffer until `len` is a multiple of `align`.
    fn align_to(&mut self, align: usize) {
        let rem = (self.len() as usize) % align;
        if rem != 0 {
            let pad = (align - rem) as u32;
            self.resize(self.len() + pad, 0);
        }
    }

    /// Reserves a zero-initialized, suitably aligned slot for a `U` and
    /// returns its position.
    fn alloc<U: Flat>(&mut self) -> Pos {
        const {
            assert!(
                align_of::<U>() <= Self::ALIGN,
                "allocated type alignment exceeds buffer alignment"
            );
        }
        self.align_to(align_of::<U>());
        let pos = Pos(self.len());
        let size = size_of::<U>() as u32;
        self.resize(self.len() + size, 0);
        pos
    }

    /// Exposes the buffer pointer's provenance so addresses derived from it
    /// stay valid under strict-provenance rules.
    fn expose_provenance(&self) {
        let _ = self.as_ptr().expose_provenance();
    }
}
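
// A small sketch of code that is generic over any `Buf` implementation.
// `append_padded` and this test module are illustrative additions, not part
// of the crate's API.
#[cfg(test)]
mod buf_trait_demo {
    use super::*;

    // Append raw bytes, then zero-pad up to the buffer's 8-byte grain.
    fn append_padded(buf: &mut impl Buf, bytes: &[u8]) {
        buf.extend_from_slice(bytes);
        buf.align_to(8);
    }

    #[test]
    fn padding_rounds_up() {
        let mut buf = FixedBuf::<32>::new();
        append_padded(&mut buf, &[1, 2, 3]);
        assert_eq!(buf.len(), 8);
        assert_eq!(buf.as_bytes(), &[1, 2, 3, 0, 0, 0, 0, 0]);
    }
}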

/// A fixed-capacity buffer stored inline, aligned to 8 bytes and requiring
/// no heap allocation.
#[repr(C, align(8))]
pub struct FixedBuf<const N: usize> {
    data: [MaybeUninit<u8>; N],
    len: u32,
}

impl<const N: usize> FixedBuf<N> {
    /// Creates an empty buffer.
    #[must_use]
    pub const fn new() -> Self {
        Self { data: [MaybeUninit::uninit(); N], len: 0 }
    }
}

impl<const N: usize> Default for FixedBuf<N> {
    fn default() -> Self {
        Self::new()
    }
}

// SAFETY: the backing array is `#[repr(C, align(8))]`, and `len` never
// exceeds `N`, so the first `len` bytes are always initialized and in bounds.
unsafe impl<const N: usize> Buf for FixedBuf<N> {
    const ALIGN: usize = 8;

    fn empty() -> Self {
        Self::new()
    }

    fn as_ptr(&self) -> *const u8 {
        self.data.as_ptr().cast()
    }

    fn as_mut_ptr(&mut self) -> *mut u8 {
        self.data.as_mut_ptr().cast()
    }

    fn as_bytes(&self) -> &[u8] {
        if self.len == 0 {
            return &[];
        }
        // SAFETY: the first `len` bytes are initialized.
        unsafe { core::slice::from_raw_parts(self.data.as_ptr().cast(), self.len as usize) }
    }

    fn len(&self) -> u32 {
        self.len
    }

    fn capacity(&self) -> u32 {
        N as u32
    }

    fn resize(&mut self, new_len: u32, fill: u8) {
        assert!(
            new_len as usize <= N,
            "FixedBuf capacity exceeded: requested {new_len}, capacity {N}"
        );
        if new_len > self.len {
            // SAFETY: `len..new_len` is in bounds per the assert above.
            unsafe {
                core::ptr::write_bytes(
                    self.data.as_mut_ptr().add(self.len as usize).cast::<u8>(),
                    fill,
                    (new_len - self.len) as usize,
                );
            }
        }
        self.len = new_len;
    }

    fn reserve(&mut self, additional: u32) {
        let required = self.len.checked_add(additional).expect("capacity overflow");
        assert!(
            required as usize <= N,
            "FixedBuf capacity exceeded: need {required}, capacity {N}"
        );
    }

    fn extend_from_slice(&mut self, data: &[u8]) {
        let n = data.len() as u32;
        self.reserve(n);
        // SAFETY: `reserve` guarantees `len + n <= N`, and `data` cannot
        // alias our inline storage through a `&mut self` borrow.
        unsafe {
            core::ptr::copy_nonoverlapping(
                data.as_ptr(),
                self.data.as_mut_ptr().add(self.len as usize).cast(),
                data.len(),
            );
        }
        self.len += n;
    }
}

impl<const N: usize> Clone for FixedBuf<N> {
    fn clone(&self) -> Self {
        let mut new = Self::new();
        if self.len > 0 {
            // SAFETY: both buffers have capacity `N >= len`, and they do
            // not overlap.
            unsafe {
                core::ptr::copy_nonoverlapping(
                    self.data.as_ptr(),
                    new.data.as_mut_ptr(),
                    self.len as usize,
                );
            }
            new.len = self.len;
        }
        new
    }
}

// SAFETY: `FixedBuf` is plain bytes plus a length, with no interior
// mutability or shared state.
unsafe impl<const N: usize> Send for FixedBuf<N> {}

// SAFETY: see the `Send` impl above.
unsafe impl<const N: usize> Sync for FixedBuf<N> {}
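
// A minimal usage sketch for `FixedBuf` (an illustrative test module, not
// part of the original API).
#[cfg(test)]
mod fixed_buf_demo {
    use super::*;

    #[test]
    fn resize_fills_and_shrinks() {
        let mut buf = FixedBuf::<8>::new();
        buf.resize(4, 0xAB);
        assert_eq!(buf.as_bytes(), &[0xAB; 4]);
        // Shrinking only moves `len`; no bytes are touched.
        buf.resize(2, 0);
        assert_eq!(buf.as_bytes(), &[0xAB; 2]);
        assert_eq!(buf.clone().as_bytes(), buf.as_bytes());
    }
}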

/// A heap-allocated byte buffer whose allocation is aligned to
/// `max(align_of::<T>(), 8)`.
///
/// `T` is only used to choose the alignment; no values of `T` are stored.
#[cfg(feature = "alloc")]
pub struct AlignedBuf<T> {
    ptr: NonNull<u8>,
    len: u32,
    cap: u32,
    _type: PhantomData<T>,
}

#[cfg(feature = "alloc")]
impl<T> AlignedBuf<T> {
    const BUF_ALIGN: usize = if align_of::<T>() >= 8 { align_of::<T>() } else { 8 };

    /// Creates an empty buffer without allocating.
    #[must_use]
    pub const fn new() -> Self {
        // A dangling, well-aligned pointer; nothing is allocated until the
        // first `grow`.
        // SAFETY: `BUF_ALIGN` is nonzero, so the pointer is non-null.
        let ptr = unsafe {
            NonNull::new_unchecked(core::ptr::without_provenance_mut(Self::BUF_ALIGN))
        };
        Self { ptr, len: 0, cap: 0, _type: PhantomData }
    }

    /// Creates a buffer with at least `capacity` bytes pre-allocated.
    #[must_use]
    pub fn with_capacity(capacity: u32) -> Self {
        let mut buf = Self::new();
        if capacity > 0 {
            buf.reserve(capacity);
        }
        buf
    }

    /// Grows the allocation to `new_cap` bytes, zeroing the new tail.
    fn grow(&mut self, new_cap: u32) {
        debug_assert!(new_cap > self.cap);
        let align = Self::BUF_ALIGN;
        let new_size = new_cap as usize;

        let ptr = if self.cap == 0 {
            let layout =
                alloc::alloc::Layout::from_size_align(new_size, align).expect("invalid layout");
            // SAFETY: `layout` has nonzero size because `new_cap > 0`.
            let p = unsafe { alloc::alloc::alloc(layout) };
            if p.is_null() {
                alloc::alloc::handle_alloc_error(layout);
            }
            p
        } else {
            let old_layout = alloc::alloc::Layout::from_size_align(self.cap as usize, align)
                .expect("invalid layout");
            // SAFETY: `ptr` was allocated with `old_layout`, and `new_size`
            // is nonzero.
            let p = unsafe { alloc::alloc::realloc(self.ptr.as_ptr(), old_layout, new_size) };
            if p.is_null() {
                alloc::alloc::handle_alloc_error(
                    alloc::alloc::Layout::from_size_align(new_size, align)
                        .expect("invalid layout"),
                );
            }
            p
        };

        // SAFETY: `ptr` is valid for `new_cap` bytes; zero the fresh tail
        // and keep the pointer exposed for strict provenance.
        unsafe {
            core::ptr::write_bytes(ptr.add(self.cap as usize), 0, (new_cap - self.cap) as usize);
            let _ = ptr.expose_provenance();
            self.ptr = NonNull::new_unchecked(ptr);
        }
        self.cap = new_cap;
    }
}

// SAFETY: the allocation is aligned to `BUF_ALIGN >= 8`, and `len <= cap`
// always holds.
#[cfg(feature = "alloc")]
unsafe impl<T> Buf for AlignedBuf<T> {
    const ALIGN: usize = Self::BUF_ALIGN;

    fn empty() -> Self {
        Self::new()
    }

    fn as_ptr(&self) -> *const u8 {
        self.ptr.as_ptr()
    }

    fn as_mut_ptr(&mut self) -> *mut u8 {
        self.ptr.as_ptr()
    }

    fn as_bytes(&self) -> &[u8] {
        if self.len == 0 {
            return &[];
        }
        // SAFETY: the first `len` bytes are initialized.
        unsafe { core::slice::from_raw_parts(self.ptr.as_ptr(), self.len as usize) }
    }

    fn len(&self) -> u32 {
        self.len
    }

    fn capacity(&self) -> u32 {
        self.cap
    }

    fn resize(&mut self, new_len: u32, fill: u8) {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            // SAFETY: `reserve` guarantees `new_len <= cap`.
            unsafe {
                core::ptr::write_bytes(
                    self.ptr.as_ptr().add(self.len as usize),
                    fill,
                    (new_len - self.len) as usize,
                );
            }
        }
        self.len = new_len;
    }

    fn reserve(&mut self, additional: u32) {
        let required = self.len.checked_add(additional).expect("capacity overflow");
        if required <= self.cap {
            return;
        }
        // Grow geometrically, with a 64-byte floor to avoid tiny allocations.
        let new_cap = required.max(self.cap.saturating_mul(2)).max(64);
        self.grow(new_cap);
    }

    fn extend_from_slice(&mut self, data: &[u8]) {
        let n = data.len() as u32;
        self.reserve(n);
        // SAFETY: `reserve` guarantees room for `n` more bytes, and `data`
        // cannot alias our allocation through a `&mut self` borrow.
        unsafe {
            core::ptr::copy_nonoverlapping(
                data.as_ptr(),
                self.ptr.as_ptr().add(self.len as usize),
                data.len(),
            );
        }
        self.len += n;
    }
}

#[cfg(feature = "alloc")]
impl<T> Clone for AlignedBuf<T> {
    fn clone(&self) -> Self {
        if self.cap == 0 {
            return Self::new();
        }
        let align = Self::BUF_ALIGN;
        let layout =
            alloc::alloc::Layout::from_size_align(self.cap as usize, align).expect("invalid layout");
        // SAFETY: `layout` has nonzero size, and `self.ptr` is valid for
        // reads of `len` bytes.
        let ptr = unsafe {
            let p = alloc::alloc::alloc(layout);
            if p.is_null() {
                alloc::alloc::handle_alloc_error(layout);
            }
            core::ptr::copy_nonoverlapping(self.ptr.as_ptr(), p, self.len as usize);
            let _ = p.expose_provenance();
            NonNull::new_unchecked(p)
        };
        Self { ptr, len: self.len, cap: self.cap, _type: PhantomData }
    }
}

#[cfg(feature = "alloc")]
impl<T> Default for AlignedBuf<T> {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(feature = "alloc")]
impl<T> Drop for AlignedBuf<T> {
    fn drop(&mut self) {
        if self.cap == 0 {
            return;
        }
        let align = Self::BUF_ALIGN;
        // SAFETY: `ptr` was allocated with exactly this size and alignment.
        unsafe {
            let layout = alloc::alloc::Layout::from_size_align_unchecked(self.cap as usize, align);
            alloc::alloc::dealloc(self.ptr.as_ptr(), layout);
        }
    }
}

// SAFETY: `AlignedBuf` owns its allocation and stores no values of `T`;
// `PhantomData<T>` is only used to pick the alignment.
#[cfg(feature = "alloc")]
unsafe impl<T> Send for AlignedBuf<T> {}

// SAFETY: see the `Send` impl above.
#[cfg(feature = "alloc")]
unsafe impl<T> Sync for AlignedBuf<T> {}
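
// A minimal usage sketch for `AlignedBuf` (an illustrative test, not part of
// the original API). The `u64` parameter only raises the alignment floor.
#[cfg(all(test, feature = "alloc"))]
mod aligned_buf_demo {
    use super::*;

    #[test]
    fn grows_on_demand() {
        let mut buf = AlignedBuf::<u64>::new();
        assert_eq!(buf.capacity(), 0); // no allocation yet
        buf.extend_from_slice(&[7; 100]);
        assert!(buf.capacity() >= 100);
        assert_eq!(buf.as_bytes(), &[7; 100]);
        assert_eq!(buf.clone().as_bytes(), buf.as_bytes());
    }
}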

/// Writes `val` into the buffer at position `at`, overwriting the bytes
/// already there. `val` is not dropped; its bytes move into the buffer.
///
/// # Safety
///
/// `at` must be a position previously returned by [`Buf::alloc`] for `U`, so
/// that the slot is properly sized and aligned.
pub unsafe fn write_flat<U: Flat>(buf: &mut impl Buf, at: Pos, val: U) {
    let start = at.0 as usize;
    let size = mem::size_of::<U>();
    assert!(
        start + size <= buf.len() as usize,
        "write_flat out of bounds: {}..{} but len is {}",
        start,
        start + size,
        buf.len()
    );
    // SAFETY: the asserted range is in bounds, and `val` is a valid source
    // of `size` bytes.
    unsafe {
        let src = core::ptr::from_ref(&val).cast::<u8>();
        core::ptr::copy_nonoverlapping(src, buf.as_mut_ptr().add(start), size);
    }
    // The bytes now live in the buffer; do not drop the original.
    mem::forget(val);
}
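
// A sketch of the alloc-then-write pattern. It assumes `u32` implements the
// crate's `Flat` trait, which this module does not itself guarantee.
#[cfg(test)]
mod write_flat_demo {
    use super::*;

    #[test]
    fn fill_reserved_slot() {
        let mut buf = FixedBuf::<16>::new();
        let pos = buf.alloc::<u32>();
        // SAFETY: `pos` was just returned by `alloc::<u32>`.
        unsafe { write_flat(&mut buf, pos, 0xDEAD_BEEF_u32) };
        assert_eq!(buf.as_bytes(), &0xDEAD_BEEF_u32.to_ne_bytes());
    }
}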

/// Patches a `NonZero<i32>` near-pointer at `at` with the relative offset
/// `target - at`.
///
/// # Safety
///
/// `at` must point at a previously allocated `NonZero<i32>` slot.
pub unsafe fn patch_near(buf: &mut impl Buf, at: Pos, target: Pos) {
    let rel = i64::from(target.0) - i64::from(at.0);
    let rel_i32: i32 = rel.try_into().expect("near offset overflow");
    let nz = NonZero::new(rel_i32).expect("near offset must be non-zero (target == at)");

    let start = at.0 as usize;
    let size = mem::size_of::<NonZero<i32>>();
    assert!(start + size <= buf.len() as usize, "patch_near out of bounds");
    // SAFETY: the asserted range is in bounds.
    unsafe {
        let src = core::ptr::from_ref(&nz).cast::<u8>();
        core::ptr::copy_nonoverlapping(src, buf.as_mut_ptr().add(start), size);
    }
}
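
// A sketch of patching a forward reference once the target's position is
// known (an illustrative test, not part of the original API).
#[cfg(test)]
mod patch_near_demo {
    use super::*;

    #[test]
    fn encodes_relative_offset() {
        let mut buf = FixedBuf::<16>::new();
        buf.resize(8, 0);
        // SAFETY: bytes 0..4 are in bounds and `target != at`.
        unsafe { patch_near(&mut buf, Pos(0), Pos(8)) };
        let mut raw = [0u8; 4];
        raw.copy_from_slice(&buf.as_bytes()[..4]);
        assert_eq!(i32::from_ne_bytes(raw), 8);
    }
}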

/// Patches a list header at `at`: an `i32` relative offset to the first
/// element, followed by a `u32` element count. Empty lists are encoded with
/// a zero offset.
///
/// # Safety
///
/// `at` must point at a previously allocated 8-byte list header.
pub unsafe fn patch_list_header(buf: &mut impl Buf, at: Pos, target: Pos, len: u32) {
    let off_pos = at.0 as usize;
    let len_pos = off_pos + mem::size_of::<i32>();

    assert!(
        len_pos + mem::size_of::<u32>() <= buf.len() as usize,
        "patch_list_header out of bounds"
    );

    let rel: i32 = if len == 0 {
        0
    } else {
        let r = i64::from(target.0) - i64::from(at.0);
        r.try_into().expect("list header offset overflow")
    };

    // SAFETY: both writes land in the asserted 8-byte range.
    unsafe {
        let buf_ptr = buf.as_mut_ptr();
        core::ptr::copy_nonoverlapping(
            core::ptr::from_ref(&rel).cast::<u8>(),
            buf_ptr.add(off_pos),
            mem::size_of::<i32>(),
        );
        core::ptr::copy_nonoverlapping(
            core::ptr::from_ref(&len).cast::<u8>(),
            buf_ptr.add(len_pos),
            mem::size_of::<u32>(),
        );
    }
}
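
// A sketch of the header layout `patch_list_header` produces (an
// illustrative test, not part of the original API).
#[cfg(test)]
mod patch_list_header_demo {
    use super::*;

    #[test]
    fn empty_list_zeroes_both_fields() {
        let mut buf = FixedBuf::<8>::new();
        buf.resize(8, 0xFF);
        // SAFETY: the 8-byte header at offset 0 is in bounds.
        unsafe { patch_list_header(&mut buf, Pos(0), Pos(0), 0) };
        // offset (i32) = 0 because the list is empty; count (u32) = 0.
        assert_eq!(buf.as_bytes(), &[0u8; 8]);
    }
}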

/// Copies `len` raw bytes from `src` into the buffer at `at`.
///
/// # Safety
///
/// `src` must be valid for reads of `len` bytes and must not overlap the
/// buffer's storage.
pub unsafe fn write_bytes(buf: &mut impl Buf, at: Pos, src: *const u8, len: usize) {
    let start = at.0 as usize;
    assert!(
        start + len <= buf.len() as usize,
        "write_bytes out of bounds: {}..{} but len is {}",
        start,
        start + len,
        buf.len()
    );
    // SAFETY: the destination range is in bounds, and the caller guarantees
    // `src` is valid and non-overlapping.
    unsafe {
        core::ptr::copy_nonoverlapping(src, buf.as_mut_ptr().add(start), len);
    }
}
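
// A sketch of patching raw bytes in place (an illustrative test, not part
// of the original API).
#[cfg(test)]
mod write_bytes_demo {
    use super::*;

    #[test]
    fn overwrites_in_bounds_range() {
        let mut buf = FixedBuf::<8>::new();
        buf.resize(8, 0);
        let payload = [0xDE, 0xAD];
        // SAFETY: bytes 2..4 are in bounds, and `payload` is a separate array.
        unsafe { write_bytes(&mut buf, Pos(2), payload.as_ptr(), payload.len()) };
        assert_eq!(buf.as_bytes(), &[0, 0, 0xDE, 0xAD, 0, 0, 0, 0]);
    }
}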

/// Reserves a `Segment<U>` header followed by `count` zero-initialized `U`
/// values, records `count` in the header's length field, and returns the
/// segment's position.
pub fn alloc_segment<U: Flat>(buf: &mut impl Buf, count: u32) -> Pos {
    buf.align_to(align_of::<Segment<U>>());
    let pos = Pos(buf.len());
    let values_size = count.checked_mul(size_of::<U>() as u32).expect("segment values overflow");
    let total = (size_of::<Segment<U>>() as u32)
        .checked_add(values_size)
        .expect("segment total size overflow");
    buf.resize(buf.len() + total, 0);
    // The length field sits right after the header's `i32` next-offset.
    let len_offset = pos.0 as usize + size_of::<i32>();
    // SAFETY: `resize` just made `len_offset..len_offset + 4` in bounds.
    unsafe {
        core::ptr::copy_nonoverlapping(
            core::ptr::from_ref(&count).cast::<u8>(),
            buf.as_mut_ptr().add(len_offset),
            size_of::<u32>(),
        );
    }
    pos
}
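
// A sketch of allocating a segment and reading back its recorded count. It
// assumes `u32` implements the crate's `Flat` trait, which this module does
// not itself guarantee.
#[cfg(test)]
mod alloc_segment_demo {
    use super::*;

    #[test]
    fn count_lands_after_next_offset() {
        let mut buf = FixedBuf::<64>::new();
        let pos = alloc_segment::<u32>(&mut buf, 3);
        let len_at = pos.0 as usize + size_of::<i32>();
        let mut raw = [0u8; 4];
        raw.copy_from_slice(&buf.as_bytes()[len_at..len_at + 4]);
        assert_eq!(u32::from_ne_bytes(raw), 3);
    }
}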

/// Patches the `i32` next-segment offset at the start of the segment header
/// at `seg_pos` to point at `next_seg_pos`.
///
/// # Safety
///
/// `seg_pos` must point at a previously allocated segment header.
pub unsafe fn patch_segment_next(buf: &mut impl Buf, seg_pos: Pos, next_seg_pos: Pos) {
    let rel = i64::from(next_seg_pos.0) - i64::from(seg_pos.0);
    let rel_i32: i32 = rel.try_into().expect("segment next offset overflow");
    let start = seg_pos.0 as usize;
    assert!(start + mem::size_of::<i32>() <= buf.len() as usize, "patch_segment_next out of bounds");
    // SAFETY: the asserted 4-byte range is in bounds.
    unsafe {
        core::ptr::copy_nonoverlapping(
            core::ptr::from_ref(&rel_i32).cast::<u8>(),
            buf.as_mut_ptr().add(start),
            mem::size_of::<i32>(),
        );
    }
}
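
// A sketch of linking one segment header to the next (an illustrative test,
// not part of the original API; offsets 0 and 16 are arbitrary).
#[cfg(test)]
mod patch_segment_next_demo {
    use super::*;

    #[test]
    fn writes_forward_link() {
        let mut buf = FixedBuf::<32>::new();
        buf.resize(24, 0);
        // SAFETY: the 4-byte `next` field at offset 0 is in bounds.
        unsafe { patch_segment_next(&mut buf, Pos(0), Pos(16)) };
        let mut raw = [0u8; 4];
        raw.copy_from_slice(&buf.as_bytes()[..4]);
        assert_eq!(i32::from_ne_bytes(raw), 16);
    }
}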