// bincode_next/features/relative_ptr.rs
1#![allow(unsafe_code)]
2
3use core::marker::PhantomData;
4
5#[cfg(feature = "alloc")]
6use alloc::string::String;
7#[cfg(feature = "alloc")]
8use alloc::vec::Vec;
9
10/// Indicates that a type has a fixed size known at compile time.
11/// This allows us to perform bounds checking efficiently.
/// Indicates that a type has a fixed size known at compile time.
/// This allows us to perform bounds checking efficiently.
///
/// Implementors should ensure `SIZE` matches the type's in-memory size, since
/// the zero-copy accessors in this module compute byte ranges from it.
pub trait StaticSize {
    /// The size of the type in bytes.
    const SIZE: usize;
}
16
// Implements both `StaticSize` and `ZeroCopy` for the primitive types below:
// size and alignment come straight from the compiler and primitives contain
// no padding bytes.
macro_rules! impl_static_size {
    ($($t:ty),*) => {
        $(
            impl StaticSize for $t {
                const SIZE: usize = core::mem::size_of::<$t>();
            }
            // SAFETY: primitives have a fixed layout and no padding.
            // NOTE(review): `bool` and `char` do have invalid bit patterns;
            // presumably callers validate before casting raw bytes — confirm.
            unsafe impl ZeroCopy for $t {
                const ALIGN: usize = core::mem::align_of::<$t>();
            }
        )*
    };
}

impl_static_size!(
    u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize, f32, f64, char, bool
);
33
/// An array is `N` contiguous elements with no extra padding between them.
impl<T: StaticSize, const N: usize> StaticSize for [T; N] {
    const SIZE: usize = T::SIZE * N;
}
37
/// A marker trait indicating that a type has a fixed, predictable layout (e.g., `#[repr(C)]`)
/// and contains no padding bytes or invalid bit patterns allowing safe zero-copy casting.
///
/// # Safety
///
/// Implementors must guarantee a fixed, predictable layout (e.g., `#[repr(C)]`),
/// no padding bytes, and no invalid bit patterns, so that any in-bounds,
/// correctly aligned byte range may be reinterpreted as the type.
pub unsafe trait ZeroCopy: StaticSize {
    /// The required alignment for this type.
    const ALIGN: usize;
}
48
// SAFETY: an array of a `ZeroCopy` type is itself `ZeroCopy` — elements are
// laid out contiguously with the element's alignment and no extra padding.
unsafe impl<T: ZeroCopy, const N: usize> ZeroCopy for [T; N] {
    const ALIGN: usize = T::ALIGN;
}
52
/// Trait for handling endianness in zero-copy types.
///
/// Implemented below by `LittleEndian`, `BigEndian`, and `NativeEndian`. The
/// `f32`/`f64` helpers have default implementations that route through the
/// corresponding integer conversions on the raw bit patterns.
#[doc(hidden)]
pub trait Endian {
    /// Converts a u16 from native endianness to the relative pointer's endianness.
    fn from_native_u16(v: u16) -> u16;
    /// Converts a u16 from the relative pointer's endianness to native endianness.
    fn to_native_u16(v: u16) -> u16;
    /// Converts a u32 from native endianness to the relative pointer's endianness.
    fn from_native_u32(v: u32) -> u32;
    /// Converts a u32 from the relative pointer's endianness to native endianness.
    fn to_native_u32(v: u32) -> u32;
    /// Converts a u64 from native endianness to the relative pointer's endianness.
    fn from_native_u64(v: u64) -> u64;
    /// Converts a u64 from the relative pointer's endianness to native endianness.
    fn to_native_u64(v: u64) -> u64;
    /// Converts a u128 from native endianness to the relative pointer's endianness.
    fn from_native_u128(v: u128) -> u128;
    /// Converts a u128 from the relative pointer's endianness to native endianness.
    fn to_native_u128(v: u128) -> u128;

    /// Converts a i16 from native endianness to the relative pointer's endianness.
    fn from_native_i16(v: i16) -> i16;
    /// Converts a i16 from the relative pointer's endianness to native endianness.
    fn to_native_i16(v: i16) -> i16;
    /// Converts a i32 from native endianness to the relative pointer's endianness.
    fn from_native_i32(v: i32) -> i32;
    /// Converts a i32 from the relative pointer's endianness to native endianness.
    fn to_native_i32(v: i32) -> i32;
    /// Converts a i64 from native endianness to the relative pointer's endianness.
    fn from_native_i64(v: i64) -> i64;
    /// Converts a i64 from the relative pointer's endianness to native endianness.
    fn to_native_i64(v: i64) -> i64;
    /// Converts a i128 from native endianness to the relative pointer's endianness.
    fn from_native_i128(v: i128) -> i128;
    /// Converts a i128 from the relative pointer's endianness to native endianness.
    fn to_native_i128(v: i128) -> i128;

    #[inline(always)]
    /// Converts a f32 from native endianness to the relative pointer's endianness.
    #[must_use]
    fn from_native_f32(v: f32) -> f32 {
        // Endianness conversion must happen on the raw bits, never on the
        // float value itself.
        f32::from_bits(Self::from_native_u32(v.to_bits()))
    }
    #[inline(always)]
    /// Converts a f32 from the relative pointer's endianness to native endianness.
    #[must_use]
    fn to_native_f32(v: f32) -> f32 {
        f32::from_bits(Self::to_native_u32(v.to_bits()))
    }
    #[inline(always)]
    /// Converts a f64 from native endianness to the relative pointer's endianness.
    #[must_use]
    fn from_native_f64(v: f64) -> f64 {
        f64::from_bits(Self::from_native_u64(v.to_bits()))
    }
    #[inline(always)]
    /// Converts a f64 from the relative pointer's endianness to native endianness.
    #[must_use]
    fn to_native_f64(v: f64) -> f64 {
        f64::from_bits(Self::to_native_u64(v.to_bits()))
    }
}
115
116/// Little-endian marker.
117#[doc(hidden)]
118#[derive(Debug, PartialEq, Eq, Clone, Copy, Default)]
119pub struct LittleEndian;
120impl Endian for LittleEndian {
121    #[inline(always)]
122    fn from_native_u16(v: u16) -> u16 {
123        v.to_le()
124    }
125
126    #[inline(always)]
127    fn to_native_u16(v: u16) -> u16 {
128        u16::from_le(v)
129    }
130
131    #[inline(always)]
132    fn from_native_u32(v: u32) -> u32 {
133        v.to_le()
134    }
135
136    #[inline(always)]
137    fn to_native_u32(v: u32) -> u32 {
138        u32::from_le(v)
139    }
140
141    #[inline(always)]
142    fn from_native_u64(v: u64) -> u64 {
143        v.to_le()
144    }
145
146    #[inline(always)]
147    fn to_native_u64(v: u64) -> u64 {
148        u64::from_le(v)
149    }
150
151    #[inline(always)]
152    fn from_native_u128(v: u128) -> u128 {
153        v.to_le()
154    }
155
156    #[inline(always)]
157    fn to_native_u128(v: u128) -> u128 {
158        u128::from_le(v)
159    }
160
161    #[inline(always)]
162    fn from_native_i16(v: i16) -> i16 {
163        v.to_le()
164    }
165
166    #[inline(always)]
167    fn to_native_i16(v: i16) -> i16 {
168        i16::from_le(v)
169    }
170
171    #[inline(always)]
172    fn from_native_i32(v: i32) -> i32 {
173        v.to_le()
174    }
175
176    #[inline(always)]
177    fn to_native_i32(v: i32) -> i32 {
178        i32::from_le(v)
179    }
180
181    #[inline(always)]
182    fn from_native_i64(v: i64) -> i64 {
183        v.to_le()
184    }
185
186    #[inline(always)]
187    fn to_native_i64(v: i64) -> i64 {
188        i64::from_le(v)
189    }
190
191    #[inline(always)]
192    fn from_native_i128(v: i128) -> i128 {
193        v.to_le()
194    }
195
196    #[inline(always)]
197    fn to_native_i128(v: i128) -> i128 {
198        i128::from_le(v)
199    }
200}
201
202/// Big-endian marker.
203#[doc(hidden)]
204#[derive(Debug, PartialEq, Eq, Clone, Copy, Default)]
205pub struct BigEndian;
206impl Endian for BigEndian {
207    #[inline(always)]
208    fn from_native_u16(v: u16) -> u16 {
209        v.to_be()
210    }
211
212    #[inline(always)]
213    fn to_native_u16(v: u16) -> u16 {
214        u16::from_be(v)
215    }
216
217    #[inline(always)]
218    fn from_native_u32(v: u32) -> u32 {
219        v.to_be()
220    }
221
222    #[inline(always)]
223    fn to_native_u32(v: u32) -> u32 {
224        u32::from_be(v)
225    }
226
227    #[inline(always)]
228    fn from_native_u64(v: u64) -> u64 {
229        v.to_be()
230    }
231
232    #[inline(always)]
233    fn to_native_u64(v: u64) -> u64 {
234        u64::from_be(v)
235    }
236
237    #[inline(always)]
238    fn from_native_u128(v: u128) -> u128 {
239        v.to_be()
240    }
241
242    #[inline(always)]
243    fn to_native_u128(v: u128) -> u128 {
244        u128::from_be(v)
245    }
246
247    #[inline(always)]
248    fn from_native_i16(v: i16) -> i16 {
249        v.to_be()
250    }
251
252    #[inline(always)]
253    fn to_native_i16(v: i16) -> i16 {
254        i16::from_be(v)
255    }
256
257    #[inline(always)]
258    fn from_native_i32(v: i32) -> i32 {
259        v.to_be()
260    }
261
262    #[inline(always)]
263    fn to_native_i32(v: i32) -> i32 {
264        i32::from_be(v)
265    }
266
267    #[inline(always)]
268    fn from_native_i64(v: i64) -> i64 {
269        v.to_be()
270    }
271
272    #[inline(always)]
273    fn to_native_i64(v: i64) -> i64 {
274        i64::from_be(v)
275    }
276
277    #[inline(always)]
278    fn from_native_i128(v: i128) -> i128 {
279        v.to_be()
280    }
281
282    #[inline(always)]
283    fn to_native_i128(v: i128) -> i128 {
284        i128::from_be(v)
285    }
286}
287
288/// Native-endian marker.
289#[doc(hidden)]
290#[derive(Debug, PartialEq, Eq, Clone, Copy, Default)]
291pub struct NativeEndian;
292impl Endian for NativeEndian {
293    #[inline(always)]
294    fn from_native_u16(v: u16) -> u16 {
295        v
296    }
297
298    #[inline(always)]
299    fn to_native_u16(v: u16) -> u16 {
300        v
301    }
302
303    #[inline(always)]
304    fn from_native_u32(v: u32) -> u32 {
305        v
306    }
307
308    #[inline(always)]
309    fn to_native_u32(v: u32) -> u32 {
310        v
311    }
312
313    #[inline(always)]
314    fn from_native_u64(v: u64) -> u64 {
315        v
316    }
317
318    #[inline(always)]
319    fn to_native_u64(v: u64) -> u64 {
320        v
321    }
322
323    #[inline(always)]
324    fn from_native_u128(v: u128) -> u128 {
325        v
326    }
327
328    #[inline(always)]
329    fn to_native_u128(v: u128) -> u128 {
330        v
331    }
332
333    #[inline(always)]
334    fn from_native_i16(v: i16) -> i16 {
335        v
336    }
337
338    #[inline(always)]
339    fn to_native_i16(v: i16) -> i16 {
340        v
341    }
342
343    #[inline(always)]
344    fn from_native_i32(v: i32) -> i32 {
345        v
346    }
347
348    #[inline(always)]
349    fn to_native_i32(v: i32) -> i32 {
350        v
351    }
352
353    #[inline(always)]
354    fn from_native_i64(v: i64) -> i64 {
355        v
356    }
357
358    #[inline(always)]
359    fn to_native_i64(v: i64) -> i64 {
360        v
361    }
362
363    #[inline(always)]
364    fn from_native_i128(v: i128) -> i128 {
365        v
366    }
367
368    #[inline(always)]
369    fn to_native_i128(v: i128) -> i128 {
370        v
371    }
372}
373
/// A relative pointer that stores the offset from its own address to the target data.
/// This allows zero-copy deserialization without runtime allocations.
///
/// The offset is an `i32` stored in the byte order selected by `E`, so the
/// target must lie within roughly ±2 GiB of the pointer itself.
#[repr(transparent)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct RelativePtr<T, const ALIGN: usize, E: Endian = NativeEndian> {
    // Raw offset in `E` byte order; converted on access, not on store.
    offset: i32,
    // Carries the pointee type and endianness without storing either.
    _marker: PhantomData<(T, E)>,
}
382
383impl<T, const ALIGN: usize, E: Endian> RelativePtr<T, ALIGN, E> {
384    /// Creates a new `RelativePtr` with the given native offset.
385    #[must_use]
386    pub fn new(offset: i32) -> Self {
387        Self {
388            offset: E::from_native_i32(offset),
389            _marker: PhantomData,
390        }
391    }
392
393    /// Resolves the pointer within the given buffer.
394    /// Returns `Some(&T)` if the computed pointer is within the bounds of `buffer`
395    /// and correctly aligned. Otherwise, returns `None`.
396    #[must_use]
397    pub fn get<'a>(
398        &self,
399        buffer: &'a [u8],
400    ) -> Option<&'a T>
401    where
402        T: ZeroCopy,
403    {
404        // Compile-time check: alignment must be a power of two, and sufficiently large for T
405        const {
406            let effective_align = if ALIGN == 0 {
407                T::ALIGN
408            } else {
409                ALIGN
410            };
411            assert!(
412                effective_align > 0 && effective_align.is_power_of_two(),
413                "Alignment must be a power of two"
414            );
415            assert!(
416                effective_align >= core::mem::align_of::<T>(),
417                "ALIGN must be at least the natural alignment of T"
418            );
419        };
420
421        let self_ptr = core::ptr::from_ref(self) as usize;
422        let buffer_start = buffer.as_ptr() as usize;
423        let buffer_end = buffer_start + buffer.len();
424
425        let self_end = self_ptr + core::mem::size_of::<Self>();
426
427        if self_ptr < buffer_start || self_end > buffer_end {
428            return None;
429        }
430
431        let offset = E::to_native_i32(unsafe { core::ptr::addr_of!(self.offset).read_unaligned() });
432
433        // Here we use wrapping arithmetic because the offset is relative to the pointer's address
434        // and could potentially wrap around the address space. But in the CPU, the address space
435        // is a linear ring, so we can use wrapping arithmetic to avoid overflow.
436        let target_addr = if offset >= 0 {
437            self_ptr.wrapping_add(offset as usize)
438        } else {
439            self_ptr.wrapping_sub(offset.unsigned_abs() as usize)
440        };
441
442        let target_end = target_addr.wrapping_add(T::SIZE);
443
444        if target_addr < buffer_start || target_end > buffer_end {
445            return None;
446        }
447
448        let effective_align = if ALIGN == 0 {
449            T::ALIGN
450        } else {
451            ALIGN
452        };
453        // Runtime alignment check
454        if target_addr % effective_align != 0 {
455            return None;
456        }
457
458        // Derive target_ptr from buffer.as_ptr() to maintain provenance over the whole buffer
459        let target_offset_in_buffer = target_addr - buffer_start;
460        let target_ptr = unsafe { buffer.as_ptr().add(target_offset_in_buffer) };
461
462        // Safe because we bounds checked against the buffer, alignment checked,
463        // and we derive the lifetime from the buffer.
464        Some(unsafe { &*target_ptr.cast::<T>() })
465    }
466}
467
#[cfg(feature = "fuzzing")]
impl<'a, T, const ALIGN: usize, E: Endian> arbitrary::Arbitrary<'a> for RelativePtr<T, ALIGN, E> {
    /// Produces a pointer with an arbitrary offset; `get` performs all bounds
    /// and alignment checks, so any offset is safe to generate.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        Ok(Self::new(i32::arbitrary(u)?))
    }
}
474
impl<T, const ALIGN: usize, E: Endian> StaticSize for RelativePtr<T, ALIGN, E> {
    // The size of the `i32` offset: the struct is `repr(transparent)` over it.
    const SIZE: usize = core::mem::size_of::<Self>();
}
478
// SAFETY: `repr(transparent)` over an `i32`; every bit pattern is a valid
// offset, and validity of the target is only checked at access time.
unsafe impl<T, const ALIGN: usize, E: Endian> ZeroCopy for RelativePtr<T, ALIGN, E> {
    const ALIGN: usize = core::mem::align_of::<i32>();
}
482
/// A zero-copy array collection equivalent to `[T; N]`.
///
/// Stored as a single relative pointer to the `N` contiguous elements.
#[repr(transparent)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct ZeroArray<T, const N: usize, const ALIGN: usize, E: Endian = NativeEndian> {
    // Relative pointer to the whole `[T; N]` block.
    ptr: RelativePtr<[T; N], ALIGN, E>,
}
489
490impl<T: ZeroCopy, const N: usize, const ALIGN: usize, E: Endian> ZeroArray<T, N, ALIGN, E> {
491    /// Resolves the array within the given buffer.
492    #[must_use]
493    pub fn get<'a>(
494        &self,
495        buffer: &'a [u8],
496    ) -> Option<&'a [T; N]> {
497        self.ptr.get(buffer)
498    }
499}
500
#[cfg(feature = "fuzzing")]
impl<'a, T: ZeroCopy, const N: usize, const ALIGN: usize, E: Endian> arbitrary::Arbitrary<'a>
    for ZeroArray<T, N, ALIGN, E>
{
    /// Builds an array around an arbitrary (possibly dangling) relative
    /// pointer; `get` re-validates it, so any value is safe to produce.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        Ok(Self {
            ptr: arbitrary::Arbitrary::arbitrary(u)?,
        })
    }
}
511
impl<T, const N: usize, const ALIGN: usize, E: Endian> StaticSize for ZeroArray<T, N, ALIGN, E> {
    // Same size as the single inner relative pointer (`repr(transparent)`).
    const SIZE: usize = core::mem::size_of::<Self>();
}
515
// Implement ZeroCopy for zero-copy containers since their layouts are guaranteed
// SAFETY: `repr(transparent)` over `RelativePtr`, which is itself transparent
// over an `i32` with no invalid bit patterns.
unsafe impl<T: ZeroCopy, const N: usize, const ALIGN: usize, E: Endian> ZeroCopy
    for ZeroArray<T, N, ALIGN, E>
{
    const ALIGN: usize = core::mem::align_of::<i32>();
}
522
/// A zero-copy string type with compile-time known max capacity, stored inline.
///
/// NOTE(review): with `repr(C)` this struct has trailing padding whenever
/// `CAP % 4 != 0`, which is in tension with the `ZeroCopy` "no padding"
/// contract — confirm serialization always writes those padding bytes.
#[repr(C)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct ZeroString<const CAP: usize, E: Endian = NativeEndian> {
    // Logical length in `E` byte order; clamped to `CAP` on read.
    len: u32,
    // Inline byte storage; only the first `len` bytes are meaningful.
    data: [u8; CAP],
    _marker: PhantomData<E>,
}
531
532impl<const CAP: usize, E: Endian> ZeroString<CAP, E> {
533    /// Resolves the string. Inline strings don't necessarily need the buffer.
534    #[must_use]
535    pub fn get(&self) -> Option<&str> {
536        let len =
537            E::to_native_u32(unsafe { core::ptr::addr_of!(self.len).read_unaligned() }) as usize;
538        let len = len.min(CAP);
539        core::str::from_utf8(&self.data[..len]).ok()
540    }
541}
542
#[cfg(feature = "fuzzing")]
impl<'a, const CAP: usize, E: Endian> arbitrary::Arbitrary<'a> for ZeroString<CAP, E> {
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // Keep the generated length within capacity so the filled bytes are
        // always meaningful.
        let len = u32::arbitrary(u)? % (CAP as u32 + 1);
        let mut data = [0u8; CAP];
        u.fill_buffer(&mut data[..len as usize])?;
        Ok(Self {
            // Stored in `E` byte order, matching what `get` expects.
            len: E::from_native_u32(len),
            data,
            _marker: PhantomData,
        })
    }
}
556
impl<const CAP: usize, E: Endian> StaticSize for ZeroString<CAP, E> {
    // `u32` length + `CAP` inline bytes (plus any `repr(C)` trailing padding).
    const SIZE: usize = core::mem::size_of::<Self>();
}
560
// SAFETY: `repr(C)` gives a fixed layout; any byte pattern yields a valid
// (possibly non-UTF-8) value, which `get` re-validates on access.
// NOTE(review): trailing padding exists when `CAP % 4 != 0` — confirm this
// does not violate the `ZeroCopy` no-padding contract for serialization.
unsafe impl<const CAP: usize, E: Endian> ZeroCopy for ZeroString<CAP, E> {
    const ALIGN: usize = core::mem::align_of::<u32>();
}
564
/// A zero-copy slice collection conceptually equivalent to `&[T]` or `Vec<T>`.
#[repr(C)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct ZeroSlice<T, const ALIGN: usize, E: Endian = NativeEndian> {
    // Element count in `E` byte order.
    len: u32,
    // Relative pointer to the first element.
    ptr: RelativePtr<T, ALIGN, E>,
}
572
573impl<T: ZeroCopy, const ALIGN: usize, E: Endian> ZeroSlice<T, ALIGN, E> {
574    /// Creates a new `ZeroSlice` with the given native length and relative offset.
575    #[must_use]
576    pub fn new(
577        len: u32,
578        offset: i32,
579    ) -> Self {
580        Self {
581            len: E::from_native_u32(len),
582            ptr: RelativePtr::new(offset),
583        }
584    }
585
586    /// Resolves the slice within the given buffer.
587    #[must_use]
588    pub fn get<'a>(
589        &self,
590        buffer: &'a [u8],
591    ) -> Option<&'a [T]> {
592        let len = E::to_native_u32(unsafe { core::ptr::addr_of!(self.len).read_unaligned() });
593
594        if len == 0 {
595            // For zero-length slices, we can just return an empty slice,
596            // bypassing the pointer lookup (which might be invalid or dummy).
597            return Some(&[]);
598        }
599
600        // Get the first element's reference to validate base bounds, alignment, and offset
601        let first_ref = self.ptr.get(buffer)?;
602
603        let slice_len = len as usize;
604
605        // Calculate the total size required for the full slice.
606        let total_size = T::SIZE.checked_mul(slice_len)?;
607
608        let first_addr = core::ptr::from_ref::<T>(first_ref) as usize;
609        let buffer_start = buffer.as_ptr() as usize;
610        let target_offset = first_addr - buffer_start;
611
612        let target_end = target_offset.checked_add(total_size)?;
613        if target_end > buffer.len() {
614            return None;
615        }
616
617        // Derive target_ptr from buffer.as_ptr() to maintain provenance over the whole slice
618        let target_ptr = unsafe { buffer.as_ptr().add(target_offset).cast::<T>() };
619
620        Some(unsafe { core::slice::from_raw_parts(target_ptr, slice_len) })
621    }
622}
623
#[cfg(feature = "fuzzing")]
impl<'a, T: ZeroCopy, const ALIGN: usize, E: Endian> arbitrary::Arbitrary<'a>
    for ZeroSlice<T, ALIGN, E>
{
    /// Produces an arbitrary length/offset pair; `get` re-validates both, so
    /// any combination is safe to generate.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        Ok(Self::new(u32::arbitrary(u)?, i32::arbitrary(u)?))
    }
}
632
impl<T, const ALIGN: usize, E: Endian> StaticSize for ZeroSlice<T, ALIGN, E> {
    // `u32` length + `i32` relative pointer; both 4-byte aligned, no padding.
    const SIZE: usize = core::mem::size_of::<Self>();
}
636
// SAFETY: `repr(C)` over `u32` + `RelativePtr` (an `i32`): fixed layout, no
// padding (both fields are 4 bytes), no invalid bit patterns.
unsafe impl<T: ZeroCopy, const ALIGN: usize, E: Endian> ZeroCopy for ZeroSlice<T, ALIGN, E> {
    const ALIGN: usize = core::mem::align_of::<u32>();
}
640
/// A dynamically sized zero-copy string conceptually equivalent to `&str` or `String`.
#[repr(transparent)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct ZeroStr<E: Endian = NativeEndian> {
    // Raw bytes; UTF-8 validity is checked on every `get`.
    slice: ZeroSlice<u8, 0, E>,
}
647
648impl<E: Endian> ZeroStr<E> {
649    /// Creates a new instance.
650    #[must_use]
651    pub fn new(
652        len: u32,
653        offset: i32,
654    ) -> Self {
655        Self {
656            slice: ZeroSlice::new(len, offset),
657        }
658    }
659
660    /// Returns a reference to the underlying data if valid.
661    #[must_use]
662    pub fn get<'a>(
663        &self,
664        buffer: &'a [u8],
665    ) -> Option<&'a str> {
666        let bytes = self.slice.get(buffer)?;
667        core::str::from_utf8(bytes).ok()
668    }
669}
670
#[cfg(feature = "fuzzing")]
impl<'a, E: Endian> arbitrary::Arbitrary<'a> for ZeroStr<E> {
    /// Delegates to the inner `ZeroSlice`; `get` re-validates bounds and UTF-8.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        Ok(Self {
            slice: arbitrary::Arbitrary::arbitrary(u)?,
        })
    }
}
679
impl<E: Endian> StaticSize for ZeroStr<E> {
    // Same size as the wrapped `ZeroSlice<u8>` (`repr(transparent)`).
    const SIZE: usize = core::mem::size_of::<Self>();
}
683
// SAFETY: `repr(transparent)` over `ZeroSlice<u8>`, which is itself ZeroCopy.
unsafe impl<E: Endian> ZeroCopy for ZeroStr<E> {
    const ALIGN: usize = core::mem::align_of::<u32>();
}
687
/// A trait for validating zero-copy types.
///
/// This is a shallow check: it confirms the value itself resolves within the
/// buffer without recursing into pointed-to elements (see [`DeepValidator`]).
pub trait Validator {
    /// Returns `true` if the value is valid within the given buffer.
    fn is_valid(
        &self,
        buffer: &[u8],
    ) -> bool;
}
696
impl<T: ZeroCopy, const ALIGN: usize, E: Endian> Validator for RelativePtr<T, ALIGN, E> {
    /// Checks if the relative pointer is valid within the given buffer
    /// (bounds and alignment checks are delegated to [`RelativePtr::get`]).
    fn is_valid(
        &self,
        buffer: &[u8],
    ) -> bool {
        self.get(buffer).is_some()
    }
}
706
impl<const CAP: usize, E: Endian> Validator for ZeroString<CAP, E> {
    /// Checks if the inline string holds valid UTF-8; the buffer is unused
    /// because the data is stored inline.
    fn is_valid(
        &self,
        _buffer: &[u8],
    ) -> bool {
        self.get().is_some()
    }
}
716
impl<T: ZeroCopy, const ALIGN: usize, E: Endian> Validator for ZeroSlice<T, ALIGN, E> {
    /// Checks if the slice resolves fully in bounds within the given buffer.
    fn is_valid(
        &self,
        buffer: &[u8],
    ) -> bool {
        self.get(buffer).is_some()
    }
}
726
impl<E: Endian> Validator for ZeroStr<E> {
    /// Checks if the string resolves in bounds and holds valid UTF-8.
    fn is_valid(
        &self,
        buffer: &[u8],
    ) -> bool {
        self.get(buffer).is_some()
    }
}
736
// Primitives are always structurally valid — they contain no pointers to chase.
macro_rules! impl_validator_primitive {
    ($($t:ty),*) => {
        $(
            impl Validator for $t {
                /// Primitive values are always valid; no buffer access needed.
                fn is_valid(&self, _buffer: &[u8]) -> bool {
                    true
                }
            }
        )*
    };
}

impl_validator_primitive!(
    u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize, f32, f64, char, bool
);
753
impl<T: ZeroCopy, const N: usize, const ALIGN: usize, E: Endian> Validator
    for ZeroArray<T, N, ALIGN, E>
{
    /// Checks if the array resolves fully in bounds within the given buffer.
    fn is_valid(
        &self,
        buffer: &[u8],
    ) -> bool {
        self.get(buffer).is_some()
    }
}
765
766impl<T: ZeroCopy + Validator, const N: usize> Validator for [T; N] {
767    /// Checks if the relative pointer is valid within the given buffer.
768    fn is_valid(
769        &self,
770        buffer: &[u8],
771    ) -> bool {
772        for item in self {
773            if !item.is_valid(buffer) {
774                return false;
775            }
776        }
777        true
778    }
779}
780
/// A trait for deep validation of zero-copy structures, recursively checking all pointers.
pub trait DeepValidator: Validator {
    /// Returns `true` if the value and everything it (transitively) points to
    /// are valid within the given buffer.
    fn is_valid_deep(
        &self,
        buffer: &[u8],
    ) -> bool;
}
789
// Primitives contain no pointers, so deep validation is trivially true.
macro_rules! impl_deep_validator_primitive {
    ($($t:ty),*) => {
        $(
            impl DeepValidator for $t {
                /// Primitive values are always deeply valid; nothing to recurse into.
                fn is_valid_deep(&self, _buffer: &[u8]) -> bool {
                    true
                }
            }
        )*
    };
}

impl_deep_validator_primitive!(
    u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize, f32, f64, char, bool
);
806
807impl<T: ZeroCopy + DeepValidator, const N: usize> DeepValidator for [T; N] {
808    /// Performs a deep validation check on the relative pointer.
809    fn is_valid_deep(
810        &self,
811        buffer: &[u8],
812    ) -> bool {
813        for item in self {
814            if !item.is_valid_deep(buffer) {
815                return false;
816            }
817        }
818        true
819    }
820}
821
impl<T: ZeroCopy + DeepValidator, const ALIGN: usize, E: Endian> DeepValidator
    for RelativePtr<T, ALIGN, E>
{
    /// The pointer must resolve, and its target must itself deep-validate.
    fn is_valid_deep(
        &self,
        buffer: &[u8],
    ) -> bool {
        self.get(buffer)
            .is_some_and(|target| target.is_valid_deep(buffer))
    }
}
834
impl<const CAP: usize, E: Endian> DeepValidator for ZeroString<CAP, E> {
    /// Inline strings contain no pointers; deep validation equals shallow.
    fn is_valid_deep(
        &self,
        _buffer: &[u8],
    ) -> bool {
        self.get().is_some()
    }
}
844
845impl<T: ZeroCopy + DeepValidator, const ALIGN: usize, E: Endian> DeepValidator
846    for ZeroSlice<T, ALIGN, E>
847{
848    /// Performs a deep validation check on the relative pointer.
849    fn is_valid_deep(
850        &self,
851        buffer: &[u8],
852    ) -> bool {
853        if let Some(slice) = self.get(buffer) {
854            for item in slice {
855                if !item.is_valid_deep(buffer) {
856                    return false;
857                }
858            }
859            true
860        } else {
861            false
862        }
863    }
864}
865
impl<E: Endian> DeepValidator for ZeroStr<E> {
    /// `u8` elements contain no pointers; deep validation equals shallow.
    fn is_valid_deep(
        &self,
        buffer: &[u8],
    ) -> bool {
        self.get(buffer).is_some()
    }
}
875
876impl<T: ZeroCopy + DeepValidator, const N: usize, const ALIGN: usize, E: Endian> DeepValidator
877    for ZeroArray<T, N, ALIGN, E>
878{
879    /// Performs a deep validation check on the relative pointer.
880    fn is_valid_deep(
881        &self,
882        buffer: &[u8],
883    ) -> bool {
884        if let Some(arr) = self.get(buffer) {
885            for item in arr {
886                if !item.is_valid_deep(buffer) {
887                    return false;
888                }
889            }
890            true
891        } else {
892            false
893        }
894    }
895}
896
897// Bincode integration
898use crate::de::BorrowDecode;
899use crate::de::BorrowDecoder;
900use crate::de::Decode;
901use crate::de::Decoder;
902use crate::enc::Encode;
903use crate::enc::Encoder;
904use crate::error::DecodeError;
905use crate::error::EncodeError;
906
impl<T: StaticSize, const ALIGN: usize, E: Endian, Context> Decode<Context>
    for RelativePtr<T, ALIGN, E>
{
    /// Decodes the raw offset as stored; no endianness conversion is applied here.
    fn decode<D: Decoder<Context = Context>>(decoder: &mut D) -> Result<Self, DecodeError> {
        Ok(Self {
            offset: i32::decode(decoder)?,
            _marker: PhantomData,
        })
    }
}
impl<'de, T: StaticSize, const ALIGN: usize, E: Endian, Context> BorrowDecode<'de, Context>
    for RelativePtr<T, ALIGN, E>
{
    /// Decodes the raw offset as stored; no endianness conversion is applied here.
    fn borrow_decode<D: BorrowDecoder<'de, Context = Context>>(
        decoder: &mut D
    ) -> Result<Self, DecodeError> {
        Ok(Self {
            offset: i32::borrow_decode(decoder)?,
            _marker: PhantomData,
        })
    }
}
impl<T: StaticSize, const ALIGN: usize, E: Endian> Encode for RelativePtr<T, ALIGN, E> {
    /// Encodes the raw stored offset; no endianness conversion is applied here.
    fn encode<EN: Encoder>(
        &self,
        encoder: &mut EN,
    ) -> Result<(), EncodeError> {
        self.offset.encode(encoder)
    }
}
937
impl<const CAP: usize, E: Endian, Context> Decode<Context> for ZeroString<CAP, E> {
    /// Decodes the raw length followed by the full `CAP`-byte backing array.
    fn decode<D: Decoder<Context = Context>>(decoder: &mut D) -> Result<Self, DecodeError> {
        Ok(Self {
            len: u32::decode(decoder)?,
            data: Decode::decode(decoder)?,
            _marker: PhantomData,
        })
    }
}
impl<'de, const CAP: usize, E: Endian, Context> BorrowDecode<'de, Context> for ZeroString<CAP, E> {
    /// Decodes the raw length followed by the full `CAP`-byte backing array.
    fn borrow_decode<D: BorrowDecoder<'de, Context = Context>>(
        decoder: &mut D
    ) -> Result<Self, DecodeError> {
        Ok(Self {
            len: u32::borrow_decode(decoder)?,
            data: BorrowDecode::borrow_decode(decoder)?,
            _marker: PhantomData,
        })
    }
}
impl<const CAP: usize, E: Endian> Encode for ZeroString<CAP, E> {
    /// Encodes the raw length followed by all `CAP` backing bytes.
    fn encode<EN: Encoder>(
        &self,
        encoder: &mut EN,
    ) -> Result<(), EncodeError> {
        self.len.encode(encoder)?;
        self.data.encode(encoder)
    }
}
967
impl<T: StaticSize, const ALIGN: usize, E: Endian, Context> Decode<Context>
    for ZeroSlice<T, ALIGN, E>
{
    /// Decodes the raw length followed by the relative pointer.
    fn decode<D: Decoder<Context = Context>>(decoder: &mut D) -> Result<Self, DecodeError> {
        Ok(Self {
            len: u32::decode(decoder)?,
            ptr: RelativePtr::decode(decoder)?,
        })
    }
}
impl<'de, T: StaticSize, const ALIGN: usize, E: Endian, Context> BorrowDecode<'de, Context>
    for ZeroSlice<T, ALIGN, E>
{
    /// Decodes the raw length followed by the relative pointer.
    fn borrow_decode<D: BorrowDecoder<'de, Context = Context>>(
        decoder: &mut D
    ) -> Result<Self, DecodeError> {
        Ok(Self {
            len: u32::borrow_decode(decoder)?,
            ptr: RelativePtr::borrow_decode(decoder)?,
        })
    }
}
impl<T: StaticSize, const ALIGN: usize, E: Endian> Encode for ZeroSlice<T, ALIGN, E> {
    /// Encodes the raw length followed by the relative pointer.
    fn encode<EN: Encoder>(
        &self,
        encoder: &mut EN,
    ) -> Result<(), EncodeError> {
        self.len.encode(encoder)?;
        self.ptr.encode(encoder)
    }
}
999
impl<E: Endian, Context> Decode<Context> for ZeroStr<E> {
    /// Delegates to the underlying `ZeroSlice<u8>`.
    fn decode<D: Decoder<Context = Context>>(decoder: &mut D) -> Result<Self, DecodeError> {
        Ok(Self {
            slice: ZeroSlice::decode(decoder)?,
        })
    }
}
impl<'de, E: Endian, Context> BorrowDecode<'de, Context> for ZeroStr<E> {
    /// Delegates to the underlying `ZeroSlice<u8>`.
    fn borrow_decode<D: BorrowDecoder<'de, Context = Context>>(
        decoder: &mut D
    ) -> Result<Self, DecodeError> {
        Ok(Self {
            slice: ZeroSlice::borrow_decode(decoder)?,
        })
    }
}
impl<E: Endian> Encode for ZeroStr<E> {
    /// Delegates to the underlying `ZeroSlice<u8>`.
    fn encode<EN: Encoder>(
        &self,
        encoder: &mut EN,
    ) -> Result<(), EncodeError> {
        self.slice.encode(encoder)
    }
}
1024
/// An aligned byte buffer that correctly deallocates with the alignment it was allocated with.
///
/// This is necessary because `Vec<u8>` always deallocates with alignment 1, which is UB
/// if the memory was allocated with a higher alignment.
#[cfg(feature = "alloc")]
#[doc(hidden)]
pub struct AlignedBuffer {
    // Heap allocation (dangling when `len == 0`; never dereferenced then).
    ptr: *mut u8,
    // Number of valid bytes at `ptr`; also the allocation size.
    len: usize,
    // Alignment used for both `alloc` and `dealloc` — they must match.
    align: usize,
}
1036
#[cfg(feature = "alloc")]
impl AlignedBuffer {
    /// Copies `data` into a fresh allocation with the requested `align`.
    ///
    /// For empty input no allocation is made; a dangling pointer is stored
    /// instead, which `Deref` and `Drop` both treat as "no allocation".
    ///
    /// # Panics
    ///
    /// Panics if `(len, align)` is not a valid `Layout` (align of zero or not
    /// a power of two, or size overflow). Aborts via `handle_alloc_error` if
    /// the allocation itself fails.
    fn from_vec(
        data: &[u8],
        align: usize,
    ) -> Self {
        let len = data.len();
        if len == 0 {
            return Self {
                ptr: core::ptr::NonNull::dangling().as_ptr(),
                len: 0,
                align,
            };
        }
        let layout = alloc::alloc::Layout::from_size_align(len, align).expect("Invalid layout");
        // SAFETY: `layout` has non-zero size (`len > 0` checked above). The null
        // check routes allocation failure to `handle_alloc_error`, and the copy
        // writes exactly `len` bytes into the fresh `len`-byte allocation from a
        // distinct source slice, so the regions cannot overlap.
        unsafe {
            let ptr = alloc::alloc::alloc(layout);
            if ptr.is_null() {
                alloc::alloc::handle_alloc_error(layout);
            }
            core::ptr::copy_nonoverlapping(data.as_ptr(), ptr, len);
            Self { ptr, len, align }
        }
    }
}
1062
#[cfg(feature = "alloc")]
impl core::ops::Deref for AlignedBuffer {
    /// Dereferences to the underlying byte slice.
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        if self.len == 0 {
            // Empty buffers hold a dangling pointer (see `from_vec`);
            // never build a slice from it.
            return &[];
        }
        // SAFETY: for `len > 0`, `ptr` points to a live allocation of exactly
        // `len` initialized bytes created in `from_vec` and freed only in `Drop`.
        unsafe { core::slice::from_raw_parts(self.ptr, self.len) }
    }
}
1075
#[cfg(feature = "alloc")]
impl AsRef<[u8]> for AlignedBuffer {
    /// Borrows the buffer contents as a byte slice (via `Deref`).
    fn as_ref(&self) -> &[u8] {
        &self[..]
    }
}
1082
#[cfg(feature = "alloc")]
impl Drop for AlignedBuffer {
    fn drop(&mut self) {
        // Empty buffers never allocated (see `from_vec`), so there is nothing to free.
        if self.len > 0 {
            // Rebuild the exact layout used at allocation time; deallocating with
            // a different size or alignment would be undefined behavior.
            let layout = alloc::alloc::Layout::from_size_align(self.len, self.align)
                .expect("Invalid layout");
            // SAFETY: `ptr` was returned by `alloc::alloc::alloc(layout)` with this
            // same layout in `from_vec`, and is freed at most once (only here).
            unsafe {
                alloc::alloc::dealloc(self.ptr, layout);
            }
        }
    }
}
1095
/// Computes a relative offset and checks that it fits in an `i32`.
/// Panics if the distance between `from` and `to` exceeds the i32 range (~2GB).
#[cfg(feature = "alloc")]
fn checked_relative_offset(
    to: usize,
    from: usize,
) -> i32 {
    let diff = (to as isize) - (from as isize);
    match i32::try_from(diff) {
        Ok(offset) => offset,
        Err(_) => panic!(
            "Relative offset overflow: distance {diff} between positions {from} and {to} exceeds i32 range"
        ),
    }
}
1110
/// A builder for zero-copy structures.
#[cfg(feature = "alloc")]
pub struct ZeroBuilder {
    // Bytes written so far, including any zero-valued alignment padding.
    data: Vec<u8>,
    // Largest alignment requested via `align`; used to allocate the final
    // `AlignedBuffer` in `finish`.
    max_align: usize,
}
1117
#[cfg(feature = "alloc")]
impl Default for ZeroBuilder {
    /// Equivalent to [`ZeroBuilder::new`]: an empty buffer with max alignment 1.
    fn default() -> Self {
        Self::new()
    }
}
1124
#[cfg(feature = "alloc")]
impl ZeroBuilder {
    /// Creates a new, empty builder with a maximum alignment of 1.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            data: Vec::new(),
            max_align: 1,
        }
    }

    /// Pads the buffer so its length is a multiple of `align`, records the
    /// largest alignment seen so far, and returns the (aligned) length.
    ///
    /// # Panics
    ///
    /// Panics if the padded length overflows `usize`.
    pub fn align(
        &mut self,
        align: usize,
    ) -> usize {
        if align > self.max_align {
            self.max_align = align;
        }
        if align > 1 {
            // Zero-fill up to the next multiple of `align`; no-op if already aligned.
            let aligned_len = self.data.len().next_multiple_of(align);
            if aligned_len > self.data.len() {
                self.data.resize(aligned_len, 0);
            }
        }
        self.data.len()
    }

    /// Reserves zero-initialized space for a value of type `T` at its natural
    /// alignment and returns the offset of the reserved region.
    pub fn reserve<T: ZeroCopy>(&mut self) -> usize {
        self.reserve_bytes(T::SIZE, T::ALIGN)
    }

    /// Reserves `size` zero-initialized bytes at the given alignment and
    /// returns the offset of the reserved region.
    pub fn reserve_bytes(
        &mut self,
        size: usize,
        align: usize,
    ) -> usize {
        let offset = self.align(align);
        self.data.resize(offset + size, 0);
        offset
    }

    /// Writes the bytes of `val` over the buffer contents at `offset`.
    ///
    /// # Panics
    ///
    /// Panics if `offset + T::SIZE` exceeds the current buffer length.
    pub fn write<T: ZeroCopy>(
        &mut self,
        offset: usize,
        val: &T,
    ) {
        let size = T::SIZE;
        // SAFETY: `ZeroCopy` guarantees a fixed layout with no padding bytes,
        // so all `size` bytes behind `val` are initialized and may be viewed
        // as a `u8` slice for the lifetime of the borrow.
        let bytes =
            unsafe { core::slice::from_raw_parts(core::ptr::from_ref(val).cast::<u8>(), size) };
        self.data[offset..offset + size].copy_from_slice(bytes);
    }

    /// Pushes a value onto the builder and returns its offset.
    pub fn push<T: ZeroCopy>(
        &mut self,
        val: &T,
    ) -> usize {
        let offset = self.reserve::<T>();
        self.write(offset, val);
        offset
    }

    /// Appends raw bytes at the given alignment and returns their offset.
    pub fn push_bytes(
        &mut self,
        bytes: &[u8],
        align: usize,
    ) -> usize {
        let offset = self.align(align);
        self.data.extend_from_slice(bytes);
        offset
    }

    /// Finalizes the builder, copying its contents into an `AlignedBuffer`
    /// allocated at the largest alignment requested while building.
    #[must_use]
    pub fn finish(self) -> AlignedBuffer {
        AlignedBuffer::from_vec(&self.data, self.max_align)
    }

    /// Returns the current length of the buffer in bytes.
    #[must_use]
    pub const fn len(&self) -> usize {
        self.data.len()
    }

    /// Returns true if the buffer is empty.
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
}
1222
/// A trait for zero-copy types that defines their preferred builder type.
pub trait ZeroCopyType<E: Endian = NativeEndian>: ZeroCopy {
    /// The builder type used to construct a value of this type
    /// (an implementor of `ZeroCopyBuilder` producing `Self`).
    type Builder;
}
1228
1229/// A trait for types that can be built into a zero-copy structure.
1230pub trait ZeroCopyBuilder<E: Endian = NativeEndian, const ALIGN: usize = 0> {
1231    /// The target type that this builds into.
1232    type Target: ZeroCopy;
1233
1234    #[cfg(feature = "alloc")]
1235    /// Build the zero-copy type into the builder.
1236    fn build_to_target(
1237        self,
1238        builder: &mut ZeroBuilder,
1239        offset: usize,
1240    ) -> Self::Target;
1241
1242    #[cfg(feature = "alloc")]
1243    /// Build the zero-copy type into the builder and return its offset.
1244    fn build(
1245        self,
1246        builder: &mut ZeroBuilder,
1247    ) -> usize
1248    where
1249        Self: Sized,
1250    {
1251        // Use the target's natural alignment if ALIGN is 0, otherwise use ALIGN
1252        let alignment = if ALIGN == 0 {
1253            <Self::Target as ZeroCopy>::ALIGN
1254        } else {
1255            ALIGN
1256        };
1257        let offset = builder.align(alignment);
1258        builder
1259            .data
1260            .resize(offset + <Self::Target as StaticSize>::SIZE, 0);
1261        let target = self.build_to_target(builder, offset);
1262        builder.write(offset, &target);
1263        offset
1264    }
1265}
1266
/// A wrapper for builders that produce a `RelativePtr`.
#[cfg(feature = "alloc")]
#[doc(hidden)]
pub struct RelativeBuilder<B, const ALIGN: usize>(pub B);

#[cfg(feature = "alloc")]
impl<E: Endian, T, B, const ALIGN: usize> ZeroCopyBuilder<E, ALIGN> for RelativeBuilder<B, ALIGN>
where
    B: ZeroCopyBuilder<E, 0, Target = T>,
    T: ZeroCopy,
{
    /// The pointer type this builder produces.
    type Target = RelativePtr<T, ALIGN, E>;

    /// Builds the pointee out of line, then returns a pointer relative to `offset`.
    fn build_to_target(
        self,
        builder: &mut ZeroBuilder,
        offset: usize,
    ) -> Self::Target {
        let RelativeBuilder(inner) = self;
        let target_offset = inner.build(builder);
        let relative = checked_relative_offset(target_offset, offset);
        RelativePtr::new(relative)
    }
}
1291
/// A wrapper for builders that produce a `ZeroArray`.
#[cfg(feature = "alloc")]
#[doc(hidden)]
pub struct ArrayBuilder<B, const N: usize, const ALIGN: usize>(pub [B; N]);

#[cfg(feature = "alloc")]
impl<E: Endian, T, B, const N: usize, const ALIGN: usize> ZeroCopyBuilder<E, ALIGN>
    for ArrayBuilder<B, N, ALIGN>
where
    B: ZeroCopyBuilder<E, 0, Target = T>,
    T: ZeroCopy,
{
    /// The array type this builder produces.
    type Target = ZeroArray<T, N, ALIGN, E>;

    /// Builds all `N` elements out of line and returns an array header whose
    /// pointer is relative to `offset`.
    fn build_to_target(
        self,
        builder: &mut ZeroBuilder,
        offset: usize,
    ) -> Self::Target {
        // ALIGN == 0 means "use the element type's natural alignment".
        let effective_align = if ALIGN == 0 { T::ALIGN } else { ALIGN };
        let data_offset = builder.reserve_bytes(N * T::SIZE, effective_align);
        let mut item_offset = data_offset;
        for item in self.0 {
            let built = item.build_to_target(builder, item_offset);
            builder.write(item_offset, &built);
            item_offset += T::SIZE;
        }
        ZeroArray {
            ptr: RelativePtr::new(checked_relative_offset(data_offset, offset)),
        }
    }
}
1329
/// A wrapper for builders that produce a `ZeroSlice`.
#[cfg(feature = "alloc")]
#[doc(hidden)]
pub struct SliceBuilder<B, const ALIGN: usize>(pub Vec<B>);

#[cfg(feature = "alloc")]
impl<E: Endian, T, B, const ALIGN: usize> ZeroCopyBuilder<E, ALIGN> for SliceBuilder<B, ALIGN>
where
    B: ZeroCopyBuilder<E, 0, Target = T>,
    T: ZeroCopy,
{
    /// The target type that this builds into.
    type Target = ZeroSlice<T, ALIGN, E>;

    /// Builds the object into the target location.
    ///
    /// Delegates to the `Vec<B>` implementation of `ZeroCopyBuilder`, which
    /// has identical semantics, instead of duplicating its body here.
    fn build_to_target(
        self,
        builder: &mut ZeroBuilder,
        offset: usize,
    ) -> Self::Target {
        <Vec<B> as ZeroCopyBuilder<E, ALIGN>>::build_to_target(self.0, builder, offset)
    }
}
1367
/// Implements `ZeroCopyType` and `ZeroCopyBuilder` for multi-byte primitives.
///
/// Each `$t, $from_native` pair names a primitive type and the `Endian`
/// method that converts it from native byte order; building the value
/// simply applies that conversion.
macro_rules! impl_zerocopy_primitive {
    ($($t:ty, $from_native:ident),*) => {
        $(
            impl<E: Endian> ZeroCopyType<E> for $t {
                /// The builder type associated with this trait.
                type Builder = $t;
            }
            impl<E: Endian> ZeroCopyBuilder<E, 0> for $t {
                /// The target type that this builds into.
                type Target = $t;
                #[cfg(feature = "alloc")]
                fn build_to_target(self, _builder: &mut ZeroBuilder, _offset: usize) -> Self::Target {
                    E::$from_native(self)
                }
            }
        )*
    };
}
1386
1387impl_zerocopy_primitive!(
1388    u16,
1389    from_native_u16,
1390    u32,
1391    from_native_u32,
1392    u64,
1393    from_native_u64,
1394    u128,
1395    from_native_u128,
1396    i16,
1397    from_native_i16,
1398    i32,
1399    from_native_i32,
1400    i64,
1401    from_native_i64,
1402    i128,
1403    from_native_i128,
1404    f32,
1405    from_native_f32,
1406    f64,
1407    from_native_f64
1408);
1409
1410impl<E: Endian> ZeroCopyType<E> for u8 {
1411    /// The builder type associated with this trait.
1412    type Builder = Self;
1413}
1414impl<E: Endian> ZeroCopyBuilder<E, 0> for u8 {
1415    /// The target type that this builds into.
1416    type Target = Self;
1417
1418    #[cfg(feature = "alloc")]
1419    fn build_to_target(
1420        self,
1421        _builder: &mut ZeroBuilder,
1422        _offset: usize,
1423    ) -> Self::Target {
1424        self
1425    }
1426}
1427impl<E: Endian> ZeroCopyType<E> for i8 {
1428    /// The builder type associated with this trait.
1429    type Builder = Self;
1430}
1431impl<E: Endian> ZeroCopyBuilder<E, 0> for i8 {
1432    /// The target type that this builds into.
1433    type Target = Self;
1434
1435    #[cfg(feature = "alloc")]
1436    fn build_to_target(
1437        self,
1438        _builder: &mut ZeroBuilder,
1439        _offset: usize,
1440    ) -> Self::Target {
1441        self
1442    }
1443}
1444impl<E: Endian> ZeroCopyType<E> for bool {
1445    /// The builder type associated with this trait.
1446    type Builder = Self;
1447}
1448impl<E: Endian> ZeroCopyBuilder<E, 0> for bool {
1449    /// The target type that this builds into.
1450    type Target = Self;
1451
1452    #[cfg(feature = "alloc")]
1453    fn build_to_target(
1454        self,
1455        _builder: &mut ZeroBuilder,
1456        _offset: usize,
1457    ) -> Self::Target {
1458        self
1459    }
1460}
1461impl<E: Endian> ZeroCopyType<E> for char {
1462    /// The builder type associated with this trait.
1463    type Builder = Self;
1464}
1465impl<E: Endian> ZeroCopyBuilder<E, 0> for char {
1466    /// The target type that this builds into.
1467    type Target = Self;
1468
1469    #[cfg(feature = "alloc")]
1470    fn build_to_target(
1471        self,
1472        _builder: &mut ZeroBuilder,
1473        _offset: usize,
1474    ) -> Self::Target {
1475        Self::from_u32(E::from_native_u32(self as u32)).unwrap()
1476    }
1477}
1478
#[cfg(feature = "alloc")]
impl<E: Endian> ZeroCopyType<E> for ZeroStr<E> {
    /// The builder type associated with this trait.
    type Builder = String;
}
#[cfg(feature = "alloc")]
impl<E: Endian> ZeroCopyBuilder<E, 0> for String {
    /// The target type that this builds into.
    type Target = ZeroStr<E>;

    /// Appends the string bytes out of line and returns a `ZeroStr` header
    /// (length + relative pointer) to be written at `offset`.
    ///
    /// # Panics
    ///
    /// Panics if the string is longer than `u32::MAX` bytes. (Previously the
    /// length was silently truncated with `as u32`, which would have produced
    /// a corrupt header.)
    fn build_to_target(
        self,
        builder: &mut ZeroBuilder,
        offset: usize,
    ) -> Self::Target {
        // A ZeroStr records its byte length in a u32 field; reject overflow.
        let len = u32::try_from(self.len())
            .expect("string length exceeds u32::MAX and cannot be stored in a ZeroStr");
        // UTF-8 bytes have no alignment requirement.
        let data_offset = builder.push_bytes(self.as_bytes(), 1);
        // The RelativePtr inside ZeroStr is at offset + size_of::<u32>() (after the `len` field).
        // RelativePtr::get() resolves relative to its own address, so we must adjust.
        let ptr_field_offset = offset + core::mem::size_of::<u32>();
        ZeroStr::new(len, checked_relative_offset(data_offset, ptr_field_offset))
    }
}
1506
/// A wrapper for `String` to build into an inline `ZeroString<CAP>`.
#[cfg(feature = "alloc")]
#[doc(hidden)]
pub struct FixedString<const CAP: usize>(pub String);

#[cfg(feature = "alloc")]
impl<E: Endian, const CAP: usize> ZeroCopyType<E> for ZeroString<CAP, E> {
    /// The builder type associated with this trait.
    type Builder = FixedString<CAP>;
}
#[cfg(feature = "alloc")]
impl<E: Endian, const CAP: usize> ZeroCopyBuilder<E, 0> for FixedString<CAP> {
    /// The target type that this builds into.
    type Target = ZeroString<CAP, E>;

    /// Builds an inline, fixed-capacity string.
    ///
    /// Strings longer than `CAP` bytes are silently truncated, and the cut is
    /// moved back to the nearest UTF-8 character boundary so the stored bytes
    /// remain valid UTF-8. Unused capacity is zero-filled.
    fn build_to_target(
        self,
        _builder: &mut ZeroBuilder,
        _offset: usize,
    ) -> Self::Target {
        let bytes = self.0.as_bytes();
        let mut len = bytes.len().min(CAP);
        // Ensure truncation happens at a valid UTF-8 boundary
        while len > 0 && !self.0.is_char_boundary(len) {
            len -= 1;
        }
        let mut data = [0u8; CAP];
        data[..len].copy_from_slice(&bytes[..len]);
        ZeroString {
            // `len <= CAP` here; the cast assumes CAP fits in u32 —
            // TODO(review): confirm for very large CAP values.
            len: E::from_native_u32(len as u32),
            data,
            _marker: PhantomData,
        }
    }
}
1543
#[cfg(feature = "alloc")]
impl<E: Endian, T, const ALIGN: usize> ZeroCopyType<E> for ZeroSlice<T, ALIGN, E>
where
    T: ZeroCopy + ZeroCopyType<E>,
{
    /// The builder type associated with this trait.
    type Builder = SliceBuilder<T::Builder, ALIGN>;
}
#[cfg(feature = "alloc")]
impl<E: Endian, T, B, const ALIGN: usize> ZeroCopyBuilder<E, ALIGN> for Vec<B>
where
    B: ZeroCopyBuilder<E, 0, Target = T>,
    T: ZeroCopy,
{
    /// The target type that this builds into.
    type Target = ZeroSlice<T, ALIGN, E>;

    /// Builds every element out of line and returns a `ZeroSlice` header
    /// (length + relative pointer) to be written at `offset`.
    ///
    /// # Panics
    ///
    /// Panics if the vector holds more than `u32::MAX` elements. (Previously
    /// the length was silently truncated with `as u32`, which would have
    /// produced a corrupt header.)
    fn build_to_target(
        self,
        builder: &mut ZeroBuilder,
        offset: usize,
    ) -> Self::Target {
        // A ZeroSlice records its element count in a u32 field; reject overflow.
        let len = u32::try_from(self.len())
            .expect("slice length exceeds u32::MAX and cannot be stored in a ZeroSlice");
        // ALIGN == 0 means "use the element type's natural alignment".
        let effective_align = if ALIGN == 0 {
            T::ALIGN
        } else {
            ALIGN
        };
        let data_offset = builder.reserve_bytes(self.len() * T::SIZE, effective_align);
        for (i, item) in self.into_iter().enumerate() {
            let item_offset = data_offset + i * T::SIZE;
            let target = item.build_to_target(builder, item_offset);
            builder.write(item_offset, &target);
        }
        // RelativePtr is at offset + size_of::<u32>() (after the `len` field)
        let ptr_field_offset = offset + core::mem::size_of::<u32>();
        ZeroSlice::new(len, checked_relative_offset(data_offset, ptr_field_offset))
    }
}
#[cfg(feature = "alloc")]
impl<E: Endian, T, const ALIGN: usize> ZeroCopyType<E> for RelativePtr<T, ALIGN, E>
where
    T: ZeroCopy + ZeroCopyType<E>,
{
    /// Wraps the pointee's builder so the pointee is built out of line.
    type Builder = RelativeBuilder<T::Builder, ALIGN>;
}

#[cfg(feature = "alloc")]
impl<E: Endian, T, const N: usize, const ALIGN: usize> ZeroCopyType<E> for ZeroArray<T, N, ALIGN, E>
where
    T: ZeroCopy + ZeroCopyType<E>,
{
    /// Wraps per-element builders so the array storage is built out of line.
    type Builder = ArrayBuilder<T::Builder, N, ALIGN>;
}
1601
1602impl<E: Endian, T, const N: usize> ZeroCopyType<E> for [T; N]
1603where
1604    T: ZeroCopy + ZeroCopyType<E>,
1605{
1606    /// The builder type associated with this trait.
1607    type Builder = [<T as ZeroCopyType<E>>::Builder; N];
1608}
1609impl<E: Endian, T, B, const N: usize> ZeroCopyBuilder<E, 0> for [B; N]
1610where
1611    B: ZeroCopyBuilder<E, 0, Target = T>,
1612    T: ZeroCopy,
1613{
1614    /// The target type that this builds into.
1615    type Target = [T; N];
1616
1617    #[cfg(feature = "alloc")]
1618    fn build_to_target(
1619        self,
1620        builder: &mut ZeroBuilder,
1621        offset: usize,
1622    ) -> Self::Target {
1623        let mut target_array: [core::mem::MaybeUninit<T>; N] =
1624            unsafe { core::mem::MaybeUninit::uninit().assume_init() };
1625        for (i, item) in self.into_iter().enumerate() {
1626            let item_offset = offset + i * T::SIZE;
1627            target_array[i] =
1628                core::mem::MaybeUninit::new(item.build_to_target(builder, item_offset));
1629        }
1630        unsafe { core::mem::transmute_copy(&target_array) }
1631    }
1632}