// musli_zerocopy/pointer/ref.rs
1use core::cmp::Ordering;
2use core::fmt;
3use core::hash::Hash;
4use core::marker::PhantomData;
5use core::mem::size_of;
6
7use crate::endian::{Big, ByteOrder, Little, Native};
8use crate::error::{Error, ErrorKind, IntoRepr};
9use crate::mem::MaybeUninit;
10use crate::pointer::Coerce;
11use crate::pointer::{DefaultSize, Pointee, Size};
12use crate::ZeroCopy;
13
/// A stored reference to a type `T`.
///
/// A reference is made up of two components:
/// * An [`offset()`] indicating the absolute offset into a [`Buf`] where the
///   pointed-to (pointee) data is located.
/// * An optional [`metadata()`] components, which if set indicates that this
///   reference is a wide pointer. This is used when encoding types such as
///   `[T]` or `str` to include additional data necessary to handle the type.
///
/// [`Buf`]: crate::buf::Buf
/// [`offset()`]: Ref::offset
/// [`metadata()`]: Ref::metadata
///
/// # Examples
///
/// ```
/// use std::mem::align_of;
///
/// use musli_zerocopy::{Ref, OwnedBuf};
///
/// let mut buf = OwnedBuf::with_alignment::<u32>();
/// buf.extend_from_slice(&[1, 2, 3, 4]);
///
/// let buf = buf.as_ref();
///
/// let number = Ref::<u32>::new(0);
/// assert_eq!(*buf.load(number)?, u32::from_ne_bytes([1, 2, 3, 4]));
/// # Ok::<_, musli_zerocopy::Error>(())
/// ```
#[derive(ZeroCopy)]
#[repr(C)]
#[zero_copy(crate, swap_bytes_self)]
pub struct Ref<T, E = Native, O = DefaultSize>
where
    T: ?Sized + Pointee,
    E: ByteOrder,
    O: Size,
{
    // Absolute offset into the buffer, stored in byte order `E`
    // (see `with_metadata`, which applies `swap_bytes::<E>` on construction).
    offset: O,
    // Pointee metadata (e.g. the length of `[T]` or `str`), also stored in
    // byte order `E`. For sized pointees this is `()`.
    metadata: T::Stored<O>,
    // Carries the `E` and `T` type parameters; not part of the stored layout.
    #[zero_copy(ignore)]
    _marker: PhantomData<(E, T)>,
}
57
58impl<T, E, O> Ref<T, E, O>
59where
60 T: ?Sized + Pointee,
61 E: ByteOrder,
62 O: Size,
63{
64 /// Convert this reference into a [`Big`]-endian [`ByteOrder`].
65 ///
66 /// # Examples
67 ///
68 /// ```
69 /// use musli_zerocopy::{endian, Ref};
70 ///
71 /// let r: Ref<u32> = Ref::new(10);
72 /// assert_eq!(r.offset(), 10);
73 ///
74 /// let r: Ref<u32, endian::Little> = Ref::new(10);
75 /// assert_eq!(r.offset(), 10);
76 ///
77 /// let r: Ref<u32, endian::Big> = r.to_be();
78 /// assert_eq!(r.offset(), 10);
79 /// ```
80 #[inline]
81 pub fn to_be(self) -> Ref<T, Big, O> {
82 self.to_endian()
83 }
84
85 /// Convert this reference into a [`Little`]-endian [`ByteOrder`].
86 ///
87 /// # Examples
88 ///
89 /// ```
90 /// use musli_zerocopy::{endian, Ref};
91 ///
92 /// let r: Ref<u32> = Ref::new(10);
93 /// assert_eq!(r.offset(), 10);
94 ///
95 /// let r: Ref<u32, endian::Big> = Ref::new(10);
96 /// assert_eq!(r.offset(), 10);
97 ///
98 /// let r: Ref<u32, endian::Little> = r.to_le();
99 /// assert_eq!(r.offset(), 10);
100 /// ```
101 #[inline]
102 pub fn to_le(self) -> Ref<T, Little, O> {
103 self.to_endian()
104 }
105
106 /// Convert this reference into a [`Native`]-endian [`ByteOrder`].
107 ///
108 /// # Examples
109 ///
110 /// ```
111 /// use musli_zerocopy::{endian, Ref};
112 ///
113 /// let r: Ref<u32, endian::Native> = Ref::<u32, endian::Big>::new(10).to_ne();
114 /// assert_eq!(r.offset(), 10);
115 ///
116 /// let r: Ref<u32, endian::Native> = Ref::<u32, endian::Little>::new(10).to_ne();
117 /// assert_eq!(r.offset(), 10);
118 ///
119 /// let r: Ref<u32, endian::Native> = Ref::<u32, endian::Native>::new(10).to_ne();
120 /// assert_eq!(r.offset(), 10);
121 /// ```
122 #[inline]
123 pub fn to_ne(self) -> Ref<T, Native, O> {
124 self.to_endian()
125 }
126
127 /// Convert this reference into a `U`-endian [`ByteOrder`].
128 ///
129 /// # Examples
130 ///
131 /// ```
132 /// use musli_zerocopy::{endian, Ref};
133 ///
134 /// let r: Ref<u32, endian::Native> = Ref::<u32, endian::Big>::new(10).to_endian();
135 /// assert_eq!(r.offset(), 10);
136 ///
137 /// let r: Ref<u32, endian::Native> = Ref::<u32, endian::Little>::new(10).to_endian();
138 /// assert_eq!(r.offset(), 10);
139 ///
140 /// let r: Ref<u32, endian::Native> = Ref::<u32, endian::Native>::new(10).to_endian();
141 /// assert_eq!(r.offset(), 10);
142 /// ```
143 #[inline]
144 pub fn to_endian<U: ByteOrder>(self) -> Ref<T, U, O> {
145 Ref {
146 offset: self.offset.swap_bytes::<E>().swap_bytes::<U>(),
147 metadata: self.metadata.swap_bytes::<E>().swap_bytes::<U>(),
148 _marker: PhantomData,
149 }
150 }
151}
152
153impl<T, E, O> Ref<T, E, O>
154where
155 T: ?Sized + Pointee,
156 E: ByteOrder,
157 O: Size,
158{
159 /// Construct a reference with custom metadata.
160 ///
161 /// # Panics
162 ///
163 /// This will panic if either:
164 /// * The `offset` or `metadata` can't be byte swapped as per
165 /// [`ZeroCopy::CAN_SWAP_BYTES`].
166 /// * Packed [`offset()`] cannot be constructed from `U` (out of range).
167 /// * Packed [`metadata()`] cannot be constructed from `T::Metadata` (reason
168 /// depends on the exact metadata).
169 ///
170 /// To guarantee that this constructor will never panic, [`Ref<T, E,
171 /// usize>`] can be used. This also ensures that construction is a no-op.
172 ///
173 /// [`offset()`]: Ref::offset
174 /// [`metadata()`]: Ref::metadata
175 ///
176 /// # Examples
177 ///
178 /// ```
179 /// use musli_zerocopy::Ref;
180 ///
181 /// let reference = Ref::<[u64]>::with_metadata(42, 10);
182 /// assert_eq!(reference.offset(), 42);
183 /// assert_eq!(reference.len(), 10);
184 /// ```
185 #[inline]
186 pub fn with_metadata<U>(offset: U, metadata: T::Metadata) -> Self
187 where
188 U: Copy + fmt::Debug,
189 O: TryFrom<U>,
190 {
191 const {
192 assert!(
193 O::CAN_SWAP_BYTES,
194 "Offset cannot be byte-ordered since it would not inhabit valid types"
195 );
196 }
197
198 let Some(offset) = O::try_from(offset).ok() else {
199 panic!("Offset {offset:?} not in legal range 0-{}", O::MAX);
200 };
201
202 let Some(metadata) = T::try_from_metadata(metadata) else {
203 panic!("Metadata {metadata:?} not in legal range 0-{}", O::MAX);
204 };
205
206 Self {
207 offset: O::swap_bytes::<E>(offset),
208 metadata: T::Stored::<O>::swap_bytes::<E>(metadata),
209 _marker: PhantomData,
210 }
211 }
212
213 /// Fallibly try to construct a reference with metadata.
214 ///
215 /// # Errors
216 ///
217 /// This will not compile through a constant assertion if the `offset` or
218 /// `metadata` can't be byte swapped as per [`ZeroCopy::CAN_SWAP_BYTES`].
219 ///
220 /// This will error if either:
221 /// * Packed [`offset()`] cannot be constructed from `U` (out of range).
222 /// * Packed [`metadata()`] cannot be constructed from `T::Metadata` (reason
223 /// depends on the exact metadata).
224 ///
225 /// To guarantee that this constructor will never error, [`Ref<T, Native,
226 /// usize>`] can be used. This also ensures that construction is a no-op.
227 ///
228 /// [`offset()`]: Ref::offset
229 /// [`metadata()`]: Ref::metadata
230 ///
231 /// # Examples
232 ///
233 /// ```
234 /// use musli_zerocopy::Ref;
235 ///
236 /// let reference = Ref::<[u64]>::try_with_metadata(42, 10)?;
237 /// assert_eq!(reference.offset(), 42);
238 /// assert_eq!(reference.len(), 10);
239 /// # Ok::<_, musli_zerocopy::Error>(())
240 /// ```
241 pub fn try_with_metadata<U>(offset: U, metadata: T::Metadata) -> Result<Self, Error>
242 where
243 U: Copy + IntoRepr + fmt::Debug,
244 O: TryFrom<U>,
245 {
246 const {
247 assert!(
248 O::CAN_SWAP_BYTES,
249 "Offset cannot be byte-ordered since it would not inhabit valid types"
250 );
251
252 assert!(
253 T::Stored::<O>::CAN_SWAP_BYTES,
254 "Packed offset cannot be byte-ordered since it would not inhabit valid types"
255 );
256 }
257
258 let Some(offset) = O::try_from(offset).ok() else {
259 return Err(Error::new(ErrorKind::InvalidOffsetRange {
260 offset: U::into_repr(offset),
261 max: O::into_repr(O::MAX),
262 }));
263 };
264
265 let Some(metadata) = T::try_from_metadata(metadata) else {
266 return Err(Error::new(ErrorKind::InvalidMetadataRange {
267 metadata: T::Metadata::into_repr(metadata),
268 max: O::into_repr(O::MAX),
269 }));
270 };
271
272 Ok(Self {
273 offset: O::swap_bytes::<E>(offset),
274 metadata: T::Stored::swap_bytes::<E>(metadata),
275 _marker: PhantomData,
276 })
277 }
278}
279
280impl<T, E, O> Ref<[T], E, O>
281where
282 T: ZeroCopy,
283 E: ByteOrder,
284 O: Size,
285{
286 /// Return the number of elements in the slice `[T]`.
287 ///
288 /// # Examples
289 ///
290 /// ```
291 /// use musli_zerocopy::pointer::Ref;
292 ///
293 /// let slice = Ref::<[u32]>::with_metadata(0, 2);
294 /// assert_eq!(slice.len(), 2);
295 /// ```
296 #[inline]
297 pub fn len(self) -> usize {
298 self.metadata.as_usize::<E>()
299 }
300
301 /// Test if the slice `[T]` is empty.
302 ///
303 /// # Examples
304 ///
305 /// ```
306 /// use musli_zerocopy::pointer::Ref;
307 ///
308 /// let slice = Ref::<[u32]>::with_metadata(0, 0);
309 /// assert!(slice.is_empty());
310 ///
311 /// let slice = Ref::<[u32]>::with_metadata(0, 2);
312 /// assert!(!slice.is_empty());
313 /// ```
314 #[inline]
315 pub fn is_empty(self) -> bool {
316 self.metadata.is_zero()
317 }
318
319 /// Try to get a reference directly out of the slice without validation.
320 ///
321 /// This avoids having to validate every element in a slice in order to
322 /// address them.
323 ///
324 /// # Examples
325 ///
326 /// ```
327 /// use musli_zerocopy::OwnedBuf;
328 ///
329 /// let mut buf = OwnedBuf::new();
330 /// let slice = buf.store_slice(&[1, 2, 3, 4]);
331 ///
332 /// let two = slice.get(2).expect("Missing element 2");
333 /// assert_eq!(buf.load(two)?, &3);
334 ///
335 /// assert!(slice.get(4).is_none());
336 /// # Ok::<_, musli_zerocopy::Error>(())
337 /// ```
338 #[inline]
339 pub fn get(self, index: usize) -> Option<Ref<T, E, O>> {
340 if index >= self.len() {
341 return None;
342 }
343
344 let offset = self.offset.as_usize::<E>() + size_of::<T>() * index;
345 Some(Ref::new(offset))
346 }
347
348 /// Get an unchecked reference directly out of the slice without validation.
349 ///
350 /// This avoids having to validate every element in a slice in order to
351 /// address them.
352 ///
353 /// In contrast to [`get()`], this does not check that the index is within
354 /// the bounds of the current slice, all though it's not unsafe since it
355 /// cannot lead to anything inherently unsafe. Only garbled data.
356 ///
357 /// [`get()`]: Ref::get
358 ///
359 /// # Examples
360 ///
361 /// ```
362 /// use musli_zerocopy::OwnedBuf;
363 ///
364 /// let mut buf = OwnedBuf::new();
365 /// let slice = buf.store_slice(&[1, 2, 3, 4]);
366 ///
367 /// let two = slice.get_unchecked(2);
368 /// assert_eq!(buf.load(two)?, &3);
369 ///
370 /// let oob = slice.get_unchecked(4);
371 /// assert!(buf.load(oob).is_err());
372 /// # Ok::<_, musli_zerocopy::Error>(())
373 /// ```
374 pub fn get_unchecked(self, index: usize) -> Ref<T, E, O> {
375 let offset = self.offset.as_usize::<E>() + size_of::<T>() * index;
376 Ref::new(offset)
377 }
378
379 /// Split the slice reference at the given position `at`.
380 ///
381 /// # Panics
382 ///
383 /// This panics if the given range is out of bounds.
384 ///
385 /// # Examples
386 ///
387 /// ```
388 /// use musli_zerocopy::OwnedBuf;
389 ///
390 /// let mut buf = OwnedBuf::new();
391 /// let slice = buf.store_slice(&[1, 2, 3, 4]);
392 ///
393 /// buf.align_in_place();
394 ///
395 /// let (a, b) = slice.split_at(3);
396 /// let (c, d) = slice.split_at(4);
397 ///
398 /// assert_eq!(buf.load(a)?, &[1, 2, 3]);
399 /// assert_eq!(buf.load(b)?, &[4]);
400 /// assert_eq!(buf.load(c)?, &[1, 2, 3, 4]);
401 /// assert_eq!(buf.load(d)?, &[]);
402 /// # Ok::<_, musli_zerocopy::Error>(())
403 /// ```
404 #[inline]
405 pub fn split_at(self, at: usize) -> (Self, Self) {
406 let offset = self.offset();
407 let len = self.len();
408 assert!(at <= len, "Split point {at} is out of bounds 0..={len}");
409 let a = Self::with_metadata(offset, at);
410 let b = Self::with_metadata(offset + at * size_of::<T>(), len - at);
411 (a, b)
412 }
413
414 /// Perform an fetch like `get` which panics with diagnostics in case the
415 /// index is out-of-bounds.
416 #[inline]
417 #[cfg(feature = "alloc")]
418 pub(crate) fn at(self, index: usize) -> Ref<T, E, O> {
419 let Some(r) = self.get(index) else {
420 panic!("Index {index} out of bounds 0-{}", self.len());
421 };
422
423 r
424 }
425
426 /// Construct an iterator over this reference.
427 ///
428 /// # Examples
429 ///
430 /// ```
431 /// use musli_zerocopy::OwnedBuf;
432 ///
433 /// let mut buf = OwnedBuf::new();
434 /// buf.extend_from_slice(&[1, 2, 3, 4]);
435 ///
436 /// let slice = buf.store_slice(&[1, 2, 3, 4]);
437 ///
438 /// buf.align_in_place();
439 ///
440 /// let mut out = Vec::new();
441 ///
442 /// for r in slice.iter() {
443 /// out.push(*buf.load(r)?);
444 /// }
445 ///
446 /// for r in slice.iter().rev() {
447 /// out.push(*buf.load(r)?);
448 /// }
449 ///
450 /// assert_eq!(out, [1, 2, 3, 4, 4, 3, 2, 1]);
451 /// # Ok::<_, musli_zerocopy::Error>(())
452 /// ```
453 #[inline]
454 pub fn iter(self) -> Iter<T, E, O> {
455 let start = self.offset.as_usize::<E>();
456 let end = start + self.metadata.as_usize::<E>() * size_of::<T>();
457
458 Iter {
459 start,
460 end,
461 _marker: PhantomData,
462 }
463 }
464}
465
impl<E, O> Ref<str, E, O>
where
    E: ByteOrder,
    O: Size,
{
    /// Return the length of the string in bytes.
    ///
    /// # Examples
    ///
    /// ```
    /// use musli_zerocopy::pointer::Ref;
    ///
    /// let slice = Ref::<str>::with_metadata(0, 2);
    /// assert_eq!(slice.len(), 2);
    /// ```
    #[inline]
    pub fn len(self) -> usize {
        self.metadata.as_usize::<E>()
    }

    /// Test if the string `str` is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// use musli_zerocopy::pointer::Ref;
    ///
    /// let slice = Ref::<str>::with_metadata(0, 0);
    /// assert!(slice.is_empty());
    ///
    /// let slice = Ref::<str>::with_metadata(0, 2);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline]
    pub fn is_empty(self) -> bool {
        self.metadata.is_zero()
    }
}
504
/// An iterator over a `Ref<[T]>` which produces `Ref<T>` values.
///
/// See [`Ref::iter`].
pub struct Iter<T, E, O> {
    // Byte offset of the next element yielded from the front.
    start: usize,
    // One-past-the-end byte offset; `next_back` steps this downwards.
    end: usize,
    // Carries the element, byte-order and size type parameters.
    _marker: PhantomData<(T, E, O)>,
}
513
514impl<T, E, O> Iterator for Iter<T, E, O>
515where
516 T: ZeroCopy,
517 E: ByteOrder,
518 O: Size,
519{
520 type Item = Ref<T, E, O>;
521
522 #[inline]
523 fn next(&mut self) -> Option<Self::Item> {
524 if self.start == self.end {
525 return None;
526 }
527
528 let start = self.start;
529 self.start += size_of::<T>();
530 Some(Ref::new(start))
531 }
532}
533
534impl<T, E, O> DoubleEndedIterator for Iter<T, E, O>
535where
536 T: ZeroCopy,
537 E: ByteOrder,
538 O: Size,
539{
540 #[inline]
541 fn next_back(&mut self) -> Option<Self::Item> {
542 if self.start == self.end {
543 return None;
544 }
545
546 self.end -= size_of::<T>();
547 Some(Ref::new(self.end))
548 }
549}
550
impl<T, E, O> Ref<T, E, O>
where
    T: ?Sized + Pointee,
    E: ByteOrder,
    O: Size,
{
    /// Get the stored metadata component of the reference.
    ///
    /// For wide pointees such as `[T]` and `str` this is the packed length;
    /// for sized pointees it is `()`. The value is returned as stored, in
    /// byte order `E`.
    ///
    /// # Examples
    ///
    /// ```
    /// use musli_zerocopy::pointer::Ref;
    ///
    /// let slice = Ref::<str>::with_metadata(0, 10);
    /// assert_eq!(slice.metadata(), 10);
    /// ```
    #[inline]
    pub fn metadata(self) -> T::Stored<O> {
        self.metadata
    }
}
572
573impl<T, E, O> Ref<T, E, O>
574where
575 T: Pointee<Metadata = (), Stored<O> = ()>,
576 E: ByteOrder,
577 O: Size,
578{
579 /// Construct a reference at the given offset.
580 ///
581 /// # Errors
582 ///
583 /// This will not compile through a constant assertion if the `offset` or
584 /// can't be byte swapped as per [`ZeroCopy::CAN_SWAP_BYTES`].
585 ///
586 /// # Panics
587 ///
588 /// This will panic if:
589 /// * Packed [`offset()`] cannot be constructed from `U` (out of range).
590 ///
591 /// [`offset()`]: Self::offset
592 ///
593 /// # Examples
594 ///
595 /// ```
596 /// use musli_zerocopy::Ref;
597 ///
598 /// let reference = Ref::<u64>::new(42);
599 /// assert_eq!(reference.offset(), 42);
600 /// ```
601 ///
602 /// Characters cannot be used as offsets:
603 ///
604 /// ```compile_fail
605 /// use musli_zerocopy::Ref;
606 ///
607 /// let reference = Ref::<_, _, char>::new('a');
608 /// ```
609 #[inline]
610 pub fn new<U>(offset: U) -> Self
611 where
612 U: Copy + fmt::Debug,
613 O: TryFrom<U>,
614 {
615 const {
616 assert!(
617 O::CAN_SWAP_BYTES,
618 "Offset cannot be byte-ordered since it would not inhabit valid types",
619 );
620 }
621
622 let Some(offset) = O::try_from(offset).ok() else {
623 panic!("Offset {offset:?} not in the legal range 0-{}", O::MAX);
624 };
625
626 Self {
627 offset: O::swap_bytes::<E>(offset),
628 metadata: (),
629 _marker: PhantomData,
630 }
631 }
632
633 /// Construct a typed reference to the zeroeth offset in a buffer.
634 ///
635 /// # Examples
636 ///
637 /// ```
638 /// use musli_zerocopy::Ref;
639 ///
640 /// let reference = Ref::<u64>::zero();
641 /// assert_eq!(reference.offset(), 0);
642 /// ```
643 #[inline]
644 pub const fn zero() -> Self {
645 Self {
646 offset: O::ZERO,
647 metadata: (),
648 _marker: PhantomData,
649 }
650 }
651}
652
653impl<T, E, O> Ref<T, E, O>
654where
655 T: ?Sized + Pointee,
656 E: ByteOrder,
657 O: Size,
658{
659 /// Get the offset the reference points to.
660 ///
661 /// # Examples
662 ///
663 /// ```
664 /// use musli_zerocopy::Ref;
665 ///
666 /// let reference = Ref::<u64>::new(42);
667 /// assert_eq!(reference.offset(), 42);
668 /// ```
669 #[inline]
670 pub fn offset(self) -> usize {
671 self.offset.as_usize::<E>()
672 }
673
674 /// Coerce from one kind of reference to another ensuring that the
675 /// destination type `U` is size-compatible.
676 ///
677 /// This performs metadata conversion if the destination metadata for `U`
678 /// differs from `T`, such as for `[u32]` to `[u8]` it would multiply the
679 /// length by 4 to ensure that the slice points to an appropriately sized
680 /// region.
681 ///
682 /// If the metadata conversion would overflow, this will wrap around the
683 /// numerical bounds or panic for debug builds.
684 ///
685 /// See [`try_coerce()`] for more documentation, which is also a checked
686 /// variant of this method.
687 ///
688 /// [`try_coerce()`]: Self::try_coerce
689 pub fn coerce<U>(self) -> Ref<U, E, O>
690 where
691 T: Coerce<U>,
692 U: ?Sized + Pointee,
693 {
694 Ref {
695 offset: self.offset,
696 metadata: T::coerce_metadata(self.metadata),
697 _marker: PhantomData,
698 }
699 }
700
701 /// Try to coerce from one kind of reference to another ensuring that the
702 /// destination type `U` is size-compatible.
703 ///
704 /// This performs metadata conversion if the destination metadata for `U`
705 /// differs from `T`, such as for `[u32]` to `[u8]` it would multiply the
706 /// length by 4 to ensure that the slice points to an appropriately sized
707 /// region.
708 ///
709 /// This returns `None` in case metadata would overflow due to the
710 /// conversion.
711 ///
712 /// ```
713 /// use musli_zerocopy::Ref;
714 ///
715 /// let reference: Ref<u64> = Ref::zero();
716 /// let reference2 = reference.coerce::<[u32]>();
717 /// assert_eq!(reference2.len(), 2);
718 /// ```
719 ///
720 /// This method ensures that coercions across inappropriate types are
721 /// prohibited, such as coercing from a reference to a slice which is too
722 /// large.
723 ///
724 /// ```compile_fail
725 /// use musli_zerocopy::Ref;
726 ///
727 /// let reference: Ref<u32> = Ref::zero();
728 /// let reference2 = reference.coerce::<[u64]>();
729 /// ```
730 ///
731 /// If metadata needs to be adjusted for the destination type such as for
732 /// slices, it will be:
733 ///
734 /// ```
735 /// use musli_zerocopy::Ref;
736 ///
737 /// let reference: Ref<[u32]> = Ref::with_metadata(0, 1);
738 /// let reference2 = reference.try_coerce::<[u8]>().ok_or("bad coercion")?;
739 /// assert_eq!(reference2.len(), 4);
740 ///
741 /// let reference: Ref<str> = Ref::with_metadata(0, 12);
742 /// let reference2 = reference.try_coerce::<[u8]>().ok_or("bad coercion")?;
743 /// assert_eq!(reference2.len(), 12);
744 /// # Ok::<_, &'static str>(())
745 /// ```
746 ///
747 /// This does mean that numerical overflow might occur if the packed
748 /// metadata is too small:
749 ///
750 /// ```
751 /// use musli_zerocopy::Ref;
752 /// use musli_zerocopy::endian::Native;
753 ///
754 /// let reference = Ref::<[u32], Native, u8>::with_metadata(0, 64);
755 /// let reference2 = reference.try_coerce::<[u8]>();
756 /// assert!(reference2.is_none()); // 64 * 4 would overflow u8 packed metadata.
757 /// ```
758 ///
759 /// Coercion of non-zero types are supported, but do not guarantee that the
760 /// destination data is valid.
761 pub fn try_coerce<U>(self) -> Option<Ref<U, E, O>>
762 where
763 T: Coerce<U>,
764 U: ?Sized + Pointee,
765 {
766 Some(Ref {
767 offset: self.offset,
768 metadata: T::try_coerce_metadata(self.metadata)?,
769 _marker: PhantomData,
770 })
771 }
772
773 #[cfg(test)]
774 pub(crate) fn cast<U>(self) -> Ref<U, E, O>
775 where
776 U: ?Sized + Pointee<Stored<O> = T::Stored<O>>,
777 {
778 Ref {
779 offset: self.offset,
780 metadata: self.metadata,
781 _marker: PhantomData,
782 }
783 }
784}
785
786impl<T, const N: usize, E, O> Ref<[T; N], E, O>
787where
788 T: ZeroCopy,
789 E: ByteOrder,
790 O: Size,
791{
792 /// Coerce a reference to an array into a slice.
793 ///
794 /// # Examples
795 ///
796 /// ```
797 /// use musli_zerocopy::OwnedBuf;
798 ///
799 /// let mut buf = OwnedBuf::new();
800 ///
801 /// let values = buf.store(&[1, 2, 3, 4]);
802 /// let slice = values.array_into_slice();
803 ///
804 /// assert_eq!(buf.load(slice)?, &[1, 2, 3, 4]);
805 /// # Ok::<_, musli_zerocopy::Error>(())
806 /// ```
807 #[inline]
808 pub fn array_into_slice(self) -> Ref<[T], E, O> {
809 Ref::with_metadata(self.offset, N)
810 }
811}
812
impl<T, E, O> Ref<MaybeUninit<T>, E, O>
where
    T: Pointee,
    E: ByteOrder,
    O: Size,
{
    /// Assume that the reference is initialized.
    ///
    /// Unlike the counterpart in Rust, this isn't actually unsafe. Because in
    /// order to load the reference again we'd have to validate it anyways.
    ///
    /// The raw `offset` and `metadata` fields are carried over unchanged;
    /// only the pointee type parameter changes.
    #[inline]
    pub const fn assume_init(self) -> Ref<T, E, O> {
        Ref {
            offset: self.offset,
            metadata: self.metadata,
            _marker: PhantomData,
        }
    }
}
832
// Manual `Debug` so the pointee type name is rendered (e.g. `Ref<[u32]>`)
// alongside the raw stored `offset` and `metadata` fields.
impl<T, E, O> fmt::Debug for Ref<T, E, O>
where
    T: ?Sized + Pointee<Stored<O>: fmt::Debug>,
    E: ByteOrder,
    O: Size + fmt::Debug,
{
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Ref<{}> {{ offset: {:?}, metadata: {:?} }}",
            core::any::type_name::<T>(),
            self.offset,
            self.metadata,
        )
    }
}
850
// Implemented manually (instead of derived) so that `Ref<T>` is `Clone`
// regardless of whether `T` itself is — only the bounds below are required.
impl<T, E, O> Clone for Ref<T, E, O>
where
    T: ?Sized + Pointee,
    E: ByteOrder,
    O: Size,
{
    #[inline]
    fn clone(&self) -> Self {
        *self
    }
}
862
// Implemented manually so `Ref<T>` is `Copy` without requiring `T: Copy`.
impl<T, E, O> Copy for Ref<T, E, O>
where
    T: ?Sized + Pointee,
    E: ByteOrder,
    O: Size,
{
}
870
// Equality compares the raw stored fields; two references are equal when both
// their offset and metadata components match.
impl<T, E, O> PartialEq for Ref<T, E, O>
where
    T: ?Sized + Pointee<Stored<O>: PartialEq>,
    E: ByteOrder,
    O: PartialEq + Size,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.offset == other.offset && self.metadata == other.metadata
    }
}
882
// Full equivalence, available when the offset and metadata types are `Eq`.
impl<T, E, O> Eq for Ref<T, E, O>
where
    T: ?Sized + Pointee<Stored<O>: Eq>,
    E: ByteOrder,
    O: Eq + Size,
{
}
890
891impl<T, E, O> PartialOrd for Ref<T, E, O>
892where
893 T: ?Sized + Pointee<Stored<O>: PartialOrd>,
894 E: ByteOrder,
895 O: Ord + Size,
896{
897 #[inline]
898 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
899 match self.offset.partial_cmp(&other.offset) {
900 Some(Ordering::Equal) => {}
901 ord => return ord,
902 }
903
904 self.metadata.partial_cmp(&other.metadata)
905 }
906}
907
908impl<T, E, O> Ord for Ref<T, E, O>
909where
910 T: ?Sized + Pointee<Stored<O>: Ord>,
911 E: ByteOrder,
912 O: Ord + Size,
913{
914 #[inline]
915 fn cmp(&self, other: &Self) -> Ordering {
916 match self.offset.cmp(&other.offset) {
917 Ordering::Equal => {}
918 ord => return ord,
919 }
920
921 self.metadata.cmp(&other.metadata)
922 }
923}
924
// Hashes both raw stored fields, in the same order `PartialEq` compares them,
// keeping `Hash` consistent with `Eq`.
impl<T, E, O> Hash for Ref<T, E, O>
where
    T: ?Sized + Pointee<Stored<O>: Hash>,
    E: ByteOrder,
    O: Hash + Size,
{
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.offset.hash(state);
        self.metadata.hash(state);
    }
}