Struct musli_zerocopy::pointer::Ref
source · #[repr(C)]pub struct Ref<T, E: ByteOrder = Native, O: Size = DefaultSize>{ /* private fields */ }
Expand description
A stored reference to a type T.
A reference is made up of two components:
- An
offset()
indicating the absolute offset into aBuf
where the pointed-to (pointee) data is located. - An optional
metadata()
component, which if set indicates that this reference is a wide pointer. This is used when encoding types such as[T]
orstr
to include additional data necessary to handle the type.
§Examples
use std::mem::align_of;
use musli_zerocopy::{Ref, OwnedBuf};
let mut buf = OwnedBuf::with_alignment::<u32>();
buf.extend_from_slice(&[1, 2, 3, 4]);
let buf = buf.as_ref();
let number = Ref::<u32>::new(0);
assert_eq!(*buf.load(number)?, u32::from_ne_bytes([1, 2, 3, 4]));
Implementations§
source§impl<T, E: ByteOrder, O: Size> Ref<T, E, O>
impl<T, E: ByteOrder, O: Size> Ref<T, E, O>
sourcepub fn to_be(self) -> Ref<T, Big, O>
pub fn to_be(self) -> Ref<T, Big, O>
Convert this reference into a Big
-endian ByteOrder
.
§Examples
use musli_zerocopy::{endian, Ref};
let r: Ref<u32> = Ref::new(10);
assert_eq!(r.offset(), 10);
let r: Ref<u32, endian::Little> = Ref::new(10);
assert_eq!(r.offset(), 10);
let r: Ref<u32, endian::Big> = r.to_be();
assert_eq!(r.offset(), 10);
sourcepub fn to_le(self) -> Ref<T, Little, O>
pub fn to_le(self) -> Ref<T, Little, O>
Convert this reference into a Little
-endian ByteOrder
.
§Examples
use musli_zerocopy::{endian, Ref};
let r: Ref<u32> = Ref::new(10);
assert_eq!(r.offset(), 10);
let r: Ref<u32, endian::Big> = Ref::new(10);
assert_eq!(r.offset(), 10);
let r: Ref<u32, endian::Little> = r.to_le();
assert_eq!(r.offset(), 10);
sourcepub fn to_ne(self) -> Ref<T, Native, O>
pub fn to_ne(self) -> Ref<T, Native, O>
Convert this reference into a Native
-endian ByteOrder
.
§Examples
use musli_zerocopy::{endian, Ref};
let r: Ref<u32, endian::Native> = Ref::<u32, endian::Big>::new(10).to_ne();
assert_eq!(r.offset(), 10);
let r: Ref<u32, endian::Native> = Ref::<u32, endian::Little>::new(10).to_ne();
assert_eq!(r.offset(), 10);
let r: Ref<u32, endian::Native> = Ref::<u32, endian::Native>::new(10).to_ne();
assert_eq!(r.offset(), 10);
sourcepub fn to_endian<U: ByteOrder>(self) -> Ref<T, U, O>
pub fn to_endian<U: ByteOrder>(self) -> Ref<T, U, O>
Convert this reference into a U
-endian ByteOrder
.
§Examples
use musli_zerocopy::{endian, Ref};
let r: Ref<u32, endian::Native> = Ref::<u32, endian::Big>::new(10).to_endian();
assert_eq!(r.offset(), 10);
let r: Ref<u32, endian::Native> = Ref::<u32, endian::Little>::new(10).to_endian();
assert_eq!(r.offset(), 10);
let r: Ref<u32, endian::Native> = Ref::<u32, endian::Native>::new(10).to_endian();
assert_eq!(r.offset(), 10);
source§impl<T, E: ByteOrder, O: Size> Ref<T, E, O>
impl<T, E: ByteOrder, O: Size> Ref<T, E, O>
sourcepub fn with_metadata<U>(offset: U, metadata: T::Metadata) -> Self
pub fn with_metadata<U>(offset: U, metadata: T::Metadata) -> Self
Construct a reference with custom metadata.
§Panics
This will panic if either:
- The
offset
ormetadata
can’t be byte swapped as perZeroCopy::CAN_SWAP_BYTES
. - Packed
offset()
cannot be constructed fromU
(out of range). - Packed
metadata()
cannot be constructed fromT::Metadata
(reason depends on the exact metadata).
To guarantee that this constructor will never panic, Ref<T, Native, usize>
can be used. This also ensures that construction is a no-op.
§Examples
use musli_zerocopy::Ref;
let reference = Ref::<[u64]>::with_metadata(42, 10);
assert_eq!(reference.offset(), 42);
assert_eq!(reference.len(), 10);
sourcepub fn try_with_metadata<U>(
offset: U,
metadata: T::Metadata
) -> Result<Self, Error>
pub fn try_with_metadata<U>( offset: U, metadata: T::Metadata ) -> Result<Self, Error>
Fallibly construct a reference with metadata.
§Errors
This will error if either:
- The
offset
ormetadata
can’t be byte swapped as perZeroCopy::CAN_SWAP_BYTES
. - Packed
offset()
cannot be constructed fromU
(out of range). - Packed
metadata()
cannot be constructed fromT::Metadata
(reason depends on the exact metadata).
To guarantee that this constructor will never error, Ref<T, Native, usize>
can be used. This also ensures that construction is a no-op.
§Examples
use musli_zerocopy::Ref;
let reference = Ref::<[u64]>::try_with_metadata(42, 10)?;
assert_eq!(reference.offset(), 42);
assert_eq!(reference.len(), 10);
source§impl<T, E: ByteOrder, O: Size> Ref<[T], E, O>where
T: ZeroCopy,
impl<T, E: ByteOrder, O: Size> Ref<[T], E, O>where
T: ZeroCopy,
sourcepub fn len(self) -> usize
pub fn len(self) -> usize
Return the number of elements in the slice [T]
.
§Examples
use musli_zerocopy::pointer::Ref;
let slice = Ref::<[u32]>::with_metadata(0, 2);
assert_eq!(slice.len(), 2);
sourcepub fn is_empty(self) -> bool
pub fn is_empty(self) -> bool
Test if the slice [T]
is empty.
§Examples
use musli_zerocopy::pointer::Ref;
let slice = Ref::<[u32]>::with_metadata(0, 0);
assert!(slice.is_empty());
let slice = Ref::<[u32]>::with_metadata(0, 2);
assert!(!slice.is_empty());
sourcepub fn get(self, index: usize) -> Option<Ref<T, E, O>>
pub fn get(self, index: usize) -> Option<Ref<T, E, O>>
Try to get a reference directly out of the slice without validation.
This avoids having to validate every element in a slice in order to address them.
§Examples
use musli_zerocopy::OwnedBuf;
let mut buf = OwnedBuf::new();
let slice = buf.store_slice(&[1, 2, 3, 4]);
let two = slice.get(2).expect("Missing element 2");
assert_eq!(buf.load(two)?, &3);
assert!(slice.get(4).is_none());
sourcepub fn get_unchecked(self, index: usize) -> Ref<T, E, O>
pub fn get_unchecked(self, index: usize) -> Ref<T, E, O>
Get an unchecked reference directly out of the slice without validation.
This avoids having to validate every element in a slice in order to address them.
In contrast to get()
, this does not check that the index is within
the bounds of the current slice, although it’s not unsafe since it
cannot lead to anything inherently unsafe. Only garbled data.
§Examples
use musli_zerocopy::OwnedBuf;
let mut buf = OwnedBuf::new();
let slice = buf.store_slice(&[1, 2, 3, 4]);
let two = slice.get_unchecked(2);
assert_eq!(buf.load(two)?, &3);
let oob = slice.get_unchecked(4);
assert!(buf.load(oob).is_err());
sourcepub fn split_at(self, at: usize) -> (Self, Self)
pub fn split_at(self, at: usize) -> (Self, Self)
Split the slice reference at the given position at
.
§Panics
This panics if the given range is out of bounds.
§Examples
use musli_zerocopy::OwnedBuf;
let mut buf = OwnedBuf::new();
let slice = buf.store_slice(&[1, 2, 3, 4]);
buf.align_in_place();
let (a, b) = slice.split_at(3);
let (c, d) = slice.split_at(4);
assert_eq!(buf.load(a)?, &[1, 2, 3]);
assert_eq!(buf.load(b)?, &[4]);
assert_eq!(buf.load(c)?, &[1, 2, 3, 4]);
assert_eq!(buf.load(d)?, &[]);
sourcepub fn iter(self) -> Iter<T, E, O>
pub fn iter(self) -> Iter<T, E, O>
Construct an iterator over this reference.
§Examples
use musli_zerocopy::OwnedBuf;
let mut buf = OwnedBuf::new();
buf.extend_from_slice(&[1, 2, 3, 4]);
let slice = buf.store_slice(&[1, 2, 3, 4]);
buf.align_in_place();
let mut out = Vec::new();
for r in slice.iter() {
out.push(*buf.load(r)?);
}
for r in slice.iter().rev() {
out.push(*buf.load(r)?);
}
assert_eq!(out, [1, 2, 3, 4, 4, 3, 2, 1]);
source§impl<E: ByteOrder, O: Size> Ref<str, E, O>
impl<E: ByteOrder, O: Size> Ref<str, E, O>
source§impl<T, E: ByteOrder, O: Size> Ref<T, E, O>
impl<T, E: ByteOrder, O: Size> Ref<T, E, O>
sourcepub fn new<U>(offset: U) -> Self
pub fn new<U>(offset: U) -> Self
Construct a reference at the given offset.
§Panics
This will panic if either:
- The
offset
can’t be byte swapped as perZeroCopy::CAN_SWAP_BYTES
. - Packed
offset()
cannot be constructed fromU
(out of range).
§Examples
use musli_zerocopy::Ref;
let reference = Ref::<u64>::new(42);
assert_eq!(reference.offset(), 42);
source§impl<T, E: ByteOrder, O: Size> Ref<T, E, O>
impl<T, E: ByteOrder, O: Size> Ref<T, E, O>
sourcepub fn offset(self) -> usize
pub fn offset(self) -> usize
Get the offset the reference points to.
§Examples
use musli_zerocopy::Ref;
let reference = Ref::<u64>::new(42);
assert_eq!(reference.offset(), 42);
sourcepub fn coerce<U>(self) -> Ref<U, E, O>
pub fn coerce<U>(self) -> Ref<U, E, O>
Coerce from one kind of reference to another ensuring that the
destination type U
is size-compatible.
This performs metadata conversion if the destination metadata for U
differs from T
, such as for [u32]
to [u8]
it would multiply the
length by 4 to ensure that the slice points to an appropriately sized
region.
If the metadata conversion would overflow, this will wrap around the numerical bounds or panic for debug builds.
See try_coerce()
for more documentation, which is also a checked
variant of this method.
sourcepub fn try_coerce<U>(self) -> Option<Ref<U, E, O>>
pub fn try_coerce<U>(self) -> Option<Ref<U, E, O>>
Try to coerce from one kind of reference to another ensuring that the
destination type U
is size-compatible.
This performs metadata conversion if the destination metadata for U
differs from T
, such as for [u32]
to [u8]
it would multiply the
length by 4 to ensure that the slice points to an appropriately sized
region.
This returns None
in case metadata would overflow due to the
conversion.
use musli_zerocopy::Ref;
let reference: Ref<u64> = Ref::zero();
let reference2 = reference.coerce::<[u32]>();
assert_eq!(reference2.len(), 2);
This method ensures that coercions across inappropriate types are prohibited, such as coercing from a reference to a slice which is too large.
use musli_zerocopy::Ref;
let reference: Ref<u32> = Ref::zero();
let reference2 = reference.coerce::<[u64]>();
If metadata needs to be adjusted for the destination type such as for slices, it will be:
use musli_zerocopy::Ref;
let reference: Ref<[u32]> = Ref::with_metadata(0, 1);
let reference2 = reference.try_coerce::<[u8]>().ok_or("bad coercion")?;
assert_eq!(reference2.len(), 4);
let reference: Ref<str> = Ref::with_metadata(0, 12);
let reference2 = reference.try_coerce::<[u8]>().ok_or("bad coercion")?;
assert_eq!(reference2.len(), 12);
This does mean that numerical overflow might occur if the packed metadata is too small:
use musli_zerocopy::Ref;
use musli_zerocopy::endian::Native;
let reference = Ref::<[u32], Native, u8>::with_metadata(0, 64);
let reference2 = reference.try_coerce::<[u8]>();
assert!(reference2.is_none()); // 64 * 4 would overflow u8 packed metadata.
Coercion of non-zero types is supported, but does not guarantee that the destination data is valid.
source§impl<T, const N: usize, E: ByteOrder, O: Size> Ref<[T; N], E, O>where
T: ZeroCopy,
impl<T, const N: usize, E: ByteOrder, O: Size> Ref<[T; N], E, O>where
T: ZeroCopy,
sourcepub fn array_into_slice(self) -> Ref<[T], E, O>
pub fn array_into_slice(self) -> Ref<[T], E, O>
Coerce a reference to an array into a slice.
§Examples
use musli_zerocopy::OwnedBuf;
let mut buf = OwnedBuf::new();
let values = buf.store(&[1, 2, 3, 4]);
let slice = values.array_into_slice();
assert_eq!(buf.load(slice)?, &[1, 2, 3, 4]);
source§impl<T, E: ByteOrder, O: Size> Ref<MaybeUninit<T>, E, O>where
T: Pointee,
impl<T, E: ByteOrder, O: Size> Ref<MaybeUninit<T>, E, O>where
T: Pointee,
sourcepub const fn assume_init(self) -> Ref<T, E, O>
pub const fn assume_init(self) -> Ref<T, E, O>
Assume that the reference is initialized.
Unlike its counterpart in Rust, this isn’t actually unsafe, because in order to load the reference again we’d have to validate it anyway.
Trait Implementations§
source§impl<T, E: ByteOrder, O> Ord for Ref<T, E, O>
impl<T, E: ByteOrder, O> Ord for Ref<T, E, O>
source§impl<T, E: ByteOrder, O> PartialEq for Ref<T, E, O>
impl<T, E: ByteOrder, O> PartialEq for Ref<T, E, O>
source§impl<T, E: ByteOrder, O> PartialOrd for Ref<T, E, O>
impl<T, E: ByteOrder, O> PartialOrd for Ref<T, E, O>
1.0.0 · source§fn le(&self, other: &Rhs) -> bool
fn le(&self, other: &Rhs) -> bool
This method tests less than or equal to (for self and other) and is used by the <=
operator. Read moresource§impl<T, A: ByteOrder, B: Size> Slice for Ref<[T], A, B>where
T: ZeroCopy,
impl<T, A: ByteOrder, B: Size> Slice for Ref<[T], A, B>where
T: ZeroCopy,
source§fn from_ref<E: ByteOrder, O: Size>(slice: Ref<[T], E, O>) -> Selfwhere
T: ZeroCopy,
fn from_ref<E: ByteOrder, O: Size>(slice: Ref<[T], E, O>) -> Selfwhere
T: ZeroCopy,
Ref<[Self::Item]>
]. Read moresource§fn try_from_ref<E: ByteOrder, O: Size>(
slice: Ref<[T], E, O>
) -> Result<Self, Error>where
T: ZeroCopy,
fn try_from_ref<E: ByteOrder, O: Size>(
slice: Ref<[T], E, O>
) -> Result<Self, Error>where
T: ZeroCopy,
Ref<[Self::Item]>
]. Read moresource§fn with_metadata(offset: usize, len: usize) -> Self
fn with_metadata(offset: usize, len: usize) -> Self
source§fn get(self, index: usize) -> Option<Self::ItemRef>
fn get(self, index: usize) -> Option<Self::ItemRef>
source§fn split_at(self, at: usize) -> (Self, Self)
fn split_at(self, at: usize) -> (Self, Self)
at
. Read moresource§impl<T, E: ByteOrder, O: Size> ZeroCopy for Ref<T, E, O>
impl<T, E: ByteOrder, O: Size> ZeroCopy for Ref<T, E, O>
source§const ANY_BITS: bool = _
const ANY_BITS: bool = _
size_of::<Self>()
bytes.source§const CAN_SWAP_BYTES: bool = true
const CAN_SWAP_BYTES: bool = true
source§fn swap_bytes<__E: ByteOrder>(self) -> Self
fn swap_bytes<__E: ByteOrder>(self) -> Self
self
using the specified byte ordering to match the
native byte ordering. Read moresource§fn initialize_padding(&mut self)
fn initialize_padding(&mut self)
source§fn to_bytes(&mut self) -> &[u8] ⓘ
fn to_bytes(&mut self) -> &[u8] ⓘ
ZeroCopy
type into bytes. Read moresource§fn from_bytes(bytes: &[u8]) -> Result<&Self, Error>
fn from_bytes(bytes: &[u8]) -> Result<&Self, Error>
Self
. Read moresource§fn from_bytes_mut(bytes: &mut [u8]) -> Result<&mut Self, Error>
fn from_bytes_mut(bytes: &mut [u8]) -> Result<&mut Self, Error>
Self
. Read moresource§fn transpose_bytes<F: ByteOrder, T: ByteOrder>(self) -> Self
fn transpose_bytes<F: ByteOrder, T: ByteOrder>(self) -> Self
F
to another T
.